diff --git a/.clang-format b/.clang-format new file mode 100644 index 0000000000..3c73f32a33 --- /dev/null +++ b/.clang-format @@ -0,0 +1,12 @@ +# Defines the Chromium style for automatic reformatting. +# http://clang.llvm.org/docs/ClangFormatStyleOptions.html +BasedOnStyle: Chromium +# This defaults to 'Auto'. Explicitly set it for a while, so that +# 'vector >' in existing files gets formatted to +# 'vector>'. ('Auto' means that clang-format will only use +# 'int>>' if the file already contains at least one such instance.) +Standard: Cpp11 +SortIncludes: true +--- +Language: ObjC +ColumnLimit: 100 diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index bbcbbe7d61..3b9ba1435e 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -8,13 +8,9 @@ assignees: '' --- **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** -A clear and concise description of what you want to happen. **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. **Additional context** -Add any other context or screenshots about the feature request here. diff --git a/.github/stale.yml b/.github/stale.yml new file mode 100644 index 0000000000..e3304780b2 --- /dev/null +++ b/.github/stale.yml @@ -0,0 +1,20 @@ +# Number of days of inactivity before an issue becomes stale +daysUntilStale: 60 +# Number of days of inactivity before a stale issue is closed +daysUntilClose: 7 +# Issues with these labels will never be considered stale +exemptLabels: + - pinned + - security + - 🐛bug + - 🚀enhancement + - 😭help%20wanted +# Label to use when marking an issue as stale +staleLabel: wontfix +# Comment to post when marking an issue as stale. 
Set to `false` to disable +markComment: > + This issue has been automatically marked as stale because it has not had + recent activity. It will be closed if no further activity occurs. Thank you + for your contributions. +# Comment to post when closing a stale issue. Set to `false` to disable +closeComment: false diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000000..eb76ef7cac --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,161 @@ +name: Build + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + dart-format-and-analyze-check: + name: Dart Format Check + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Dart Format Check + run: dart format lib/ test/ --set-exit-if-changed + - name: Import Sorter Check + run: flutter pub run import_sorter:main --no-comments --exit-if-changed + - name: Dart Analyze Check + run: flutter analyze + - name: Dart Test Check + run: flutter test + + build-for-android: + name: Build for Flutter Android + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '17.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for Android + working-directory: ./example + run: flutter build apk + + build-for-ios: + name: Build for Flutter iOS + runs-on: macos-latest + + steps: + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for iOS + working-directory: ./example + run: flutter build ios --release --no-codesign + + 
build-for-windows: + name: Build for flutter Windows + runs-on: windows-latest + + steps: + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v1 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for Windows + working-directory: ./example + run: flutter build windows --release + + build-for-macos: + name: Build for flutter macOS + runs-on: macos-latest + + steps: + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v1 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for macOS + working-directory: ./example + run: flutter build macos --release + + build-for-linux: + name: Build for Flutter Linux + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Run apt update + run: sudo apt-get update + - name: Install ninja-build libgtk-3-dev + run: sudo apt-get install -y ninja-build libgtk-3-dev + - name: Build for Linux + working-directory: ./example + run: flutter build linux + + build-for-elinux: + name: Build for Flutter Embedded Linux + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Run apt update + run: sudo apt-get update + - name: Install ninja-build libgtk-3-dev + run: sudo apt-get install -y ninja-build libgtk-3-dev + - name: Install elinux + run: git clone https://github.com/sony/flutter-elinux.git ~/flutter-elinux + - name: Build for elinux + working-directory: ./example + run: /home/runner/flutter-elinux/bin/flutter-elinux pub get && /home/runner/flutter-elinux/bin/flutter-elinux build elinux + + build-for-web: 
+ name: Build for Flutter Web + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: build for Web + working-directory: ./example + run: flutter build web diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 0000000000..417f0f5aa5 --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,15 @@ +# .github/workflows/publish.yml +name: Publish to pub.dev + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+*' + +jobs: + publish: + permissions: + id-token: write # Required for authentication using OIDC + uses: dart-lang/setup-dart/.github/workflows/publish.yml@v1 + # with: + # working-directory: path/to/package/within/repository diff --git a/.gitignore b/.gitignore index 0899a2388d..0bab26c22a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,15 +1,11 @@ *.iml .idea .DS_Store -.packages -.vscode/.DS_Store example/pubspec.lock pubspec.lock example/ios/Podfile.lock -build GeneratedPluginRegistrant.java example/android/.gradle -example/android/gradle* WorkspaceSettings.xcsettings example/.flutter-plugins example/android/local.properties @@ -19,3 +15,44 @@ example/ios/Runner/GeneratedPluginRegistrant.m example/ios/Runner/GeneratedPluginRegistrant.h example/ios/Flutter/Generated.xcconfig example/ios/Flutter/flutter_export_environment.sh + +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# Flutter/Dart/Pub related +**/doc/api/ +.dart_tool/ +.flutter-plugins +.flutter-plugins-dependencies +.packages +.pub-cache/ +.pub/ +/build/ +/android/.gradle/ + + +android/.classpath +android/.settings/org.eclipse.buildship.core.prefs + +# VSCode +.vscode/ + +!webrtc_android.iml +!webrtc.iml + +# vs +*.pdb \ 
No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 331520a23d..6120e9fb8c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,785 @@ + # Changelog --------------------------------------------- +[0.14.1] - 2025-05-22 + +* [Android] fix: Recording bug (#1839) +* [Android] fix: calls in terminated mode by disabling orientation manager (#1840) +* [Android] fix: Wait for audio and video thread to fully stop to avoid corrupted recordings (#1836) + +[0.14.0] - 2025-05-06 + +* [iOS/Android]feat: Media Recorder implementation Android and iOS (#1810) +* [Wndows] fix: Pickup registrar for plugin by plugin registrar manager (#1752) +* [Linux] fix: add task runner for linux. (#1821) +* [iOS/macOS] fix: Fix deadlock when creating a frame cryptor on iOS/macOS. + +[0.13.1+hotfix.1] - 2025-04-07 + +* [Android] fix: Fix `clearAndroidCommunicationDevice` call blocking. + +[0.13.1] - 2025-04-03 + +* [Android] fix: remove setPreferredInputDevice when getUserAduio. (#1808) +* [Web] fix: race condition in RTCVideoRenderer for Web (#1805) +* [Android] fix: Migrate from onSurfaceDestroyed to onSurfaceCleanup for SurfaceProducer.Callback. (#1806) + +[0.13.0] - 2025-03-24 + +* [All] feat: add getBufferedAmount for DataChannel. (#1796) +* [Windows] fix: fixed non-platform thread call error. (#1795) + +[0.12.12+hotfix.1] - 2025-03-12 + +* [Android] fix: fixed video not rendered after resume from background. + +[0.12.12] - 2025-03-09 + +* [Android] feat: Migrate to the new Surface API. (#1726) +* [Chore] chore: fix sponsors logo and links. + +[0.12.11] - 2025-02-23 + +* [web] bump version for dart_webrtc. +* [web] fix: compile error for web with --wasm. + +[0.12.10] - 2025-02-18 + +* [web] bump version for dart_webrtc. +* [web] fix: compile error for web with --wasm. 
+ +[0.12.9] - 2025-02-13 + +* [iOS] feat: Add option to start capture without broadcast picker (#1764) + +[0.12.8] - 2025-02-07 + +* [Dart] feat: expose rtc video value (#1754) +* [Dart] chore: bump webrtc-interface to 1.2.1. + +[0.12.7] - 2025-01-24 + +* [iOS] More robustness for video renderer. (#1751) + +[0.12.6] - 2025-01-20 + +* [iOS] fix In-app screen recording. +* [Android] fix: avoid crashes when surfaceTextureHelper is null. (#1743) + +[0.12.5+hotfix.2] - 2024-12-25 + +* [iOS] fix: Audio route issue for iOS. + +[0.12.5+hotfix.1] - 2024-12-25 + +* [iOS/macOS] fix: Pass MediaConstraints for getUserAudio. + +[0.12.5] - 2024-12-23 + +* [iOS/Android] Fixed buf for screen capture. +* [Android] Fixed first frame flickering. + +[0.12.4] - 2024-12-16 + +* [iOS/Android] add FocusMode/ExposureMode settings for mobile. (#1435) +* [Dart] fix compiler errors. +* [eLinux] add $ORIGIN to rpath in elinux (#1720). + +[0.12.3] - 2024-11-29 + +* [iOS/Android/macOS] feat: Expose AV Processing and Sink native APIs. + +[0.12.2] - 2024-11-26 + +* [Dart] fix: race condition during video renderer initialization. (#1692) +* [Darwin] fix: Add mutex lock to avoid pixelbuffer access contention. (#1694) + +[0.12.1+hotfix.1] - 2024-10-25 + +* [iOS] fix: fix switch camera broken on iOS. + +* [web] fix: add stub WebRTC.initialize for web. +* [Docs] Fixing proguard rules link (#1686) +* [iOS/Android] feat: support unprocessed audio (#825) +* [eLinux] feat: add support for eLinux (#1338) + +[0.12.0+hotfix.1] - 2024-10-18 + +* [macOS] fix compilation error for macOS. + +[0.12.0] - 2024-10-16 + +* [iOS/macOS] Fix memory leak for iOS/macOS. +* [iOS] Support MultiCam Session for iOS. + +[0.11.7] - 2024-09-04 + +* [Web] Bump dart_webrtc to 1.4.9. +* [Web] Bump web version to 1.0.0. + +[0.11.6+hotfix.1] - 2024-08-07 + +* [iOS] Fixed PlatformView not rendering after resuming from background. + +[0.11.6] - 2024-08-02 + +* [Web] change VideoElement to HTMLVideoElement. 
+* [iOS] added shared singleton for FlutterWebRTCPlugin (#1634) +* [iOS] Using av samplebuffer for PlatformView (#1635) + +[0.11.5] - 2024-07-23 + +* [Android] Report actual sizes for camera media stream track (#1636). + +[0.11.4] - 2024-07-19 + +* [Android] fix issue for camera switching. + +[0.11.3] - 2024-07-12 + +* Bump version for libwebrtc. + +[0.11.2] - 2024-07-09 + +* [Windows] fix crash for windows. +* [Darwin] bump WebRTC version for darwin. + +[0.11.1] - 2024-06-17 + +* [macOS] Downgrade macOS system dependencies to 10.14. + +[0.11.0] - 2024-06-17 + +* [Native] upgrade libwebrtc to m125.6422. + +[0.10.8] - 2024-06-05 + +* [iOS] fix(platform_view): fit cover works wrong (#1593) +* [iOS/macOS] fix: Fix the issue that the video is not displayed when using 'video': true (#1592) +* [Web] bump dart_webrtc to 1.4.6. + +[0.10.7] - 2024-05-30 + +* [iOS] feat: add PlatformView Renderer for iOS. (#1569) +* [iOS] fix: audio session control for iOS. (#1590) + +[0.10.6] - 2024-05-13 + +* [Web] Some important fixes for web. + +[0.10.5] - 2024-05-13 + +* [Android] fix: make MediaDeviceInfo (Audio deviceId, label, groupId) consistent. (#1583) + +[0.10.4] - 2024-05-06 + +* [iOS/macOS] chore: update swift webrtc sdks to 114.5735.10 (#1576) +* [Android] fix: actually call selectAudioOutput in enableSpeakerButPreferBluetooth +* [iOS] fix: remember speakerphone mode for ensureAudioSession (#1568) +* [Windows/Linux] Fix handling of unimplemented method (#1563) + +[0.10.3] - 2024-04-09 + +* [iOS/macOS] Fix compilation warning for iOS/macOS. + +[0.10.2] - 2024-04-08 + +* [Native/Web] feat: add keyRingSize/discardFrameWhenCryptorNotReady to KeyProviderOptions. + +[0.10.1] - 2024-04-08 + +* [Web] fix renderer issue for web. + +[0.10.0] - 2024-04-08 + +* [Web] move to package:web. + +[0.9.48+hotfix.1] - 2024-02-05 + +* [Android] bump version for libwebrtc. + +[0.9.48] - 2024-02-05 + +* [Android] bump version for libwebrtc. +* [iOS] Supports ensureAudioSsession method for iOS only. 
(#1514) +* [Android] fix android wrong display size. (#1508). + +[0.9.47] - 2023-11-29 + +* [Windows/Linux] fix: Check the invalid value of candidate and session description. (#1484) +* [Windows/Linux/macOS] fix: screen sharing issue for desktop. +* [Web] fix: platformViewRegistry getter is deprecated (#1485) +* [Dart] Throw exception for set src object (#1491). + +[0.9.46] - 2023-10-25 + +* [iOS/macOS] fix: Crop video output size to target settings. (#1472) +* [Android] fix: Fix bluetooth sco not stopping after room disconnect (#1475) + +[0.9.45] - 2023-09-27 + +* [iOS/macOS] fix: send message on non-platform thread. +* [Windows] fix: fix setSrcObj with trackId for Windows. +* [Windows] fix: fix "unlock of unowned mutex" error when call "captureFrame()" func on windows. + +[0.9.44] - 2023-09-25 + +* [Windows] fix: fix Renderer bug for Windows. +* [Native] fix: Use independent threads to process frame encryption/decryption +* [Native] fix: Correct handle SIF frame +* [Native] fix: Fix a fault tolerance judgment failure + +[0.9.43] - 2023-09-20 + +* [Native] fix: send frame cryptor events from signaling thread. +* [Native] fix: h264 freeze when using E2EE. + +[0.9.42+hotfix.1] - 2023-09-15 + +* [Windows/Linux] fix: fix cannot start vp8/h264 encoder correctly. + +[0.9.42] - 2023-09-15 + +* [Dart/Native] feat: add more framcryptor api (#1444) +* [Dart/Native] feat: support scalability mode (#1442) +* [Android] fix: Turn off audio routing in non communication modes (#1438) + +* [Android] feat: Add more control over android audio options. + +[0.9.41] - 2023-08-30 + +* [Android] feat: Add more control over android audio options. + +[0.9.40] - 2023-08-16 + +* [Windows/Linux] fix: nullptr checking for sender/receiver for getStats. + +[0.9.39] - 2023-08-14 + +* [Dart/Native] feat: add async methods for getting pc states. 
+ +[0.9.38] - 2023-08-11 + +* [Android] fix: Expose helper to clearCommunicationDevice on AudioManager.AUDIOFOCUS_LOSS +* [Android] feat: support force SW codec list for android, and disable HW codec for VP9 by default. +* [Android] fix: issue for audio device switch (#1417) +* [Android/iOS] feat: Added setZoom method to support camera zooming while streaming. (#1412). + +[0.9.37] - 2023-08-07 + +* [Native] fix: Skip set_sdp_fmtp_line if sdpFmtpLine is empty. +* [Android] fix: fix android earpiece not being replaced after wired headset is disconnected. +* [Dart] fix: partially rebuild RTCVideoView when renderVideo value changes. +* [Android] feat: expose android audio modes. +* [Android] feat: support forceSWCodec for Android. +* [Linux] fix: add $ORIGIN to rpath. + +[0.9.36] - 2023-07-13 + +* [Native] upgrade libwebrtc to m114.5735.02. +* [Windows/Linux] Add implementation to MediaStreamTrack.captureFrame() for linux/windows. +* [Darwin/Android] Support to ignore network adapters used for ICE on Android, iOS and macOS. + +[0.9.35] - 2023-06-30 + +* [iOS] feat: expose audio mode for ios. +* [Darwin] fix: compiler warning for Darwin. +* [Dart] Fix setMicrophoneMute() not awaitable. +* [Native] Update libwebrtc to m114. +* [Dart/Web] Separate frame cryptor to dart-webrtc. + +[0.9.34] - 2023-06-14 + +* [Web] fix facingMode for flutter web mobile. + +[0.9.33] - 2023-06-08 + +* [Android] fix frame drops for android. + +[0.9.32] - 2023-05-30 + +* [Android] fix issue for get user audio. +* [Android] fix getStats throw LinkedHasMap exception. + +[0.9.31] - 2023-05-23 + +* [Darwin] Improve iOS/macOS H264 encoder (Upgrade to WebRTC-SDK M104.5112.17). + +[0.9.30+hotfix.2] - 2023-05-18 + +* [Windows/Linux] fix bug for eventchannel proxy. +* [Windows/Linux] fix: crash for pc.close/dispose on win/linux. (#1360) + +[0.9.30+hotfix.1] - 2023-05-17 + +* [Windows/Linux] Fix compiler error. + +[0.9.30] - 2023-05-16 + +* [Darwin] Handle exceptions for frame rate settings for darinw. 
(#1351) +* [Android] Fix bluetooth device enumerate. (#1349) +* [Darwin/Android/Windows/Linux] Added maxIPv6Networks configuration (#1350) +* [iOS] Fix: broadcast extension not found fallback logic (#1347) +* [Android] Move the call of capturer.stopCapture() outside the main thread to avoid blocking of flutter method call. +* [Windows/Linux] Fix the crash issue of video room (#1343) + +[0.9.29+hotfix.1] - 2023-05-08 + +* [Android] fix: application context null when app is terminated. +* [Android/iOS] feat: add way to enable speaker but prefer bluetooth. + +[0.9.28] - 2023-05-08 + +* [Windows/Linux] fix: use the correct transceiver id. +* [Windows/Linux] fix: Support restart camera for Windows/Linux. + +[0.9.27] - 2023-04-27 + +* [Darwin/Android/Windows/Linux] feat: framecryptor. +* [Windows/Linux] Fix the type/code mistake. +* [Windows/Linux] Fix uneffective RTPTransceiver::GetCurrentDirection. +* [Windows/Linux] RTPtransceiver::getCurrentDirection returns correct value. + +[0.9.26] - 2023-04-16 + +* [iOS/macOS] motify h264 profile-level-id to support high resolution. +* [Dawrin/Android/Windows] feat: add RTCDegradationPreference to RTCRtpParameters. + +[0.9.25] - 2023-04-10 + +* [Dawrin/Android/Windows] Add `addStreams` to `RTCRtpSender` +* [Android] fix: label for Wired Headset. (#1305) +* [Dawrin/Android/Windows] Feat/media stream track get settings (#1294) +* [Android/iOS] Fix track lookup in the platform specific code for Android and iOS (#1289) +* [iOS] fix: ICE Connectivity doesn't establish with DualSIM iPhones. +* [Android] Switch to webrtc hosted on maven central (#1288) + +[0.9.24] - 2023-03-07 + +* [iOS] avaudiosession mode changed to AVAudioSessionModeVideoChat (#1285) +* [macOS] fix memory leak for screen capture. + +[0.9.23] - 2023-02-17 + +* [Windows/Linux] Updated libwebrtc binary for windows/linux to fix two crashes. + +[0.9.22] - 2023-02-14 + +* [iOS] fix: Without any setActive for rtc session, libwebrtc manages the session counter by itself. 
(#1266) +* [dart] fix: remove rtpsender.dispose. +* [web] fix video renderer issue for safari. +* [macOS] Fixed macOS desktop capture crash with simulcast enabled. +* [macOS] Fix the crash when setting the fps of the virtual camera. + +[0.9.21] - 2023-02-10 + +* [Web] Fix: RTCRtpParameters.fromJsObject for Firefox. +* [Web] Add bufferedamountlow. +* [Android] Fixed frame capturer returning images with wrong colors (#1258). +* [Windows] bug fix. + +[0.9.20] - 2023-02-03 + +* [Dawrin/Android/Windows] Add getCapabilities/setCodecPreferences methods +* [Darwin] buffered amount +* [Linux] Fixed audio device name buffer size +* [Android] Start audioswitch and only activate it when needed +* [Darwin] Fix typo which broke GcmCryptoSuites + +[0.9.19] - 2023-01-10 + +* [Dart] Fix getStats: change 'track' to 'trackId' (#1199) +* [Android] keep the audio switch after stopping (#1202) +* [Dart] Enhance RTC video view with placeholder builder property (#1206) +* [Android] Use forked version of audio switch to avoid BLUETOOTH_CONNECT permission (#1218) + +[0.9.18] - 2022-12-12 + +* [Web] Bump dart_webrtc to 1.0.12, Convert iceconnectionstate to connectionstate for Firefox. +* [Android] Start AudioSwitchManager only when audio track added (fix #1163) (#1196) +* [iOS] Implement detachFromEngineForRegistrar (#1192) +* [iOS] Handle Platform Exception on addCandidate (#1190) +* [Native] Code format with clang-format. + +[0.9.17] - 2022-11-28 + +* [Android] Update android webrtc version to 104.5112.05 +* [iOS] Update WebRTC.xframework version to 104.5112.07 + +[0.9.16] - 2022-11-14 + +* [Linux] Fixed compiler error for flutter 3.3.8. +* [Linux] Remove 32-bit precompiled binaries. +* [Linux] Supports linux-x64 and linux-arm64. + +[0.9.15] - 2022-11-13 + +* [Linux] Add Linux Support. + +[0.9.14] - 2022-11-12 + +* [iOS] Fix setSpeakerOn has no effect after change AVAudioSession mode to playback. + +[0.9.13] - 2022-11-12 + +* [Dart] Change MediaStream.clone to async. 
+* [iOS] Fixed the bug that the mic indicator light was still on when mic recording was stopped. +* [iOS/macOS/Android/Windows] Allow sdpMLineIndex to be null when addCandidate. +* [macOS] Frame capture support for MacOS. +* [Android] Add enableCpuOveruseDetection configuration (#1165). +* [Android] Update comments (#1164). + +[0.9.12] - 2022-11-02 + +* [iOS] Fixed the problem that iOS earphones and speakers do not switch. +* [Windows] fix bug for rtpSender->RemoveTrack/pc->getStats. +* [iOS] Return groupId. +* [Web] MediaRecorder.startWeb() should expose the timeslice parameter. +* [iOS] Implement RTCPeerConnectionDelegate didRemoveIceCandidates method. +* [iOS] fix disposing Broadcast Sharing stream. + +[0.9.11] - 2022-10-16 + +* [iOS] fix audio route/setSpeakerphoneOn issues. +* [Windows] fix: Have same remote streams id then found wrong MediaStream. +* [Dart] feat: RTCVideoRenderer supports specific trackId when setting MediaStream. + +[0.9.9+hotfix.1] - 2022-10-12 + +* [Darwin] Fix getStats for darwin when trackId is NSNull. + +[0.9.9] - 2022-10-12 + +* [Darwin/Android/Windows] Support getStats for RtpSender/RtpReceiver (Migrate from Legacy to Standard Stats for getStats). +* [Android] Dispose streams and connections. +* [Android] Support rtp transceiver direction type 4. +* [Web] Update dart_webrtc dependendency. + +[0.9.8] - 2022-09-30 + +* [Android] fix: Make sure local stream/track dispose correctly. +* [Android] Remove bluetooth permission on peerConnectionInit. +* [iOS] Fix system sound interruption on iOS (#1099). +* [Android] Fix: call mode on app start (#1097). +* [Dart] Avoid renderer initialization multiple times (#1067). + +[0.9.7] - 2022-09-13 + +* [Windows] Support sendDtmf. +* [Windows] Fixed getStats. + +[0.9.6] - 2022-09-06 + +* [Dart] The dc created by didOpenDataChannel needs to set state to open. +* [Dart] Added callback onFirstFrameRendered. + +[0.9.5] - 2022-08-30 + +* [Android] fix: Fix crash when using multiple renderers. 
+* [Android] fix bug with track dispose cannot close video +* [Andorid/iOS/macOS/Windows] Fix bug of missing events in data-channel. + +[0.9.4] - 2022-08-22 + +* [Andorid/iOS/macOS/Windows] New audio input/output selection API, ondevicechange event is used to monitor audio device changes. + +[0.9.3] - 2022-08-15 + +* [Windows/macOS] Fix UI freeze when getting thumbnails. + +[0.9.2] - 2022-08-09 + +* [Android] update libwebrtc to com.github.webrtc-sdk:android:104.5112.01. +* [iOS/macOS] update WebRTC-SDK to 104.5112.02. +* [Windows] update libwebrtc.dll to 104.5112.02. + +[0.9.1] - 2022-08-01 + +* [iOS] fix : iOS app could not change camera resolutions cause by wrong datatype in the video Contraints. +* [Darwin] bump version for .podspec. + +[0.9.0] - 2022-07-27 + +* [macOS] Added screen-sharing support for macOS +* [Windows] Added screen-sharing support for Windows +* [iOS/macOS] fix: Fix compile warning for Darwin +* [Darwin/Android/Windows] fix: Fix typo peerConnectoinEvent -> peerConnectionEvent for EventChannel name (#1019) + +[0.8.12] - 2022-07-15 + +* [Darwin]: fix: camera release. + +[0.8.11] - 2022-07-11 + +* [Windows] Fix variant exception of findLongInt. (#990) +* [Windows] fix unable to get username/credential when parsing iceServers containing urls +* [iOS] Fix RTCAudioSession properties set with libwebrtc m97, Fixes #987. + +[0.8.10] - 2022-06-28 + +* [iOS] IPC Broadcast Upload Extension support for Screenshare + +[0.8.9] - 2022-06-08 + +* [Android] Fixes DataChannel issue described in #974 +* [iOS] Fixes DataChannel issue described in #974 +* [Dawrin/Android/Windows] Split data channel's webrtc id from our internal id (#961) +* [Windows] Update to m97. +* [Windows] Add PeerConnectionState +* [Windows] Fix can't open mic alone when built-in AEC is enabled. + +[0.8.8] - 2022-05-31 + +* [Android] Added onBufferedAmountChange callback which will return currentBuffer and changedBuffer and implemented bufferedAmount. 
+* [Android] Added onBufferedAmountLow callback which will return currentBuffer ans will be called if bufferedAmountLowThreshold is set a value. + +[0.8.7] - 2022-05-18 + +* [iOS/macOS] fix: Use RTCYUVHelper instead of external libyuv library (#954). +* [iOS/macOS] Flutter 3.0 crash fixes, setStreamHandler on main thread (#953) +* [Android] Use mavenCentral() instead of jcenter() (#952) +* [Windows] Use uint8_t* instead of string in DataChannel::Send method, fix binary send bug. +* [Android] fix: "Reply already submitted" error and setVolume() not working on remote streams. + +[0.8.6] - 2022-05-08 + +* [Web/Android/iOS/macOS] Support null tracks in replaceTrack/setTrack. +* [macOS] Remove absolute path from resolved spec to make checksum stable. +* [Android] Android 12 bluetooth permissions. +* [Dart] fix wrong id type for data-channel. +* [Android] Release i420 Buffer in FrameCapturer. + +[0.8.5] - 2022-04-01 + +* [Dart] Expose RTCDataChannel.id (#898) +* [Android] Enable H264 high profile for SimulcastVideoEncoderFactoryWrapper (#890) + +[0.8.4] - 2022-03-28 + +* [Android] Fix simulcast factory not sending back EncoderInfo (#891) +* [Android] fix: correct misspell in method screenRequestPermissions (#876) + +[0.8.3] - 2022-03-01 + +* [Android/iOS] Update android/ios webrtc native sdk versions. +* [Windows] Feature of selecting i/o audio devices by passing sourceId and/or deviceId constraints (#851). + +[0.8.2] - 2022-02-08 + +* [Android/iOS/macOS/Web] Add restartIce. + +[0.8.1] - 2021-12-29 + +* [Android/iOS] Bump webrtc-sdk version to 93.4577.01. + +[0.8.0] - 2021-12-05 + +* [Dart] Refactor: Use webrtc interface. (#777) +* [iOS] Fix crashes for FlutterRPScreenRecorder stop. +* [Web] Don't stop tracks when disposing MediaStream (#760) +* [Windows] Add the necessary parameters for onRemoveTrack (#763) +* [Example] Properly start foreground service in example (#764) +* [Android] Fix crash for Android, close #757 and #734. 
+* [Dart] Fix typo in deprecated annotations. +* [iOS] Fix IOS captureFrame and add support for remote stream captureFrame (#778) +* [Windows] Fix parsing stun configuration (#789) +* [Windows] Fix mute (#792) +* [iOS/Android/Windows] New video constraints syntax (#790) + +[0.7.1] - 2021-11-04 + +* [iOS/macOS] Update framework. +* [Android] Update framework. +* [Windows] Implement mediaStreamTrackSetEnable (#756). +* [iOS/macOS] Enable audio capture when acquiring track. +* [Android] Call stopCaptureWithCompletionHandler instead (#748) +* [Windows] Fix bug for windows. + +[0.7.0+hotfix.2] - 2021-10-21 + +* [iOS/macOS] Update .podspec for Darwin. + +[0.7.0+hotfix.1] - 2021-10-21 + +* [Android] Fix bug for createDataChannel. + +[0.7.0] - 2021-10-20 + +* [Android] Enable Android simulcast (#731) +* [macOS] Use pre-compiled WebRTC for macOS. (#717) +* [iOS/macOS] Fix the correct return value of createDataChannel under darwin. +* [Windows] Fix using the wrong id to listen datachannel events. +* [Dart] Fix(mediaStreamTrackSetEnable): remote track is unavaiable (#723). + +[0.6.10+hotfix.1] - 2021-10-01 + +* [Web] Fix compiler errors for web. + +[0.6.10] - 2021-10-01 + +* [iOS] Fix bug for RtpTransceiver.getCurrentDirection. +* [Dart] Improve MethodChannel calling. + +[0.6.9] - 2021-10-01 + +* [iOS] Update WebRTC build (#707). +* [Windows] Add Unified-Plan support for windows. (#688) +* [iOS] Improve audio handling on iOS (#705) + +[0.6.8] - 2021-09-27 + +* [Android] Use ApplicationContext to verify permissions when activity is null. +* [iOS] Add support for lightning microphone. (#693) +* [Windows] Fix FlutterMediaStream::GetSources. +* [Web] Fix Flutter 2.5.0 RTCVideoRendererWeb bug (#681) +* [Web] Bug fix (#679) + +[0.6.7] - 2021-09-08 + +* [Android] upgrade webrtc sdk to m92.92.4515. +* [Web] `addTransceiver` bug fix (#675) +* [Web] Use low-level jsutil to call createOffer/createrAnswer to solve the issue on safari/firefox. 
+* [Dart] Fix currentDirection/direction implementation confusion. + +[0.6.6] - 2021.09.01 + +* [Sponsorship] Thanks for LiveKit sponsorship. +* [Web] Avoid removing all audio elements when stopping a single video renderer (#667) +* [Web] Properly cleanup srcObject to avoid accidental dispose +* [Dart] Removed warnings (#647) +* [Web] Switch transferFromImageBitmap to be invoked using js.callMethod (#631) +* [Web] Fix sending binary data over DataChannel in web implementation. (#634) +* [Darwin] Nullable return for GetLocalDescription/GetRemoteDiscription +* [Darwin] Fix incorrect argument name at RTCRtpSender (#600) + +[0.6.5] - 2021.06.18 + +* [Android] Falling back to the first available camera fix #580 +* [Android] Fix application exit null-pointer exception (#582) +* [Dart] Add label getter to DataChannel Interface (#585) +* [Dart] Fix exception raised at RTCPeerConnection.removeTrack and RTCRtpSender.setParameters (#588) +* [Dart] Fix: null check (#595) +* [Dart] Fix: null check for RTCRtpTransceiverNative.fromMap + +[0.6.4] - 2021.05.02 + +* [Android] Fix getting screen capture on Huawei only successful in the first time. (#523) +* [Android] Add configuration "cryptoOptions" in parseRTCConfiguration(). +* [Dart] Change getLocalDescription,getRemoteDescription,RTCRtpSenderWeb.track returns to nullable. +* [Dart] Fixed bug in RTCPeerConnectionWeb.removeTrack. +* [Dart] Change MediaStreamTrack.captureFrame returns to ByteBuffer to compatible with web API. +* [Dart] Do null safety check in onRemoveStream,onRemoveTrack and MediaStream.getTrackById. +* [Android] Add reStartCamera method when the camera is preempted by other apps. +* [Web] Refactored RTCVideoRendererWeb and RTCVideoViewWeb, using video and audio HTML tags to render audio and video streams separately. + +[0.6.3] - 2021.04.03 + +* [Dart] Change RTCRtpSender.track to nullable. +* [Web] Fix RTCVideoView/Renderer pauses when changing child in IndexedStack. 
+ +[0.6.2] - 2021.04.02 + +* [Dart] Use enumerateDevices instead of getSources. +* [Android] Use flutter_background to fix screen capture example. + +[0.6.1] - 2021.04.02 + +* [Darwin] Fixed getting crash when call setLocalDescription multiple time. +* [Dart] Get more pub scores. + +[0.6.0] - 2021.04.01 + +* [Sponsorship] Thanks for Stream sponsorship (#475) +* [Android] Fixed a crash when switching cameras on Huawei devices. +* [Windows] Correct signalingState & iceConnectionState event name on Windows. (#502) +* [Dart] Clip behaviour. (#511) +* [Dart] null-safety (@wer-mathurin Thanks for the hard work). +* [Dart] Fix setMicrophoneMute (#466) +* [Web] Fix pc.addTransceiver method, fix RTCRtpMediaType to string, fix (#437) +* [Android] fix sdpSemantics issue (#478) + +[0.6.0-nullsafety.0] - 2021.03.22 + +* [Dart] null-safety (@wer-mathurin Thanks for the hard work). + +[0.5.8] - 2021.01.26 + +* [Web] Support selecting audio output. +* [Web] Fix issue for getDisplayMedia with audio. +* [Windows] Add Windows Support. +* [macOS] Fix compile error for macos. +* [Dart] Add FilterQuality to RTCVideoView. +* [iOS/Android] Unified plan gettracks. +* [iOS/Android] Bluetooth switching enabled when switching `enableSpeakerphone` value (if they are connected). #201 (#435) +* [Android] Increase necessary Android min SDK version after add Unified-Plan API. + +[0.5.7] - 2020.11.21 + +* [Web] Fix events callback for peerconnection. + +[0.5.6] - 2020.11.21 + +* [Android/Darwin/Web] Add onResize event for RTCVideoRenderer. + +[0.5.5] - 2020.11.21 + +* [Android/Darwin] Fix Simulcast issue. + +[0.5.4] - 2020.11.21 + +* [Native/Web] Add videoWidth/videoHeight getter for RTCVideoRenderer. +* [Web] Add optional parameter track to call getStats. + +[0.5.3] - 2020.11.21 + +* Fix bug. + +[0.5.2] - 2020.11.19 + +* Improve web code + +[0.5.1] - 2020.11.19 + +* Improve unfied-plan API for web. +* Add getTransceivers,getSenders, getReceivers methods. 
+
+[0.5.0+1] - 2020.11.18
+
+* Remove dart-webrtc and reuse the code in dart:html
+  because the code generated by package:js cannot be run in dart2js.
+
+[0.5.0] - 2020.11.15
+
+* [Web] Add Unified-Plan for Flutter Web.
+* [Web] Add video frame mirror support for web.
+* [Web] Support Simulcast for web.
+* [Web] Use dart-webrtc as flutter web plugin.
+* [Android/Darwin] Fix crash when unset streamIds in RtpTransceiverInit.
+* [Dart] Change the constraints of createOffer/createAnswer as optional.
+* [iOS] Fix adding track to stream (#413)
+
+[0.4.1] - 2020.11.11
+
+* Add transceiver to onTrack events.
+* Remove unnecessary log printing.
+* Fixed a crash caused by using GetTransceivers under non-unified-plan,
+  close #389.
+* FIX - Invalid type inference (#392)
+* [Web] Add onEnded and onMuted for Web (#387)
+* [Darwin] Fix PeerConnectionState for darwin.
+* [Darwin] Fix compilation warning under darwin.
+* [Android] Fixed 'Sender is null' issue when removing track. (#401)
+* [iOS] fix removeTrack methodChannel response, onTrack's `stream` and `track` not being registered in native.
+* [Darwin/Android] `RtpSender` `setParameters` functionality.
+
+[0.4.0] - 2020.10.14
+
+* Support Unified-Plan for Android/iOS/macOS.
+* Add PeerConnectionState and add RTCTrackEvent.
+* [Android] Upgrade GoogleWebRTC@android to 1.0.32006.
+* [iOS] Upgrade GoogleWebRTC@ios to 1.1.31999.
+* Api standardization across implementation (#364), thanks @wer-mathurin.
 
 [0.3.3] - 2020.09.14
 
 * Add RTCDTMFSender for mobile, web and macOS.
diff --git a/Documentation/E2EE.md b/Documentation/E2EE.md
new file mode 100644
index 0000000000..a91c8233f7
--- /dev/null
+++ b/Documentation/E2EE.md
@@ -0,0 +1,92 @@
+# End to End Encryption
+
+E2EE is an AES-GCM encryption interface injected before sending the packaged RTP packet and after receiving the RTP packet, ensuring that the data is not eavesdropped when passing through SFU or any public transmission network. It coexists with DTLS-SRTP as two layers of encryption. You can control the key, ratchet and other operations of FrameCryptor yourself to ensure that no third party will monitor your tracks.
+
+## Process of enabling E2EE
+
+1, Prepare the key provider
+
+`ratchetSalt` is used to add to the mixture when ratcheting or deriving AES passwords
+`aesKey` is the plaintext password you entered, which will be used to derive the actual password
+
+```dart
+  final aesKey = 'you-private-key-here'.codeUnits;
+  final ratchetSalt = 'flutter-webrtc-ratchet-salt';
+
+  var keyProviderOptions = KeyProviderOptions(
+    sharedKey: true,
+    ratchetSalt: Uint8List.fromList(ratchetSalt.codeUnits),
+    ratchetWindowSize: 16,
+    failureTolerance: -1,
+  );
+
+  var keyProvider = await frameCyrptorFactory.createDefaultKeyProvider(keyProviderOptions);
+  /// set the shared key for all tracks, default index is 0
+  /// also you can set multiple keys by different indexes
+  await keyProvider.setSharedKey(key: aesKey);
+```
+
+2, Create PeerConnection
+
+When you use E2EE on the web, please add `encodedInsertableStreams`,
+
+``` dart
+var pc = await createPeerConnection( {
+        'encodedInsertableStreams': true,
+    });
+```
+
+3, Enable FrameCryptor for RTPSender.
+
+```dart
+var stream = await navigator.mediaDevices
+        .getUserMedia({'audio': true, 'video': false });
+var audioTrack = stream.getAudioTracks();
+var sender = await pc.addTrack(audioTrack, stream);
+
+var trackId = audioTrack?.id;
+var id = 'audio_' + trackId! + '_sender';
+
+var frameCyrptor =
+        await frameCyrptorFactory.createFrameCryptorForRtpSender(
+            participantId: id,
+            sender: sender,
+            algorithm: Algorithm.kAesGcm,
+            keyProvider: keyProvider!);
+/// print FrameCryptor state
+frameCyrptor.onFrameCryptorStateChanged = (participantId, state) =>
+    print('EN onFrameCryptorStateChanged $participantId $state');
+
+/// set currently shared key index
+await frameCyrptor.setKeyIndex(0);
+
+/// enable encryption now.
+await frameCyrptor.setEnabled(true); +``` + +4, Enable FrameCryptor for RTPReceiver + +```dart + +pc.onTrack((RTCTrackEvent event) async { + var receiver = event.receiver; + var trackId = event.track?.id; + var id = event.track.kind + '_' + trackId! + '_receiver'; + + var frameCyrptor = + await frameCyrptorFactory.createFrameCryptorForRtpReceiver( + participantId: id, + receiver: receiver, + algorithm: Algorithm.kAesGcm, + keyProvider: keyProvider); + + frameCyrptor.onFrameCryptorStateChanged = (participantId, state) => + print('DE onFrameCryptorStateChanged $participantId $state'); + + /// set currently shared key index + await frameCyrptor.setKeyIndex(0); + + /// enable encryption now. + await frameCyrptor.setEnabled(true); +}); +``` diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..8e65a49ff2 --- /dev/null +++ b/NOTICE @@ -0,0 +1,51 @@ +################################################################################### + +The following modifications follow Apache License 2.0 from shiguredo. + +SimulcastVideoEncoderFactoryWrapper.kt + +Apache License 2.0 + +Copyright 2017, Lyo Kato (Original Author) +Copyright 2017-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +##################################################################################### + +react-native-webrtc +https://github.com/react-native-webrtc/react-native-webrtc + +The MIT License (MIT) + +Copyright (c) 2015 Howard Yang + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +##################################################################################### \ No newline at end of file diff --git a/README.md b/README.md index 024afa65dc..2715e5a31c 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,46 @@ # Flutter-WebRTC -[![Financial Contributors on Open Collective](https://opencollective.com/flutter-webrtc/all/badge.svg?label=financial+contributors)](https://opencollective.com/flutter-webrtc) [![pub package](https://img.shields.io/pub/v/flutter_webrtc.svg)](https://pub.dartlang.org/packages/flutter_webrtc) [![Gitter](https://badges.gitter.im/flutter-webrtc/Lobby.svg)](https://gitter.im/flutter-webrtc/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) + +[![Financial Contributors on Open Collective](https://opencollective.com/flutter-webrtc/all/badge.svg?label=financial+contributors)](https://opencollective.com/flutter-webrtc) [![pub package](https://img.shields.io/pub/v/flutter_webrtc.svg)](https://pub.dartlang.org/packages/flutter_webrtc) [![Gitter](https://badges.gitter.im/flutter-webrtc/Lobby.svg)](https://gitter.im/flutter-webrtc/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![slack](https://img.shields.io/badge/join-us%20on%20slack-gray.svg?longCache=true&logo=slack&colorB=brightgreen)](https://join.slack.com/t/flutterwebrtc/shared_invite/zt-q83o7y1s-FExGLWEvtkPKM8ku_F8cEQ) WebRTC plugin for Flutter Mobile/Desktop/Web +
+

+Sponsored with 💖   by
+ +Stream Chat + +
+Enterprise Grade APIs for Feeds, Chat, & Video. Try the Flutter Video tutorial 💬 +

+ +
+

+ +LiveKit + +
+ LiveKit - Open source WebRTC and realtime AI infrastructure +

+ ## Functionality -| Feature | Android | iOS | [Web](https://flutter.dev/web) | macOS | Windows | Linux | [Fuchsia](https://fuchsia.googlesource.com/) | -| :-------------: | :-------------:| :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | -| Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | -| Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | -| Screen Capture | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | | | | -| Unified-Plan | | | | | | | | -| MediaRecorder| :warning: | :warning: | :heavy_check_mark: | | | | | - -## Usage + +| Feature | Android | iOS | [Web](https://flutter.dev/web) | macOS | Windows | Linux | [Embedded](https://github.com/sony/flutter-elinux) | [Fuchsia](https://fuchsia.dev/) | +| :-------------: | :-------------:| :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | +| Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Screen Capture | :heavy_check_mark: | [:heavy_check_mark:(*)](https://github.com/flutter-webrtc/flutter-webrtc/wiki/iOS-Screen-Sharing) | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Unified-Plan | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Simulcast | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| MediaRecorder | :warning: | :warning: | :heavy_check_mark: | | | | | | +| End to End Encryption | 
:heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Insertable Streams | | | | | | | | | + +Additional platform/OS support from the other community + +- flutter-tizen: +- flutter-elinux(WIP): + Add `flutter_webrtc` as a [dependency in your pubspec.yaml file](https://flutter.io/using-packages/). ### iOS @@ -28,6 +56,24 @@ Add the following entry to your _Info.plist_ file, located in `/io This entry allows your app to access camera and microphone. +### Note for iOS + +The WebRTC.xframework compiled after the m104 release no longer supports iOS arm devices, so need to add the `config.build_settings['ONLY_ACTIVE_ARCH'] = 'YES'` to your ios/Podfile in your project + +ios/Podfile + +```ruby +post_install do |installer| + installer.pods_project.targets.each do |target| + flutter_additional_ios_build_settings(target) + target.build_configurations.each do |config| + # Workaround for https://github.com/flutter/flutter/issues/64502 + config.build_settings['ONLY_ACTIVE_ARCH'] = 'YES' # <= this line + end + end +end +``` + ### Android Ensure the following permission is present in your Android Manifest file, located in `/android/app/src/main/AndroidManifest.xml`: @@ -42,9 +88,17 @@ Ensure the following permission is present in your Android Manifest file, locate ``` +If you need to use a Bluetooth device, please add: + +```xml + + +``` + The Flutter project template adds it, so it may already be there. Also you will need to set your build settings to Java 8, because official WebRTC jar now uses static methods in `EglBase` interface. Just add this to your app level `build.gradle`: + ```groovy android { //... @@ -55,15 +109,27 @@ android { } ``` -If necessary, in the same `build.gradle` you will need to increase `minSdkVersion` of `defaultConfig` up to `18` (currently default Flutter generator set it to `16`). 
+If necessary, in the same `build.gradle` you will need to increase `minSdkVersion` of `defaultConfig` up to `23` (currently default Flutter generator set it to `16`). + +### Important reminder + +When you compile the release apk, you need to add the following operations, +[Setup Proguard Rules](https://github.com/flutter-webrtc/flutter-webrtc/blob/main/android/proguard-rules.pro) ## Contributing + The project is inseparable from the contributors of the community. + - [CloudWebRTC](https://github.com/cloudwebrtc) - Original Author - [RainwayApp](https://github.com/rainwayapp) - Sponsor - [亢少军](https://github.com/kangshaojun) - Sponsor +- [ION](https://github.com/pion/ion) - Sponsor +- [reSipWebRTC](https://github.com/reSipWebRTC) - Sponsor +- [沃德米科技](https://github.com/woodemi)-[36记手写板](https://www.36notes.com) - Sponsor +- [阿斯特网络科技有限公司](https://www.astgo.net/) - Sponsor ### Example + For more examples, please refer to [flutter-webrtc-demo](https://github.com/cloudwebrtc/flutter-webrtc-demo/). ## Contributors diff --git a/analysis_options.yaml b/analysis_options.yaml index b65afa88ef..e125cd7523 100644 --- a/analysis_options.yaml +++ b/analysis_options.yaml @@ -1,4 +1,4 @@ -include: package:pedantic/analysis_options.yaml +include: package:lints/recommended.yaml linter: rules: @@ -9,7 +9,6 @@ linter: - camel_case_extensions - camel_case_types - cancel_subscriptions - - directives_ordering - flutter_style_todos - sort_constructors_first - sort_unnamed_constructors_first @@ -42,7 +41,8 @@ analyzer: # allow self-reference to deprecated members (we do this because otherwise we have # to annotate every member in every test, assert, etc, when we deprecate something) deprecated_member_use_from_same_package: ignore - # Ignore analyzer hints for updating pubspecs when using Future or - # Stream and not importing dart:async - # Please see https://github.com/flutter/flutter/pull/24528 for details. 
- sdk_version_async_exported_from_core: ignore + # Conflict with import_sorter + directives_ordering: ignore + constant_identifier_names: ignore + deprecated_member_use: ignore + implementation_imports: ignore diff --git a/android/build.gradle b/android/build.gradle index b30a3d8f14..f9b288f868 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -2,31 +2,38 @@ group 'com.cloudwebrtc.webrtc' version '1.0-SNAPSHOT' buildscript { + ext.kotlin_version = '1.7.10' repositories { google() - jcenter() + mavenCentral() } dependencies { - classpath 'com.android.tools.build:gradle:3.6.3' + classpath 'com.android.tools.build:gradle:7.1.1' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" } } rootProject.allprojects { repositories { google() - jcenter() + mavenCentral() + maven { url 'https://jitpack.io' } } } apply plugin: 'com.android.library' +apply plugin: 'kotlin-android' android { - compileSdkVersion 28 + if (project.android.hasProperty("namespace")) { + namespace 'com.cloudwebrtc.webrtc' + } + compileSdkVersion 31 defaultConfig { - minSdkVersion 18 - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + minSdkVersion 21 + testInstrumentationRunner 'androidx.test.runner.AndroidJUnitRunner' consumerProguardFiles 'proguard-rules.pro' } @@ -38,9 +45,15 @@ android { sourceCompatibility JavaVersion.VERSION_1_8 targetCompatibility JavaVersion.VERSION_1_8 } + + kotlinOptions { + jvmTarget = '1.8' + } } dependencies { - api 'org.webrtc:google-webrtc:1.0.30039' - implementation "androidx.annotation:annotation:1.1.0" + implementation 'io.github.webrtc-sdk:android:125.6422.03' + implementation 'com.github.davidliu:audioswitch:89582c47c9a04c62f90aa5e57251af4800a62c9a' + implementation 'androidx.annotation:annotation:1.1.0' + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" } diff --git a/android/gradle.properties b/android/gradle.properties index 8bd86f6805..e60119f371 100644 --- a/android/gradle.properties +++ 
b/android/gradle.properties @@ -1 +1,4 @@ org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true + diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java b/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java index e355b0c953..d168ff4220 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java @@ -5,42 +5,80 @@ import org.webrtc.CameraVideoCapturer; class CameraEventsHandler implements CameraVideoCapturer.CameraEventsHandler { + public enum CameraState { + NEW, + OPENING, + OPENED, + CLOSED, + DISCONNECTED, + ERROR, + FREEZED + } private final static String TAG = FlutterWebRTCPlugin.TAG; + private CameraState state = CameraState.NEW; + + public void waitForCameraOpen() { + Log.d(TAG, "CameraEventsHandler.waitForCameraOpen"); + while (state != CameraState.OPENED && state != CameraState.ERROR) { + try { + Thread.sleep(1); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + + public void waitForCameraClosed() { + Log.d(TAG, "CameraEventsHandler.waitForCameraClosed"); + while (state != CameraState.CLOSED && state != CameraState.ERROR) { + try { + Thread.sleep(1); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } // Camera error handler - invoked when camera can not be opened // or any camera exception happens on camera thread. @Override public void onCameraError(String errorDescription) { Log.d(TAG, String.format("CameraEventsHandler.onCameraError: errorDescription=%s", errorDescription)); + state = CameraState.ERROR; } // Called when camera is disconnected. 
@Override public void onCameraDisconnected() { Log.d(TAG, "CameraEventsHandler.onCameraDisconnected"); + state = CameraState.DISCONNECTED; } // Invoked when camera stops receiving frames @Override public void onCameraFreezed(String errorDescription) { Log.d(TAG, String.format("CameraEventsHandler.onCameraFreezed: errorDescription=%s", errorDescription)); + state = CameraState.FREEZED; } // Callback invoked when camera is opening. @Override public void onCameraOpening(String cameraName) { Log.d(TAG, String.format("CameraEventsHandler.onCameraOpening: cameraName=%s", cameraName)); + state = CameraState.OPENING; } // Callback invoked when first camera frame is available after camera is opened. @Override public void onFirstFrameAvailable() { Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable"); + state = CameraState.OPENED; } // Callback invoked when camera closed. @Override public void onCameraClosed() { Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable"); + state = CameraState.CLOSED; } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java index 108897fee5..83f316a036 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java @@ -6,24 +6,27 @@ import org.webrtc.DataChannel; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; class DataChannelObserver implements DataChannel.Observer, EventChannel.StreamHandler { - private final int mId; - private final DataChannel mDataChannel; + private final String flutterId; + private final DataChannel dataChannel; - private EventChannel eventChannel; + private final EventChannel eventChannel; private EventChannel.EventSink eventSink; + private final ArrayList eventQueue = new 
ArrayList(); - DataChannelObserver(BinaryMessenger messenger, String peerConnectionId, int id, + DataChannelObserver(BinaryMessenger messenger, String peerConnectionId, String flutterId, DataChannel dataChannel) { - mId = id; - mDataChannel = dataChannel; + this.flutterId = flutterId; + this.dataChannel = dataChannel; eventChannel = - new EventChannel(messenger, "FlutterWebRTC/dataChannelEvent" + peerConnectionId + id); + new EventChannel(messenger, "FlutterWebRTC/dataChannelEvent" + peerConnectionId + flutterId); eventChannel.setStreamHandler(this); } @@ -44,23 +47,33 @@ private String dataChannelStateString(DataChannel.State dataChannelState) { @Override public void onListen(Object o, EventChannel.EventSink sink) { eventSink = new AnyThreadSink(sink); + for(Object event : eventQueue) { + eventSink.success(event); + } + eventQueue.clear(); } @Override public void onCancel(Object o) { eventSink = null; } - + @Override public void onBufferedAmountChange(long amount) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "dataChannelBufferedAmountChange"); + params.putInt("id", dataChannel.id()); + params.putLong("bufferedAmount", dataChannel.bufferedAmount()); + params.putLong("changedAmount", amount); + sendEvent(params); } @Override public void onStateChange() { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "dataChannelStateChanged"); - params.putInt("id", mDataChannel.id()); - params.putString("state", dataChannelStateString(mDataChannel.state())); + params.putInt("id", dataChannel.id()); + params.putString("state", dataChannelStateString(dataChannel.state())); sendEvent(params); } @@ -68,7 +81,7 @@ public void onStateChange() { public void onMessage(DataChannel.Buffer buffer) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "dataChannelReceiveMessage"); - params.putInt("id", mDataChannel.id()); + params.putInt("id", dataChannel.id()); byte[] bytes; if (buffer.data.hasArray()) { @@ -83,7 
+96,7 @@ public void onMessage(DataChannel.Buffer buffer) { params.putByte("data", bytes); } else { params.putString("type", "text"); - params.putString("data", new String(bytes, Charset.forName("UTF-8"))); + params.putString("data", new String(bytes, StandardCharsets.UTF_8)); } sendEvent(params); @@ -92,6 +105,8 @@ public void onMessage(DataChannel.Buffer buffer) { private void sendEvent(ConstraintsMap params) { if (eventSink != null) { eventSink.success(params.toMap()); + } else { + eventQueue.add(params.toMap()); } } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java new file mode 100644 index 0000000000..199afdc4b9 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java @@ -0,0 +1,434 @@ +package com.cloudwebrtc.webrtc; + +import android.util.Log; + +import androidx.annotation.NonNull; + +import org.webrtc.FrameCryptor; +import org.webrtc.FrameCryptorAlgorithm; +import org.webrtc.FrameCryptorFactory; +import org.webrtc.FrameCryptorKeyProvider; +import org.webrtc.RtpReceiver; +import org.webrtc.RtpSender; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel.MethodCallHandler; +import io.flutter.plugin.common.MethodChannel.Result; + +import com.cloudwebrtc.webrtc.utils.AnyThreadSink; +import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import com.cloudwebrtc.webrtc.utils.ConstraintsArray; + +public class FlutterRTCFrameCryptor { + + class FrameCryptorStateObserver implements FrameCryptor.Observer, EventChannel.StreamHandler { + public FrameCryptorStateObserver(BinaryMessenger messenger, String frameCryptorId){ + this.frameCryptorId = frameCryptorId; + eventChannel = new 
EventChannel(messenger, "FlutterWebRTC/frameCryptorEvent" + frameCryptorId); + eventChannel.setStreamHandler(new EventChannel.StreamHandler() { + @Override + public void onListen(Object o, EventChannel.EventSink sink) { + eventSink = new AnyThreadSink(sink); + for(Object event : eventQueue) { + eventSink.success(event); + } + eventQueue.clear(); + } + @Override + public void onCancel(Object o) { + eventSink = null; + } + }); + } + private final EventChannel eventChannel; + private EventChannel.EventSink eventSink; + private final ArrayList eventQueue = new ArrayList(); + private final String frameCryptorId; + + @Override + public void onListen(Object arguments, EventChannel.EventSink events) { + eventSink = new AnyThreadSink(events); + for(Object event : eventQueue) { + eventSink.success(event); + } + eventQueue.clear(); + } + + @Override + public void onCancel(Object arguments) { + eventSink = null; + } + + private String frameCryptorErrorStateToString( FrameCryptor.FrameCryptionState state) { + switch (state) { + case NEW: + return "new"; + case OK: + return "ok"; + case DECRYPTIONFAILED: + return "decryptionFailed"; + case ENCRYPTIONFAILED: + return "encryptionFailed"; + case INTERNALERROR: + return "internalError"; + case KEYRATCHETED: + return "keyRatcheted"; + case MISSINGKEY: + return "missingKey"; + default: + throw new IllegalArgumentException("Unknown FrameCryptorErrorState: " + state); + } + } + + @Override + public void onFrameCryptionStateChanged(String participantId, FrameCryptor.FrameCryptionState state) { + Map event = new HashMap<>(); + event.put("event", "frameCryptionStateChanged"); + event.put("participantId", participantId); + event.put("state",frameCryptorErrorStateToString(state)); + if (eventSink != null) { + eventSink.success(event); + } else { + eventQueue.add(event); + } + } + } + + private static final String TAG = "FlutterRTCFrameCryptor"; + private final Map frameCryptos = new HashMap<>(); + private final Map frameCryptoObservers = new 
HashMap<>(); + private final Map keyProviders = new HashMap<>(); + private final StateProvider stateProvider; + public FlutterRTCFrameCryptor(StateProvider stateProvider) { + this.stateProvider = stateProvider; + } + public boolean handleMethodCall(MethodCall call, @NonNull Result result) { + String method_name = call.method; + Map params = (Map) call.arguments; + if (method_name.equals("frameCryptorFactoryCreateFrameCryptor")) { + frameCryptorFactoryCreateFrameCryptor(params, result); + } else if (method_name.equals("frameCryptorSetKeyIndex")) { + frameCryptorSetKeyIndex(params, result); + } else if (method_name.equals("frameCryptorGetKeyIndex")) { + frameCryptorGetKeyIndex(params, result); + } else if (method_name.equals("frameCryptorSetEnabled")) { + frameCryptorSetEnabled(params, result); + } else if (method_name.equals("frameCryptorGetEnabled")) { + frameCryptorGetEnabled(params, result); + } else if (method_name.equals("frameCryptorDispose")) { + frameCryptorDispose(params, result); + } else if (method_name.equals("frameCryptorFactoryCreateKeyProvider")) { + frameCryptorFactoryCreateKeyProvider(params, result); + }else if (method_name.equals("keyProviderSetSharedKey")) { + keyProviderSetSharedKey(params, result); + } else if (method_name.equals("keyProviderRatchetSharedKey")) { + keyProviderRatchetSharedKey(params, result); + } else if (method_name.equals("keyProviderExportSharedKey")) { + keyProviderExportKey(params, result); + } else if (method_name.equals("keyProviderSetKey")) { + keyProviderSetKey(params, result); + } else if (method_name.equals("keyProviderRatchetKey")) { + keyProviderRatchetKey(params, result); + } else if (method_name.equals("keyProviderExportKey")) { + keyProviderExportKey(params, result); + } else if (method_name.equals("keyProviderSetSifTrailer")) { + keyProviderSetSifTrailer(params, result); + } else if (method_name.equals("keyProviderDispose")) { + keyProviderDispose(params, result); + } else { + return false; + } + return true; + 
} + + private FrameCryptorAlgorithm frameCryptorAlgorithmFromInt(int algorithm) { + switch (algorithm) { + case 0: + return FrameCryptorAlgorithm.AES_GCM; + default: + return FrameCryptorAlgorithm.AES_GCM; + } + } + + private void frameCryptorFactoryCreateFrameCryptor(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("frameCryptorFactoryCreateFrameCryptorFailed", "keyProvider not found", null); + return; + } + String peerConnectionId = (String) params.get("peerConnectionId"); + PeerConnectionObserver pco = stateProvider.getPeerConnectionObserver(peerConnectionId); + if (pco == null) { + result.error("frameCryptorFactoryCreateFrameCryptorFailed", "peerConnection not found", null); + return; + } + String participantId = (String) params.get("participantId"); + String type = (String) params.get("type"); + int algorithm = (int) params.get("algorithm"); + String rtpSenderId = (String) params.get("rtpSenderId"); + String rtpReceiverId = (String) params.get("rtpReceiverId"); + + if(type.equals("sender")) { + RtpSender rtpSender = pco.getRtpSenderById(rtpSenderId); + + FrameCryptor frameCryptor = FrameCryptorFactory.createFrameCryptorForRtpSender(stateProvider.getPeerConnectionFactory(), + rtpSender, + participantId, + frameCryptorAlgorithmFromInt(algorithm), + keyProvider); + String frameCryptorId = UUID.randomUUID().toString(); + frameCryptos.put(frameCryptorId, frameCryptor); + FrameCryptorStateObserver observer = new FrameCryptorStateObserver(stateProvider.getMessenger(), frameCryptorId); + frameCryptor.setObserver(observer); + frameCryptoObservers.put(frameCryptorId, observer); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("frameCryptorId", frameCryptorId); + result.success(paramsResult.toMap()); + } else if(type.equals("receiver")) { + RtpReceiver rtpReceiver = 
pco.getRtpReceiverById(rtpReceiverId); + + FrameCryptor frameCryptor = FrameCryptorFactory.createFrameCryptorForRtpReceiver(stateProvider.getPeerConnectionFactory(), + rtpReceiver, + participantId, + frameCryptorAlgorithmFromInt(algorithm), + keyProvider); + String frameCryptorId = UUID.randomUUID().toString(); + frameCryptos.put(frameCryptorId, frameCryptor); + FrameCryptorStateObserver observer = new FrameCryptorStateObserver(stateProvider.getMessenger(), frameCryptorId); + frameCryptor.setObserver(observer); + frameCryptoObservers.put(frameCryptorId, observer); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("frameCryptorId", frameCryptorId); + result.success(paramsResult.toMap()); + } else { + result.error("frameCryptorFactoryCreateFrameCryptorFailed", "type must be sender or receiver", null); + return; + } + } + + private void frameCryptorSetKeyIndex(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorSetKeyIndexFailed", "frameCryptor not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + frameCryptor.setKeyIndex(keyIndex); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void frameCryptorGetKeyIndex(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorGetKeyIndexFailed", "frameCryptor not found", null); + return; + } + int keyIndex = frameCryptor.getKeyIndex(); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putInt("keyIndex", keyIndex); + result.success(paramsResult.toMap()); + } + + private void frameCryptorSetEnabled(Map params, @NonNull 
Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorSetEnabledFailed", "frameCryptor not found", null); + return; + } + boolean enabled = (boolean) params.get("enabled"); + frameCryptor.setEnabled(enabled); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", enabled); + result.success(paramsResult.toMap()); + } + + private void frameCryptorGetEnabled(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorGetEnabledFailed", "frameCryptor not found", null); + return; + } + boolean enabled = frameCryptor.isEnabled(); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("enabled", enabled); + result.success(paramsResult.toMap()); + } + + private void frameCryptorDispose(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorDisposeFailed", "frameCryptor not found", null); + return; + } + frameCryptor.dispose(); + frameCryptos.remove(frameCryptorId); + frameCryptoObservers.remove(frameCryptorId); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("result", "success"); + result.success(paramsResult.toMap()); + } + + private void frameCryptorFactoryCreateKeyProvider(Map params, @NonNull Result result) { + String keyProviderId = UUID.randomUUID().toString(); + Map keyProviderOptions = (Map) params.get("keyProviderOptions"); + boolean sharedKey = (boolean) keyProviderOptions.get("sharedKey"); + int ratchetWindowSize = (int) keyProviderOptions.get("ratchetWindowSize"); + int failureTolerance = (int) 
keyProviderOptions.get("failureTolerance"); + byte[] ratchetSalt = ( byte[]) keyProviderOptions.get("ratchetSalt"); + byte[] uncryptedMagicBytes = new byte[0]; + if(keyProviderOptions.containsKey("uncryptedMagicBytes")) { + uncryptedMagicBytes = ( byte[]) keyProviderOptions.get("uncryptedMagicBytes"); + } + int keyRingSize = (int) keyProviderOptions.get("keyRingSize"); + boolean discardFrameWhenCryptorNotReady = (boolean) keyProviderOptions.get("discardFrameWhenCryptorNotReady"); + FrameCryptorKeyProvider keyProvider = FrameCryptorFactory.createFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance, keyRingSize, discardFrameWhenCryptorNotReady); + ConstraintsMap paramsResult = new ConstraintsMap(); + keyProviders.put(keyProviderId, keyProvider); + paramsResult.putString("keyProviderId", keyProviderId); + result.success(paramsResult.toMap()); + } + + private void keyProviderSetSharedKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetKeySharedFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + byte[] key = ( byte[]) params.get("key"); + keyProvider.setSharedKey(keyIndex, key); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void keyProviderRatchetSharedKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderRatchetSharedKeyFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + + byte[] newKey = keyProvider.ratchetSharedKey(keyIndex); + + ConstraintsMap 
paramsResult = new ConstraintsMap(); + paramsResult.putByte("result", newKey); + result.success(paramsResult.toMap()); + } + + private void keyProviderExportSharedKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderExportSharedKeyFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + + byte[] key = keyProvider.exportSharedKey(keyIndex); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putByte("result", key); + result.success(paramsResult.toMap()); + } + + private void keyProviderSetKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetKeyFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + String participantId = (String) params.get("participantId"); + byte[] key = ( byte[]) params.get("key"); + keyProvider.setKey(participantId, keyIndex, key); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void keyProviderRatchetKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetKeysFailed", "keyProvider not found", null); + return; + } + String participantId = (String) params.get("participantId"); + int keyIndex = (int) params.get("keyIndex"); + + byte[] newKey = keyProvider.ratchetKey(participantId, keyIndex); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putByte("result", newKey); + 
result.success(paramsResult.toMap()); + } + + private void keyProviderExportKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderExportKeyFailed", "keyProvider not found", null); + return; + } + String participantId = (String) params.get("participantId"); + int keyIndex = (int) params.get("keyIndex"); + + byte[] key = keyProvider.exportKey(participantId, keyIndex); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putByte("result", key); + result.success(paramsResult.toMap()); + } + + private void keyProviderSetSifTrailer(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetSifTrailerFailed", "keyProvider not found", null); + return; + } + byte[] sifTrailer = ( byte[]) params.get("sifTrailer"); + keyProvider.setSifTrailer(sifTrailer); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void keyProviderDispose(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderDisposeFailed", "keyProvider not found", null); + return; + } + keyProvider.dispose(); + keyProviders.remove(keyProviderId); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("result", "success"); + result.success(paramsResult.toMap()); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java index 
ca8df79a4b..4c1598c98e 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java @@ -2,6 +2,7 @@ import android.util.Log; import android.graphics.SurfaceTexture; +import android.view.Surface; import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; @@ -20,20 +21,22 @@ public class FlutterRTCVideoRenderer implements EventChannel.StreamHandler { private static final String TAG = FlutterWebRTCPlugin.TAG; - private final SurfaceTexture texture; - private TextureRegistry.SurfaceTextureEntry entry; + private final TextureRegistry.SurfaceProducer producer; private int id = -1; + private MediaStream mediaStream; - public void Dispose(){ + private String ownerTag; + + public void Dispose() { //destroy - if(surfaceTextureRenderer != null) { + if (surfaceTextureRenderer != null) { surfaceTextureRenderer.release(); } - if(eventChannel != null) + if (eventChannel != null) eventChannel.setStreamHandler(null); eventSink = null; - entry.release(); + producer.release(); } /** @@ -43,16 +46,18 @@ public void Dispose(){ private RendererEvents rendererEvents; private void listenRendererEvents() { - rendererEvents = new RendererEvents() { + rendererEvents = new RendererEvents() { private int _rotation = -1; - private int _width = 0, _height = 0; + private int _width = 0, _height = 0; @Override public void onFirstFrameRendered() { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "didFirstFrameRendered"); params.putInt("id", id); - eventSink.success(params.toMap()); + if (eventSink != null) { + eventSink.success(params.toMap()); + } } @Override @@ -60,8 +65,8 @@ public void onFrameResolutionChanged( int videoWidth, int videoHeight, int rotation) { - if(eventSink != null) { - if(_width != videoWidth || _height != videoHeight) { + if (eventSink != null) { + if (_width != videoWidth || _height != videoHeight) { 
ConstraintsMap params = new ConstraintsMap(); params.putString("event", "didTextureChangeVideoSize"); params.putInt("id", id); @@ -72,7 +77,7 @@ public void onFrameResolutionChanged( eventSink.success(params.toMap()); } - if(_rotation != rotation) { + if (_rotation != rotation) { ConstraintsMap params2 = new ConstraintsMap(); params2.putString("event", "didTextureChangeRotation"); params2.putInt("id", id); @@ -84,7 +89,8 @@ public void onFrameResolutionChanged( } }; } - private SurfaceTextureRenderer surfaceTextureRenderer; + + private final SurfaceTextureRenderer surfaceTextureRenderer; /** * The {@code VideoTrack}, if any, rendered by this {@code FlutterRTCVideoRenderer}. @@ -94,22 +100,22 @@ public void onFrameResolutionChanged( EventChannel eventChannel; EventChannel.EventSink eventSink; - public FlutterRTCVideoRenderer(SurfaceTexture texture, TextureRegistry.SurfaceTextureEntry entry) { + public FlutterRTCVideoRenderer(TextureRegistry.SurfaceProducer producer) { this.surfaceTextureRenderer = new SurfaceTextureRenderer(""); listenRendererEvents(); surfaceTextureRenderer.init(EglUtils.getRootEglBaseContext(), rendererEvents); - surfaceTextureRenderer.surfaceCreated(texture); + surfaceTextureRenderer.surfaceCreated(producer); - this.texture = texture; this.eventSink = null; - this.entry = entry; + this.producer = producer; + this.ownerTag = null; } - public void setEventChannel(EventChannel eventChannel){ + public void setEventChannel(EventChannel eventChannel) { this.eventChannel = eventChannel; } - public void setId(int id){ + public void setId(int id) { this.id = id; } @@ -137,17 +143,48 @@ private void removeRendererFromVideoTrack() { * specified {@code mediaStream}. * * @param mediaStream The {@code MediaStream} to be rendered by this - * {@code FlutterRTCVideoRenderer} or {@code null}. + * {@code FlutterRTCVideoRenderer} or {@code null}. 
*/ - public void setStream(MediaStream mediaStream) { + public void setStream(MediaStream mediaStream, String ownerTag) { VideoTrack videoTrack; + this.mediaStream = mediaStream; + this.ownerTag = ownerTag; + if (mediaStream == null) { + videoTrack = null; + } else { + List videoTracks = mediaStream.videoTracks; + videoTrack = videoTracks.isEmpty() ? null : videoTracks.get(0); + } + + setVideoTrack(videoTrack); + } + /** + * Sets the {@code MediaStream} to be rendered by this {@code FlutterRTCVideoRenderer}. + * The implementation renders the first {@link VideoTrack}, if any, of the + * specified trackId + * + * @param mediaStream The {@code MediaStream} to be rendered by this + * {@code FlutterRTCVideoRenderer} or {@code null}. + * @param trackId The {@code trackId} to be rendered by this + * {@code FlutterRTCVideoRenderer} or {@code null}. + */ + public void setStream(MediaStream mediaStream,String trackId, String ownerTag) { + VideoTrack videoTrack; + this.mediaStream = mediaStream; + this.ownerTag = ownerTag; if (mediaStream == null) { videoTrack = null; } else { List videoTracks = mediaStream.videoTracks; videoTrack = videoTracks.isEmpty() ? null : videoTracks.get(0); + + for (VideoTrack track : videoTracks){ + if (track.id().equals(trackId)){ + videoTrack = track; + } + } } setVideoTrack(videoTrack); @@ -157,9 +194,9 @@ public void setStream(MediaStream mediaStream) { * Sets the {@code VideoTrack} to be rendered by this {@code FlutterRTCVideoRenderer}. * * @param videoTrack The {@code VideoTrack} to be rendered by this - * {@code FlutterRTCVideoRenderer} or {@code null}. + * {@code FlutterRTCVideoRenderer} or {@code null}. 
*/ - private void setVideoTrack(VideoTrack videoTrack) { + public void setVideoTrack(VideoTrack videoTrack) { VideoTrack oldValue = this.videoTrack; if (oldValue != videoTrack) { @@ -170,9 +207,14 @@ private void setVideoTrack(VideoTrack videoTrack) { this.videoTrack = videoTrack; if (videoTrack != null) { - tryAddRendererToVideoTrack(); + try { + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to " + videoTrack.id()); + tryAddRendererToVideoTrack(); + } catch (Exception e) { + Log.e(TAG, "tryAddRendererToVideoTrack " + e); + } } else { - Log.w(TAG, "VideoTrack is null"); + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to null"); } } } @@ -181,7 +223,7 @@ private void setVideoTrack(VideoTrack videoTrack) { * Starts rendering {@link #videoTrack} if rendering is not in progress and * all preconditions for the start of rendering are met. */ - private void tryAddRendererToVideoTrack() { + private void tryAddRendererToVideoTrack() throws Exception { if (videoTrack != null) { EglBase.Context sharedContext = EglUtils.getRootEglBaseContext(); @@ -195,10 +237,23 @@ private void tryAddRendererToVideoTrack() { surfaceTextureRenderer.release(); listenRendererEvents(); surfaceTextureRenderer.init(sharedContext, rendererEvents); - surfaceTextureRenderer.surfaceCreated(texture); + surfaceTextureRenderer.surfaceCreated(producer); videoTrack.addSink(surfaceTextureRenderer); } } + public boolean checkMediaStream(String id, String ownerTag) { + if (null == id || null == mediaStream || ownerTag == null || !ownerTag.equals(this.ownerTag)) { + return false; + } + return id.equals(mediaStream.getId()); + } + + public boolean checkVideoTrack(String id, String ownerTag) { + if (null == id || null == videoTrack || ownerTag == null || !ownerTag.equals(this.ownerTag)) { + return false; + } + return id.equals(videoTrack.id()); + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java 
b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java index 5351e7f6d2..3a49f88c85 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java @@ -1,142 +1,198 @@ package com.cloudwebrtc.webrtc; import android.app.Activity; +import android.app.Application; import android.content.Context; +import android.os.Bundle; import android.util.Log; + import androidx.annotation.NonNull; -import com.cloudwebrtc.webrtc.MethodCallHandlerImpl.AudioManager; -import com.cloudwebrtc.webrtc.utils.RTCAudioManager; +import androidx.lifecycle.DefaultLifecycleObserver; +import androidx.lifecycle.Lifecycle; +import androidx.lifecycle.LifecycleOwner; + +import com.cloudwebrtc.webrtc.audio.AudioProcessingController; +import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; +import com.cloudwebrtc.webrtc.utils.AnyThreadSink; +import com.cloudwebrtc.webrtc.utils.ConstraintsMap; + +import org.webrtc.ExternalAudioProcessingFactory; +import org.webrtc.MediaStreamTrack; + import io.flutter.embedding.engine.plugins.FlutterPlugin; import io.flutter.embedding.engine.plugins.activity.ActivityAware; import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding; +import io.flutter.embedding.engine.plugins.lifecycle.HiddenLifecycleReference; import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.MethodChannel; -import io.flutter.plugin.common.PluginRegistry.Registrar; import io.flutter.view.TextureRegistry; -import java.util.Set; /** * FlutterWebRTCPlugin */ -public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware { - - static public final String TAG = "FlutterWebRTCPlugin"; - - private RTCAudioManager rtcAudioManager; - private MethodChannel channel; - private MethodCallHandlerImpl methodCallHandler; - - public FlutterWebRTCPlugin() { - } - - /** - * Plugin registration. 
- */ - public static void registerWith(Registrar registrar) { - final FlutterWebRTCPlugin plugin = new FlutterWebRTCPlugin(); - - plugin.startListening(registrar.context(), registrar.messenger(), registrar.textures()); - - if (registrar.activeContext() instanceof Activity) { - plugin.methodCallHandler.setActivity((Activity) registrar.activeContext()); - } - - registrar.addViewDestroyListener(view -> { - plugin.stopListening(); - return false; - }); - } - - @Override - public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { - startListening(binding.getApplicationContext(), binding.getBinaryMessenger(), - binding.getTextureRegistry()); - } - - @Override - public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { - stopListening(); - } - - @Override - public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) { - methodCallHandler.setActivity(binding.getActivity()); - } - - @Override - public void onDetachedFromActivityForConfigChanges() { - methodCallHandler.setActivity(null); - } - - @Override - public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding binding) { - methodCallHandler.setActivity(binding.getActivity()); - } - - @Override - public void onDetachedFromActivity() { - methodCallHandler.setActivity(null); - } - - private void startListening(final Context context, BinaryMessenger messenger, - TextureRegistry textureRegistry) { - methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry, - new AudioManager() { - @Override - public void onAudioManagerRequested(boolean requested) { - if (requested) { - if (rtcAudioManager == null) { - rtcAudioManager = RTCAudioManager.create(context); - } - rtcAudioManager.start(FlutterWebRTCPlugin.this::onAudioManagerDevicesChanged); - } else { - if (rtcAudioManager != null) { - rtcAudioManager.stop(); - rtcAudioManager = null; - } +public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventChannel.StreamHandler { 
+ + static public final String TAG = "FlutterWebRTCPlugin"; + private static Application application; + + private MethodChannel methodChannel; + private MethodCallHandlerImpl methodCallHandler; + private LifeCycleObserver observer; + private Lifecycle lifecycle; + private EventChannel eventChannel; + public EventChannel.EventSink eventSink; + + public FlutterWebRTCPlugin() { + sharedSingleton = this; + } + + public static FlutterWebRTCPlugin sharedSingleton; + + public AudioProcessingController getAudioProcessingController() { + return methodCallHandler.audioProcessingController; + } + + public MediaStreamTrack getTrackForId(String trackId, String peerConnectionId) { + return methodCallHandler.getTrackForId(trackId, peerConnectionId); + } + + public LocalTrack getLocalTrack(String trackId) { + return methodCallHandler.getLocalTrack(trackId); + } + + public MediaStreamTrack getRemoteTrack(String trackId) { + return methodCallHandler.getRemoteTrack(trackId); + } + + @Override + public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { + startListening(binding.getApplicationContext(), binding.getBinaryMessenger(), + binding.getTextureRegistry()); + } + + @Override + public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { + stopListening(); + } + + @Override + public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) { + methodCallHandler.setActivity(binding.getActivity()); + this.observer = new LifeCycleObserver(); + this.lifecycle = ((HiddenLifecycleReference) binding.getLifecycle()).getLifecycle(); + this.lifecycle.addObserver(this.observer); + } + + @Override + public void onDetachedFromActivityForConfigChanges() { + methodCallHandler.setActivity(null); + } + + @Override + public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding binding) { + methodCallHandler.setActivity(binding.getActivity()); + } + + @Override + public void onDetachedFromActivity() { + methodCallHandler.setActivity(null); + 
if (this.observer != null) { + this.lifecycle.removeObserver(this.observer); + if (application!=null) { + application.unregisterActivityLifecycleCallbacks(this.observer); } - } + } + this.lifecycle = null; + } + + private void startListening(final Context context, BinaryMessenger messenger, + TextureRegistry textureRegistry) { + AudioSwitchManager.instance = new AudioSwitchManager(context); + methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry); + methodChannel = new MethodChannel(messenger, "FlutterWebRTC.Method"); + methodChannel.setMethodCallHandler(methodCallHandler); + eventChannel = new EventChannel( messenger,"FlutterWebRTC.Event"); + eventChannel.setStreamHandler(this); + AudioSwitchManager.instance.audioDeviceChangeListener = (devices, currentDevice) -> { + Log.w(TAG, "audioFocusChangeListener " + devices+ " " + currentDevice); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onDeviceChange"); + sendEvent(params.toMap()); + return null; + }; + } + + private void stopListening() { + methodCallHandler.dispose(); + methodCallHandler = null; + methodChannel.setMethodCallHandler(null); + eventChannel.setStreamHandler(null); + if (AudioSwitchManager.instance != null) { + Log.d(TAG, "Stopping the audio manager..."); + AudioSwitchManager.instance.stop(); + } + } + + @Override + public void onListen(Object arguments, EventChannel.EventSink events) { + eventSink = new AnyThreadSink(events); + } + @Override + public void onCancel(Object arguments) { + eventSink = null; + } - @Override - public void setMicrophoneMute(boolean mute) { - if (rtcAudioManager != null) { - rtcAudioManager.setMicrophoneMute(mute); + public void sendEvent(Object event) { + if(eventSink != null) { + eventSink.success(event); + } + } + + private class LifeCycleObserver implements Application.ActivityLifecycleCallbacks, DefaultLifecycleObserver { + + @Override + public void onActivityCreated(Activity activity, Bundle savedInstanceState) { 
+ + } + + @Override + public void onActivityStarted(Activity activity) { + + } + + @Override + public void onActivityResumed(Activity activity) { + if (null != methodCallHandler) { + methodCallHandler.reStartCamera(); } - } + } - @Override - public void setSpeakerphoneOn(boolean on) { - if (rtcAudioManager != null) { - rtcAudioManager.setSpeakerphoneOn(on); + @Override + public void onResume(LifecycleOwner owner) { + if (null != methodCallHandler) { + methodCallHandler.reStartCamera(); } - } - }); + } - channel = new MethodChannel(messenger, "FlutterWebRTC.Method"); - channel.setMethodCallHandler(methodCallHandler); - } + @Override + public void onActivityPaused(Activity activity) { - private void stopListening() { - methodCallHandler.dispose(); - methodCallHandler = null; - channel.setMethodCallHandler(null); + } - if (rtcAudioManager != null) { - Log.d(TAG, "Stopping the audio manager..."); - rtcAudioManager.stop(); - rtcAudioManager = null; - } - } + @Override + public void onActivityStopped(Activity activity) { + + } - // This method is called when the audio manager reports audio device change, - // e.g. from wired headset to speakerphone. - private void onAudioManagerDevicesChanged( - final RTCAudioManager.AudioDevice device, - final Set availableDevices) { - Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", " - + "selected: " + device); - // TODO(henrika): add callback handler. 
- } + @Override + public void onActivitySaveInstanceState(Activity activity, Bundle outState) { + } + @Override + public void onActivityDestroyed(Activity activity) { + + } + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index 8d94262ec5..0b0998f384 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -4,37 +4,38 @@ import android.app.Activity; import android.app.Fragment; import android.app.FragmentTransaction; +import android.content.ContentResolver; import android.content.ContentValues; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; -import android.hardware.Camera; -import android.hardware.Camera.Parameters; -import android.hardware.camera2.CameraAccessException; -import android.hardware.camera2.CameraCaptureSession; -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CameraDevice; +import android.graphics.Point; import android.hardware.camera2.CameraManager; -import android.hardware.camera2.CaptureRequest; +import android.media.AudioDeviceInfo; import android.media.projection.MediaProjection; import android.media.projection.MediaProjectionManager; +import android.net.Uri; import android.os.Build; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; +import android.os.ParcelFileDescriptor; import android.os.ResultReceiver; import android.provider.MediaStore; import android.util.Log; -import android.util.Range; +import android.util.Pair; import android.util.SparseArray; -import android.view.Surface; +import android.view.Display; import android.view.WindowManager; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; +import 
com.cloudwebrtc.webrtc.audio.AudioSwitchManager; +import com.cloudwebrtc.webrtc.audio.AudioUtils; +import com.cloudwebrtc.webrtc.audio.LocalAudioTrack; import com.cloudwebrtc.webrtc.record.AudioChannel; import com.cloudwebrtc.webrtc.record.AudioSamplesInterceptor; import com.cloudwebrtc.webrtc.record.MediaRecorderImpl; @@ -46,21 +47,24 @@ import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils; import com.cloudwebrtc.webrtc.utils.ObjectType; import com.cloudwebrtc.webrtc.utils.PermissionUtils; +import com.cloudwebrtc.webrtc.video.LocalVideoTrack; +import com.cloudwebrtc.webrtc.video.VideoCapturerInfo; import org.webrtc.AudioSource; import org.webrtc.AudioTrack; import org.webrtc.Camera1Capturer; import org.webrtc.Camera1Enumerator; +import org.webrtc.Camera1Helper; import org.webrtc.Camera2Capturer; import org.webrtc.Camera2Enumerator; -import org.webrtc.CameraEnumerationAndroid.CaptureFormat; +import org.webrtc.Camera2Helper; import org.webrtc.CameraEnumerator; import org.webrtc.CameraVideoCapturer; import org.webrtc.MediaConstraints; import org.webrtc.MediaStream; import org.webrtc.MediaStreamTrack; import org.webrtc.PeerConnectionFactory; -import org.webrtc.ScreenCapturerAndroid; +import org.webrtc.Size; import org.webrtc.SurfaceTextureHelper; import org.webrtc.VideoCapturer; import org.webrtc.VideoSource; @@ -68,7 +72,9 @@ import org.webrtc.audio.JavaAudioDeviceModule; import java.io.File; -import java.lang.reflect.Field; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -80,8 +86,7 @@ * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce * complexity and to (somewhat) separate concerns. 
*/ -class GetUserMediaImpl { - +public class GetUserMediaImpl { private static final int DEFAULT_WIDTH = 1280; private static final int DEFAULT_HEIGHT = 720; private static final int DEFAULT_FPS = 30; @@ -89,7 +94,7 @@ class GetUserMediaImpl { private static final String PERMISSION_AUDIO = Manifest.permission.RECORD_AUDIO; private static final String PERMISSION_VIDEO = Manifest.permission.CAMERA; private static final String PERMISSION_SCREEN = "android.permission.MediaProjection"; - private static int CAPTURE_PERMISSION_REQUEST_CODE = 1; + private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1; private static final String GRANT_RESULTS = "GRANT_RESULT"; private static final String PERMISSIONS = "PERMISSION"; private static final String PROJECTION_DATA = "PROJECTION_DATA"; @@ -98,21 +103,24 @@ class GetUserMediaImpl { static final String TAG = FlutterWebRTCPlugin.TAG; - private final Map mVideoCapturers = new HashMap<>(); - + private final Map mVideoCapturers = new HashMap<>(); + private final Map mSurfaceTextureHelpers = new HashMap<>(); private final StateProvider stateProvider; private final Context applicationContext; static final int minAPILevel = Build.VERSION_CODES.LOLLIPOP; - private MediaProjectionManager mProjectionManager = null; - private static MediaProjection sMediaProjection = null; final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor(); private OutputAudioSamplesInterceptor outputSamplesInterceptor = null; JavaAudioDeviceModule audioDeviceModule; private final SparseArray mediaRecorders = new SparseArray<>(); + private AudioDeviceInfo preferredInput = null; + private boolean isTorchOn; + private Intent mediaProjectionData = null; - public void screenRequestPremissions(ResultReceiver resultReceiver) { + + public void screenRequestPermissions(ResultReceiver resultReceiver) { + mediaProjectionData = null; final Activity activity = stateProvider.getActivity(); if (activity == null) { // Activity went away, nothing we 
can do. @@ -139,11 +147,27 @@ public void screenRequestPremissions(ResultReceiver resultReceiver) { } } + public void requestCapturePermission(final Result result) { + screenRequestPermissions( + new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult(int requestCode, Bundle resultData) { + int resultCode = resultData.getInt(GRANT_RESULTS); + if (resultCode == Activity.RESULT_OK) { + mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + result.success(true); + } else { + result.success(false); + } + } + }); + } + public static class ScreenRequestPermissionsFragment extends Fragment { private ResultReceiver resultReceiver = null; private int requestCode = 0; - private int resultCode = 0; + private final int resultCode = 0; private void checkSelfPermissions(boolean requestPermissions) { if (resultCode != Activity.RESULT_OK) { @@ -211,6 +235,12 @@ public void onResume() { this.applicationContext = applicationContext; } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + /** * Includes default constraints set for the audio media type. * @@ -237,22 +267,21 @@ private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { * @param isFacing 'user' mapped with 'front' is true (default) 'environment' mapped with 'back' * is false * @param sourceId (String) use this sourceId and ignore facing mode if specified. - * @return VideoCapturer can invoke with startCapture/stopCapture null + * @return Pair of deviceName to VideoCapturer. Can invoke with startCapture/stopCapture null * if not matched camera with specified facing mode. 
*/ - private VideoCapturer createVideoCapturer( - CameraEnumerator enumerator, boolean isFacing, String sourceId) { - VideoCapturer videoCapturer = null; - + private Pair createVideoCapturer( + CameraEnumerator enumerator, boolean isFacing, String sourceId, CameraEventsHandler cameraEventsHandler) { + VideoCapturer videoCapturer; // if sourceId given, use specified sourceId first final String[] deviceNames = enumerator.getDeviceNames(); - if (sourceId != null) { + if (sourceId != null && !sourceId.equals("")) { for (String name : deviceNames) { if (name.equals(sourceId)) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + videoCapturer = enumerator.createCapturer(name, cameraEventsHandler); if (videoCapturer != null) { Log.d(TAG, "create user specified camera " + name + " succeeded"); - return videoCapturer; + return new Pair<>(name, videoCapturer); } else { Log.d(TAG, "create user specified camera " + name + " failed"); break; // fallback to facing mode @@ -265,17 +294,25 @@ private VideoCapturer createVideoCapturer( String facingStr = isFacing ? "front" : "back"; for (String name : deviceNames) { if (enumerator.isFrontFacing(name) == isFacing) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + videoCapturer = enumerator.createCapturer(name, cameraEventsHandler); if (videoCapturer != null) { Log.d(TAG, "Create " + facingStr + " camera " + name + " succeeded"); - return videoCapturer; + + return new Pair<>(name, videoCapturer); } else { Log.e(TAG, "Create " + facingStr + " camera " + name + " failed"); } } } - // should we fallback to available camera automatically? 
- return videoCapturer; + + // falling back to the first available camera + if (deviceNames.length > 0) { + videoCapturer = enumerator.createCapturer(deviceNames[0], cameraEventsHandler); + Log.d(TAG, "Falling back to the first available camera"); + return new Pair<>(deviceNames[0], videoCapturer); + } + + return null; } /** @@ -295,6 +332,10 @@ private String getFacingMode(ConstraintsMap mediaConstraints) { * @return String value of "sourceId" optional "GUM" constraint or null if not specified. */ private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { + if (mediaConstraints != null + && mediaConstraints.hasKey("deviceId")) { + return mediaConstraints.getString("deviceId"); + } if (mediaConstraints != null && mediaConstraints.hasKey("optional") && mediaConstraints.getType("optional") == ObjectType.Array) { @@ -314,13 +355,15 @@ private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { return null; } - private AudioTrack getUserAudio(ConstraintsMap constraints) { - MediaConstraints audioConstraints; + private ConstraintsMap getUserAudio(ConstraintsMap constraints, MediaStream stream) { + AudioSwitchManager.instance.start(); + MediaConstraints audioConstraints = new MediaConstraints(); + String deviceId = null; if (constraints.getType("audio") == ObjectType.Boolean) { - audioConstraints = new MediaConstraints(); addDefaultAudioConstraints(audioConstraints); } else { audioConstraints = MediaConstraintsUtils.parseMediaConstraints(constraints.getMap("audio")); + deviceId = getSourceIdConstraint(constraints.getMap("audio")); } Log.i(TAG, "getUserMedia(audio): " + audioConstraints); @@ -328,8 +371,36 @@ private AudioTrack getUserAudio(ConstraintsMap constraints) { String trackId = stateProvider.getNextTrackUUID(); PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); AudioSource audioSource = pcFactory.createAudioSource(audioConstraints); + AudioTrack track = pcFactory.createAudioTrack(trackId, audioSource); + 
stream.addTrack(track); + + stateProvider.putLocalTrack(track.id(), new LocalAudioTrack(track)); + + ConstraintsMap trackParams = new ConstraintsMap(); + trackParams.putBoolean("enabled", track.enabled()); + trackParams.putString("id", track.id()); + trackParams.putString("kind", "audio"); + trackParams.putString("label", track.id()); + trackParams.putString("readyState", track.state().toString()); + trackParams.putBoolean("remote", false); + + if (deviceId == null) { + if (VERSION.SDK_INT >= VERSION_CODES.M) { + deviceId = "" + getPreferredInputDevice(preferredInput); + } + } - return pcFactory.createAudioTrack(trackId, audioSource); + ConstraintsMap settings = new ConstraintsMap(); + settings.putString("deviceId", deviceId); + settings.putString("kind", "audioinput"); + settings.putBoolean("autoGainControl", true); + settings.putBoolean("echoCancellation", true); + settings.putBoolean("noiseSuppression", true); + settings.putInt("channelCount", 1); + settings.putInt("latency", 0); + trackParams.putMap("settings", settings.toMap()); + + return trackParams; } /** @@ -340,23 +411,6 @@ private AudioTrack getUserAudio(ConstraintsMap constraints) { void getUserMedia( final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { - // TODO: change getUserMedia constraints format to support new syntax - // constraint format seems changed, and there is no mandatory any more. 
- // and has a new syntax/attrs to specify resolution - // should change `parseConstraints()` according - // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints - - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { - videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); - } - } - final ArrayList requestPermissions = new ArrayList<>(); if (constraints.hasKey("audio")) { @@ -396,7 +450,7 @@ void getUserMedia( // requestedMediaTypes is the empty set, the method invocation fails // with a TypeError. if (requestPermissions.isEmpty()) { - result.error("TypeError", "constraints requests no media types", null); + resultError("getUserMedia", "TypeError, constraints requests no media types", result); return; } @@ -423,127 +477,119 @@ public void invoke(Object... args) { // getUserMedia() algorithm, if the user has denied // permission, fail "with a new DOMException object whose // name attribute has the value NotAllowedError." 
- result.error("DOMException", "NotAllowedError", null); + resultError("getUserMedia", "DOMException, NotAllowedError", result); } }); } void getDisplayMedia( final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { - videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); - } + if (mediaProjectionData == null) { + screenRequestPermissions( + new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult(int requestCode, Bundle resultData) { + Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + int resultCode = resultData.getInt(GRANT_RESULTS); + + if (resultCode != Activity.RESULT_OK) { + resultError("screenRequestPermissions", "User didn't give permission to capture the screen.", result); + return; + } + getDisplayMedia(result, mediaStream, mediaProjectionData); + } + }); + } else { + getDisplayMedia(result, mediaStream, mediaProjectionData); } + } - final ConstraintsMap videoConstraintsMandatory2 = videoConstraintsMandatory; + private void getDisplayMedia(final Result result, final MediaStream mediaStream, final Intent mediaProjectionData) { + /* Create ScreenCapture */ + VideoTrack displayTrack = null; + VideoCapturer videoCapturer = null; + videoCapturer = + new OrientationAwareScreenCapturer( + mediaProjectionData, + new MediaProjection.Callback() { + @Override + public void onStop() { + super.onStop(); + // After Huawei P30 and Android 10 version test, the onstop method is called, which will not affect the next process, + // and there is no need to call the resulterror method + //resultError("MediaProjection.Callback()", "User revoked 
permission to capture the screen.", result); + } + }); + if (videoCapturer == null) { + resultError("screenRequestPermissions", "GetDisplayMediaFailed, User revoked permission to capture the screen.", result); + return; + } - screenRequestPremissions( - new ResultReceiver(new Handler(Looper.getMainLooper())) { - @Override - protected void onReceiveResult(int requestCode, Bundle resultData) { + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(true); - /* Create ScreenCapture */ - int resultCode = resultData.getInt(GRANT_RESULTS); - Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + String threadName = Thread.currentThread().getName() + "_texture_screen_thread"; + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer.initialize( + surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); - if (resultCode != Activity.RESULT_OK) { - result.error(null, "User didn't give permission to capture the screen.", null); - return; - } + WindowManager wm = + (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE); - MediaStreamTrack[] tracks = new MediaStreamTrack[1]; - VideoCapturer videoCapturer = null; - videoCapturer = - new ScreenCapturerAndroid( - mediaProjectionData, - new MediaProjection.Callback() { - @Override - public void onStop() { - Log.e(TAG, "User revoked permission to capture the screen."); - result.error(null, "User revoked permission to capture the screen.", null); - } - }); - if (videoCapturer == null) { - result.error( - /* type */ "GetDisplayMediaFailed", "Failed to create new VideoCapturer!", null); - return; - } + Display display = wm.getDefaultDisplay(); + Point size = new Point(); + display.getRealSize(size); - PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); - VideoSource videoSource = 
pcFactory.createVideoSource(true); + VideoCapturerInfoEx info = new VideoCapturerInfoEx(); + info.width = size.x; + info.height = size.y; + info.fps = DEFAULT_FPS; + info.isScreenCapture = true; + info.capturer = videoCapturer; - String threadName = Thread.currentThread().getName(); - SurfaceTextureHelper surfaceTextureHelper = - SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer.initialize( - surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + videoCapturer.startCapture(info.width, info.height, info.fps); + Log.d(TAG, "OrientationAwareScreenCapturer.startCapture: " + info.width + "x" + info.height + "@" + info.fps); - WindowManager wm = - (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE); + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, info); - int width = wm.getDefaultDisplay().getWidth(); - int height = wm.getDefaultDisplay().getHeight(); - int fps = DEFAULT_FPS; + displayTrack = pcFactory.createVideoTrack(trackId, videoSource); - videoCapturer.startCapture(width, height, fps); - Log.d(TAG, "ScreenCapturerAndroid.startCapture: " + width + "x" + height + "@" + fps); + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + ConstraintsMap successResult = new ConstraintsMap(); - String trackId = stateProvider.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); + if (displayTrack != null) { + String id = displayTrack.id(); - tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); + LocalVideoTrack displayLocalVideoTrack = new LocalVideoTrack(displayTrack); + videoSource.setVideoProcessor(displayLocalVideoTrack); - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); - ConstraintsMap successResult = new ConstraintsMap(); + stateProvider.putLocalTrack(id, displayLocalVideoTrack); - for (MediaStreamTrack 
track : tracks) { - if (track == null) { - continue; - } + ConstraintsMap track_ = new ConstraintsMap(); + String kind = displayTrack.kind(); - String id = track.id(); + track_.putBoolean("enabled", displayTrack.enabled()); + track_.putString("id", id); + track_.putString("kind", kind); + track_.putString("label", kind); + track_.putString("readyState", displayTrack.state().toString()); + track_.putBoolean("remote", false); - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - stateProvider.getLocalTracks().put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); - - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); - - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); - } else { - videoTracks.pushMap(track_); - } - } + videoTracks.pushMap(track_); + mediaStream.addTrack(displayTrack); + } - String streamId = mediaStream.getId(); + String streamId = mediaStream.getId(); - Log.d(TAG, "MediaStream id: " + streamId); - stateProvider.getLocalStreams().put(streamId, mediaStream); - successResult.putString("streamId", streamId); - successResult.putArray("audioTracks", audioTracks.toArrayList()); - successResult.putArray("videoTracks", videoTracks.toArrayList()); - result.success(successResult.toMap()); - } - }); + Log.d(TAG, "MediaStream id: " + streamId); + stateProvider.putLocalStream(streamId, mediaStream); + successResult.putString("streamId", streamId); + successResult.putArray("audioTracks", audioTracks.toArrayList()); + successResult.putArray("videoTracks", videoTracks.toArrayList()); + result.success(successResult.toMap()); } /** @@ -556,24 +602,28 @@ private void getUserMedia( Result result, MediaStream mediaStream, List 
grantedPermissions) { - MediaStreamTrack[] tracks = new MediaStreamTrack[2]; + ConstraintsMap[] trackParams = new ConstraintsMap[2]; // If we fail to create either, destroy the other one and fail. if ((grantedPermissions.contains(PERMISSION_AUDIO) - && (tracks[0] = getUserAudio(constraints)) == null) + && (trackParams[0] = getUserAudio(constraints, mediaStream)) == null) || (grantedPermissions.contains(PERMISSION_VIDEO) - && (tracks[1] = getUserVideo(constraints)) == null)) { - for (MediaStreamTrack track : tracks) { + && (trackParams[1] = getUserVideo(constraints, mediaStream)) == null)) { + for (MediaStreamTrack track : mediaStream.audioTracks) { + if (track != null) { + track.dispose(); + } + } + for (MediaStreamTrack track : mediaStream.videoTracks) { if (track != null) { track.dispose(); } } - // XXX The following does not follow the getUserMedia() algorithm // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. 
- result.error(/* type */ "GetUserMediaFailed", "Failed to create new track", null); + resultError("getUserMedia", "Failed to create new track.", result); return; } @@ -581,41 +631,20 @@ private void getUserMedia( ConstraintsArray videoTracks = new ConstraintsArray(); ConstraintsMap successResult = new ConstraintsMap(); - for (MediaStreamTrack track : tracks) { - if (track == null) { + for (ConstraintsMap trackParam : trackParams) { + if (trackParam == null) { continue; } - - String id = track.id(); - - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - stateProvider.getLocalTracks().put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); - - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); - - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); + if (trackParam.getString("kind").equals("audio")) { + audioTracks.pushMap(trackParam); } else { - videoTracks.pushMap(track_); + videoTracks.pushMap(trackParam); } } String streamId = mediaStream.getId(); - Log.d(TAG, "MediaStream id: " + streamId); - stateProvider.getLocalStreams().put(streamId, mediaStream); + stateProvider.putLocalStream(streamId, mediaStream); successResult.putString("streamId", streamId); successResult.putArray("audioTracks", audioTracks.toArrayList()); @@ -623,7 +652,46 @@ private void getUserMedia( result.success(successResult.toMap()); } - private VideoTrack getUserVideo(ConstraintsMap constraints) { + private boolean isFacing = true; + + /** + * @return Returns the integer at the key, or the `ideal` property if it is a map. 
+ */ + @Nullable + private Integer getConstrainInt(@Nullable ConstraintsMap constraintsMap, String key) { + if (constraintsMap == null) { + return null; + } + + if (constraintsMap.getType(key) == ObjectType.Number) { + try { + return constraintsMap.getInt(key); + } catch (Exception e) { + // Could be a double instead + return (int) Math.round(constraintsMap.getDouble(key)); + } + } + + if (constraintsMap.getType(key) == ObjectType.String) { + try { + return Integer.parseInt(constraintsMap.getString(key)); + } catch (Exception e) { + // Could be a double instead + return (int) Math.round(Double.parseDouble(constraintsMap.getString(key))); + } + } + + if (constraintsMap.getType(key) == ObjectType.Map) { + ConstraintsMap innerMap = constraintsMap.getMap(key); + if (constraintsMap.getType("ideal") == ObjectType.Number) { + return innerMap.getInt("ideal"); + } + } + + return null; + } + + private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream mediaStream) { ConstraintsMap videoConstraintsMap = null; ConstraintsMap videoConstraintsMandatory = null; if (constraints.getType("video") == ObjectType.Map) { @@ -653,58 +721,143 @@ private VideoTrack getUserVideo(ConstraintsMap constraints) { } String facingMode = getFacingMode(videoConstraintsMap); - boolean isFacing = facingMode == null || !facingMode.equals("environment"); - String sourceId = getSourceIdConstraint(videoConstraintsMap); + isFacing = facingMode == null || !facingMode.equals("environment"); + String deviceId = getSourceIdConstraint(videoConstraintsMap); + CameraEventsHandler cameraEventsHandler = new CameraEventsHandler(); + Pair result = createVideoCapturer(cameraEnumerator, isFacing, deviceId, cameraEventsHandler); - VideoCapturer videoCapturer = createVideoCapturer(cameraEnumerator, isFacing, sourceId); - - if (videoCapturer == null) { + if (result == null) { return null; } + deviceId = result.first; + VideoCapturer videoCapturer = result.second; + + if (facingMode == null && 
cameraEnumerator.isFrontFacing(deviceId)) { + facingMode = "user"; + } else if (facingMode == null && cameraEnumerator.isBackFacing(deviceId)) { + facingMode = "environment"; + } + // else, leave facingMode as it was + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); VideoSource videoSource = pcFactory.createVideoSource(false); - String threadName = Thread.currentThread().getName(); + String threadName = Thread.currentThread().getName() + "_texture_camera_thread"; SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + + if (surfaceTextureHelper == null) { + Log.e(TAG, "surfaceTextureHelper is null"); + return null; + } + videoCapturer.initialize( surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); - // Fall back to defaults if keys are missing. - int width = - videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") - ? videoConstraintsMandatory.getInt("minWidth") - : DEFAULT_WIDTH; - int height = - videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") - ? videoConstraintsMandatory.getInt("minHeight") - : DEFAULT_HEIGHT; - int fps = - videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") - ? videoConstraintsMandatory.getInt("minFrameRate") - : DEFAULT_FPS; - - videoCapturer.startCapture(width, height, fps); + VideoCapturerInfoEx info = new VideoCapturerInfoEx(); + + Integer videoWidth = getConstrainInt(videoConstraintsMap, "width"); + int targetWidth = videoWidth != null + ? videoWidth + : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") + ? videoConstraintsMandatory.getInt("minWidth") + : DEFAULT_WIDTH; + + Integer videoHeight = getConstrainInt(videoConstraintsMap, "height"); + int targetHeight = videoHeight != null + ? videoHeight + : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") + ? 
videoConstraintsMandatory.getInt("minHeight") + : DEFAULT_HEIGHT; + + Integer videoFrameRate = getConstrainInt(videoConstraintsMap, "frameRate"); + int targetFps = videoFrameRate != null + ? videoFrameRate + : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") + ? videoConstraintsMandatory.getInt("minFrameRate") + : DEFAULT_FPS; + + info.width = targetWidth; + info.height = targetHeight; + info.fps = targetFps; + info.capturer = videoCapturer; + info.cameraName = deviceId; + + // Find actual capture format. + Size actualSize = null; + if (videoCapturer instanceof Camera1Capturer) { + int cameraId = Camera1Helper.getCameraId(deviceId); + actualSize = Camera1Helper.findClosestCaptureFormat(cameraId, targetWidth, targetHeight); + } else if (videoCapturer instanceof Camera2Capturer) { + CameraManager cameraManager = (CameraManager) applicationContext.getSystemService(Context.CAMERA_SERVICE); + actualSize = Camera2Helper.findClosestCaptureFormat(cameraManager, deviceId, targetWidth, targetHeight); + } + + if (actualSize != null) { + info.width = actualSize.width; + info.height = actualSize.height; + } + + info.cameraEventsHandler = cameraEventsHandler; + videoCapturer.startCapture(targetWidth, targetHeight, targetFps); + + cameraEventsHandler.waitForCameraOpen(); + String trackId = stateProvider.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); + mVideoCapturers.put(trackId, info); + mSurfaceTextureHelpers.put(trackId, surfaceTextureHelper); + + Log.d(TAG, "Target: " + targetWidth + "x" + targetHeight + "@" + targetFps + ", Actual: " + info.width + "x" + info.height + "@" + info.fps); + + VideoTrack track = pcFactory.createVideoTrack(trackId, videoSource); + mediaStream.addTrack(track); + + LocalVideoTrack localVideoTrack = new LocalVideoTrack(track); + videoSource.setVideoProcessor(localVideoTrack); + + stateProvider.putLocalTrack(track.id(),localVideoTrack); + + ConstraintsMap trackParams = new ConstraintsMap(); - 
Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + fps); - videoSource.adaptOutputFormat(width, height, fps); + trackParams.putBoolean("enabled", track.enabled()); + trackParams.putString("id", track.id()); + trackParams.putString("kind", "video"); + trackParams.putString("label", track.id()); + trackParams.putString("readyState", track.state().toString()); + trackParams.putBoolean("remote", false); - return pcFactory.createVideoTrack(trackId, videoSource); + ConstraintsMap settings = new ConstraintsMap(); + settings.putString("deviceId", deviceId); + settings.putString("kind", "videoinput"); + settings.putInt("width", info.width); + settings.putInt("height", info.height); + settings.putInt("frameRate", info.fps); + if (facingMode != null) settings.putString("facingMode", facingMode); + trackParams.putMap("settings", settings.toMap()); + + return trackParams; } void removeVideoCapturer(String id) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer != null) { + VideoCapturerInfoEx info = mVideoCapturers.get(id); + if (info != null) { try { - videoCapturer.stopCapture(); + info.capturer.stopCapture(); + if (info.cameraEventsHandler != null) { + info.cameraEventsHandler.waitForCameraClosed(); + } } catch (InterruptedException e) { Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); } finally { - videoCapturer.dispose(); + info.capturer.dispose(); mVideoCapturers.remove(id); + SurfaceTextureHelper helper = mSurfaceTextureHelpers.get(id); + if (helper != null) { + helper.stopListening(); + helper.dispose(); + mSurfaceTextureHelpers.remove(id); + } } } } @@ -744,32 +897,51 @@ private void requestPermissions( }; final Activity activity = stateProvider.getActivity(); - if (activity != null) { - PermissionUtils.requestPermissions( - activity, permissions.toArray(new String[permissions.size()]), callback); - } + final Context context = stateProvider.getApplicationContext(); + PermissionUtils.requestPermissions( + 
context, + activity, + permissions.toArray(new String[permissions.size()]), callback); } void switchCamera(String id, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); + VideoCapturer videoCapturer = mVideoCapturers.get(id).capturer; if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + id, null); + resultError("switchCamera", "Video capturer not found for id: " + id, result); return; } - CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; - cameraVideoCapturer.switchCamera( - new CameraVideoCapturer.CameraSwitchHandler() { - @Override - public void onCameraSwitchDone(boolean b) { - result.success(b); - } + CameraEnumerator cameraEnumerator; - @Override - public void onCameraSwitchError(String s) { - result.error("Switching camera failed", s, null); - } - }); + if (Camera2Enumerator.isSupported(applicationContext)) { + Log.d(TAG, "Creating video capturer using Camera2 API."); + cameraEnumerator = new Camera2Enumerator(applicationContext); + } else { + Log.d(TAG, "Creating video capturer using Camera1 API."); + cameraEnumerator = new Camera1Enumerator(false); + } + // if sourceId given, use specified sourceId first + final String[] deviceNames = cameraEnumerator.getDeviceNames(); + for (String name : deviceNames) { + if (cameraEnumerator.isFrontFacing(name) == !isFacing) { + CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; + cameraVideoCapturer.switchCamera( + new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean b) { + isFacing = !isFacing; + result.success(b); + } + + @Override + public void onCameraSwitchError(String s) { + resultError("switchCamera", "Switching camera failed: " + id, result); + } + }, name); + return; + } + } + resultError("switchCamera", "Switching camera failed: " + id, result); } /** @@ -797,209 +969,120 @@ void startRecordingToFile( mediaRecorders.append(id, mediaRecorder); } - void 
stopRecording(Integer id) { - MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); - if (mediaRecorder != null) { - mediaRecorder.stopRecording(); - mediaRecorders.remove(id); - File file = mediaRecorder.getRecordFile(); - if (file != null) { - ContentValues values = new ContentValues(3); - values.put(MediaStore.Video.Media.TITLE, file.getName()); - values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); - values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath()); - applicationContext - .getContentResolver() - .insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values); - } - } - } + void stopRecording(Integer id, String albumName) { + try { + MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); + if (mediaRecorder != null) { + mediaRecorder.stopRecording(); + mediaRecorders.remove(id); + File file = mediaRecorder.getRecordFile(); + Uri collection; + + if (file != null) { + ContentValues values = new ContentValues(); + values.put(MediaStore.Video.Media.TITLE, file.getName()); + values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName()); + values.put(MediaStore.Video.Media.ALBUM, albumName); + values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); + values.put(MediaStore.Video.Media.DATE_ADDED, System.currentTimeMillis() / 1000); + values.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis()); + + //Android version above 9 MediaStore uses RELATIVE_PATH + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + values.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/" + albumName); + values.put(MediaStore.Video.Media.IS_PENDING, 1); + + collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY); + } else { + //Android version 9 and below MediaStore uses DATA + values.put(MediaStore.Video.Media.DATA, "/storage/emulated/0/Movies/" + albumName + "/" + file.getName()); - void hasTorch(String trackId, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(trackId); - if (videoCapturer == 
null) { - result.error(null, "Video capturer not found for id: " + trackId, null); - return; - } + collection = MediaStore.Video.Media.EXTERNAL_CONTENT_URI; + } - if (VERSION.SDK_INT >= VERSION_CODES.LOLLIPOP && videoCapturer instanceof Camera2Capturer) { - CameraManager manager; - CameraDevice cameraDevice; + ContentResolver resolver = applicationContext.getContentResolver(); + Uri uriSavedMedia = resolver.insert(collection, values); - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } + assert uriSavedMedia != null; + ParcelFileDescriptor pfd = resolver.openFileDescriptor(uriSavedMedia, "w"); + assert pfd != null; + FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor()); - boolean flashIsAvailable; - try { - CameraCharacteristics characteristics = - manager.getCameraCharacteristics(cameraDevice.getId()); - flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } + InputStream in = new FileInputStream(file); - result.success(flashIsAvailable); - return; - } + byte[] buf = new byte[8192]; + int len; - if (videoCapturer instanceof Camera1Capturer) { - Camera camera; + while ((len = in.read(buf)) > 0) { + out.write(buf, 0, len); + } - try { - Object session = - getPrivateProperty( - 
Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; + out.close(); + in.close(); + pfd.close(); + values.clear(); + } } + } catch(Exception e){ - Parameters params = camera.getParameters(); - List supportedModes = params.getSupportedFlashModes(); - - result.success( - (supportedModes == null) ? false : supportedModes.contains(Parameters.FLASH_MODE_TORCH)); - return; } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); } - @RequiresApi(api = VERSION_CODES.LOLLIPOP) - void setTorch(String trackId, boolean torch, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(trackId); - if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); - return; - } - - if (videoCapturer instanceof Camera2Capturer) { - CameraCaptureSession captureSession; - CameraDevice cameraDevice; - CaptureFormat captureFormat; - int fpsUnitFactor; - Surface surface; - Handler cameraThreadHandler; - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - CameraManager manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); - captureSession = - (CameraCaptureSession) - getPrivateProperty(session.getClass(), session, "captureSession"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - captureFormat = - (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); - fpsUnitFactor = (int) 
getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); - surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = - (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - try { - final CaptureRequest.Builder captureRequestBuilder = - cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - captureRequestBuilder.set( - CaptureRequest.FLASH_MODE, - torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range<>( - captureFormat.framerate.min / fpsUnitFactor, - captureFormat.framerate.max / fpsUnitFactor)); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); - captureRequestBuilder.addTarget(surface); - captureSession.setRepeatingRequest( - captureRequestBuilder.build(), null, cameraThreadHandler); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); + public void reStartCamera(IsCameraEnabled getCameraId) { + for (Map.Entry item : mVideoCapturers.entrySet()) { + if (!item.getValue().isScreenCapture && getCameraId.isEnabled(item.getKey())) { + item.getValue().capturer.startCapture( + item.getValue().width, + item.getValue().height, + item.getValue().fps + ); } - - result.success(null); - return; } + } - if (videoCapturer instanceof Camera1Capturer) { - Camera camera; - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), videoCapturer, 
"currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - - Camera.Parameters params = camera.getParameters(); - params.setFlashMode( - torch ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); - camera.setParameters(params); + public interface IsCameraEnabled { + boolean isEnabled(String id); + } - result.success(null); - return; - } + public static class VideoCapturerInfoEx extends VideoCapturerInfo { + public CameraEventsHandler cameraEventsHandler; + } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); + public VideoCapturerInfoEx getCapturerInfo(String trackId) { + return mVideoCapturers.get(trackId); } - private Object getPrivateProperty(Class klass, Object object, String fieldName) - throws NoSuchFieldWithNameException { - try { - Field field = klass.getDeclaredField(fieldName); - field.setAccessible(true); - return field.get(object); - } catch (NoSuchFieldException e) { - throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); - } catch (IllegalAccessException e) { - // Should never happen since we are calling `setAccessible(true)` - throw new RuntimeException(e); + @RequiresApi(api = VERSION_CODES.M) + void setPreferredInputDevice(String deviceId) { + android.media.AudioManager audioManager = ((android.media.AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE)); + final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS); + if (devices.length > 0) { + for (int i = 0; i < devices.length; i++) { + AudioDeviceInfo device = devices[i]; + 
if(deviceId.equals(AudioUtils.getAudioDeviceId(device))) { + preferredInput = device; + audioDeviceModule.setPreferredInputDevice(preferredInput); + return; + } + } } } - private class NoSuchFieldWithNameException extends NoSuchFieldException { - - String className; - String fieldName; - - NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { - super(e.getMessage()); - this.className = className; - this.fieldName = fieldName; + @RequiresApi(api = VERSION_CODES.M) + int getPreferredInputDevice(AudioDeviceInfo deviceInfo) { + if (deviceInfo == null) { + return -1; + } + android.media.AudioManager audioManager = ((android.media.AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE)); + final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS); + for (int i = 0; i < devices.length; i++) { + if (devices[i].getId() == deviceInfo.getId()) { + return i; + } } + return -1; } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/LocalTrack.java b/android/src/main/java/com/cloudwebrtc/webrtc/LocalTrack.java new file mode 100644 index 0000000000..6135fdf1b3 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/LocalTrack.java @@ -0,0 +1,31 @@ +package com.cloudwebrtc.webrtc; + +import org.webrtc.MediaStreamTrack; + +public class LocalTrack { + public LocalTrack(MediaStreamTrack track) { + this.track = track; + } + + public MediaStreamTrack track; + + public void dispose() { + track.dispose(); + } + + public boolean enabled() { + return track.enabled(); + } + + public void setEnabled(boolean enabled) { + track.setEnabled(enabled); + } + + public String id() { + return track.id(); + } + + public String kind() { + return track.kind(); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index 21f8c91975..8444c0e66b 100644 --- 
a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -1,27 +1,50 @@ package com.cloudwebrtc.webrtc; +import static com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils.parseMediaConstraints; + import android.app.Activity; import android.content.Context; +import android.content.pm.PackageManager; import android.graphics.SurfaceTexture; import android.hardware.Camera; import android.hardware.Camera.CameraInfo; +import android.media.MediaRecorder; +import android.media.AudioAttributes; +import android.media.AudioDeviceInfo; +import android.os.Build; import android.util.Log; import android.util.LongSparseArray; +import android.view.Surface; import androidx.annotation.NonNull; import androidx.annotation.Nullable; - +import androidx.annotation.RequiresApi; + +import com.cloudwebrtc.webrtc.audio.AudioDeviceKind; +import com.cloudwebrtc.webrtc.audio.AudioProcessingController; +import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; +import com.cloudwebrtc.webrtc.audio.AudioUtils; +import com.cloudwebrtc.webrtc.audio.LocalAudioTrack; +import com.cloudwebrtc.webrtc.audio.PlaybackSamplesReadyCallbackAdapter; +import com.cloudwebrtc.webrtc.audio.RecordSamplesReadyCallbackAdapter; import com.cloudwebrtc.webrtc.record.AudioChannel; import com.cloudwebrtc.webrtc.record.FrameCapturer; import com.cloudwebrtc.webrtc.utils.AnyThreadResult; +import com.cloudwebrtc.webrtc.utils.Callback; import com.cloudwebrtc.webrtc.utils.ConstraintsArray; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; import com.cloudwebrtc.webrtc.utils.EglUtils; import com.cloudwebrtc.webrtc.utils.ObjectType; +import com.cloudwebrtc.webrtc.utils.PermissionUtils; +import com.cloudwebrtc.webrtc.utils.Utils; +import com.cloudwebrtc.webrtc.video.VideoCapturerInfo; +import com.cloudwebrtc.webrtc.video.camera.CameraUtils; +import com.cloudwebrtc.webrtc.video.camera.Point; +import 
com.cloudwebrtc.webrtc.video.LocalVideoTrack; +import com.twilio.audioswitch.AudioDevice; import org.webrtc.AudioTrack; -import org.webrtc.DefaultVideoDecoderFactory; -import org.webrtc.DefaultVideoEncoderFactory; +import org.webrtc.CryptoOptions; import org.webrtc.DtmfSender; import org.webrtc.EglBase; import org.webrtc.IceCandidate; @@ -45,6 +68,7 @@ import org.webrtc.PeerConnectionFactory; import org.webrtc.PeerConnectionFactory.InitializationOptions; import org.webrtc.PeerConnectionFactory.Options; +import org.webrtc.RtpCapabilities; import org.webrtc.RtpSender; import org.webrtc.SdpObserver; import org.webrtc.SessionDescription; @@ -52,10 +76,12 @@ import org.webrtc.VideoTrack; import org.webrtc.audio.AudioDeviceModule; import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.video.CustomVideoDecoderFactory; +import org.webrtc.video.CustomVideoEncoderFactory; import java.io.File; -import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -71,35 +97,21 @@ import io.flutter.view.TextureRegistry; import io.flutter.view.TextureRegistry.SurfaceTextureEntry; -import static com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils.parseMediaConstraints; - public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider { - - - interface AudioManager { - - void onAudioManagerRequested(boolean requested); - - void setMicrophoneMute(boolean mute); - - void setSpeakerphoneOn(boolean on); - - - } - static public final String TAG = "FlutterWebRTCPlugin"; private final Map mPeerConnectionObservers = new HashMap<>(); - private BinaryMessenger messenger; - private Context context; + private final BinaryMessenger messenger; + private final Context context; private final TextureRegistry textures; - private PeerConnectionFactory mFactory; - private final Map localStreams = new HashMap<>(); - private final Map localTracks = new 
HashMap<>(); + private final Map localTracks = new HashMap<>(); + private final LongSparseArray renders = new LongSparseArray<>(); - private LongSparseArray renders = new LongSparseArray<>(); + public RecordSamplesReadyCallbackAdapter recordSamplesReadyCallbackAdapter; + + public PlaybackSamplesReadyCallbackAdapter playbackSamplesReadyCallbackAdapter; /** * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce @@ -107,66 +119,238 @@ interface AudioManager { */ private GetUserMediaImpl getUserMediaImpl; - private final AudioManager audioManager; + private CameraUtils cameraUtils; private AudioDeviceModule audioDeviceModule; + private FlutterRTCFrameCryptor frameCryptor; + private Activity activity; - MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry, - @NonNull AudioManager audioManager) { + private CustomVideoEncoderFactory videoEncoderFactory; + + private CustomVideoDecoderFactory videoDecoderFactory; + + public AudioProcessingController audioProcessingController; + + MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry) { this.context = context; this.textures = textureRegistry; this.messenger = messenger; - this.audioManager = audioManager; + } + + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); } void dispose() { + for (final MediaStream mediaStream : localStreams.values()) { + streamDispose(mediaStream); + mediaStream.dispose(); + } + localStreams.clear(); + for (final LocalTrack track : localTracks.values()) { + track.dispose(); + } + localTracks.clear(); + for (final PeerConnectionObserver connection : mPeerConnectionObservers.values()) { + peerConnectionDispose(connection); + } mPeerConnectionObservers.clear(); } - - private void ensureInitialized() { + private void initialize(boolean 
bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List forceSWCodecList, + @Nullable ConstraintsMap androidAudioConfiguration) { if (mFactory != null) { return; } PeerConnectionFactory.initialize( - InitializationOptions.builder(context) - .setEnableInternalTracer(true) - .createInitializationOptions()); + InitializationOptions.builder(context) + .setEnableInternalTracer(true) + .createInitializationOptions()); + + getUserMediaImpl = new GetUserMediaImpl(this, context); + + cameraUtils = new CameraUtils(getUserMediaImpl, activity); + + frameCryptor = new FlutterRTCFrameCryptor(this); + + AudioAttributes audioAttributes = null; + if (androidAudioConfiguration != null) { + Integer usageType = AudioUtils.getAudioAttributesUsageTypeForString( + androidAudioConfiguration.getString("androidAudioAttributesUsageType")); + Integer contentType = AudioUtils.getAudioAttributesContentTypeFromString( + androidAudioConfiguration.getString("androidAudioAttributesContentType")); + + // Warn if one is provided without the other. 
+ if (usageType == null ^ contentType == null) { + Log.w(TAG, "usageType and contentType must both be provided!"); + } + + if (usageType != null && contentType != null) { + audioAttributes = new AudioAttributes.Builder() + .setUsage(usageType) + .setContentType(contentType) + .build(); + } + } + JavaAudioDeviceModule.Builder audioDeviceModuleBuilder = JavaAudioDeviceModule.builder(context); + + recordSamplesReadyCallbackAdapter = new RecordSamplesReadyCallbackAdapter(); + playbackSamplesReadyCallbackAdapter = new PlaybackSamplesReadyCallbackAdapter(); + + if(bypassVoiceProcessing) { + audioDeviceModuleBuilder.setUseHardwareAcousticEchoCanceler(false) + .setUseHardwareNoiseSuppressor(false) + .setUseStereoInput(true) + .setUseStereoOutput(true) + .setAudioSource(MediaRecorder.AudioSource.MIC); + } else { + boolean useHardwareAudioProcessing = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q; + boolean useLowLatency = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O; + audioDeviceModuleBuilder.setUseHardwareAcousticEchoCanceler(useHardwareAudioProcessing) + .setUseLowLatency(useLowLatency) + .setUseHardwareNoiseSuppressor(useHardwareAudioProcessing); + } + + audioDeviceModuleBuilder.setSamplesReadyCallback(recordSamplesReadyCallbackAdapter); + audioDeviceModuleBuilder.setPlaybackSamplesReadyCallback(playbackSamplesReadyCallbackAdapter); + + recordSamplesReadyCallbackAdapter.addCallback(getUserMediaImpl.inputSamplesInterceptor); + + recordSamplesReadyCallbackAdapter.addCallback(new JavaAudioDeviceModule.SamplesReadyCallback() { + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + for(LocalTrack track : localTracks.values()) { + if (track instanceof LocalAudioTrack) { + ((LocalAudioTrack) track).onWebRtcAudioRecordSamplesReady(audioSamples); + } + } + } + }); + + if (audioAttributes != null) { + audioDeviceModuleBuilder.setAudioAttributes(audioAttributes); + } + + audioDeviceModule = 
audioDeviceModuleBuilder.createAudioDeviceModule(); + + if(!bypassVoiceProcessing) { + if(JavaAudioDeviceModule.isBuiltInNoiseSuppressorSupported()) { + audioDeviceModule.setNoiseSuppressorEnabled(true); + } + } + + + getUserMediaImpl.audioDeviceModule = (JavaAudioDeviceModule) audioDeviceModule; + + final Options options = new Options(); + options.networkIgnoreMask = networkIgnoreMask; + + final PeerConnectionFactory.Builder factoryBuilder = PeerConnectionFactory.builder() + .setOptions(options); // Initialize EGL contexts required for HW acceleration. EglBase.Context eglContext = EglUtils.getRootEglBaseContext(); - getUserMediaImpl = new GetUserMediaImpl(this, context); + videoEncoderFactory = new CustomVideoEncoderFactory(eglContext, true, true); + videoDecoderFactory = new CustomVideoDecoderFactory(eglContext); - audioDeviceModule = JavaAudioDeviceModule.builder(context) - .setUseHardwareAcousticEchoCanceler(true) - .setUseHardwareNoiseSuppressor(true) - .setSamplesReadyCallback(getUserMediaImpl.inputSamplesInterceptor) - .createAudioDeviceModule(); + factoryBuilder + .setVideoEncoderFactory(videoEncoderFactory) + .setVideoDecoderFactory(videoDecoderFactory); - getUserMediaImpl.audioDeviceModule = (JavaAudioDeviceModule) audioDeviceModule; + videoDecoderFactory.setForceSWCodec(forceSWCodec); + videoDecoderFactory.setForceSWCodecList(forceSWCodecList); + videoEncoderFactory.setForceSWCodec(forceSWCodec); + videoEncoderFactory.setForceSWCodecList(forceSWCodecList); + + audioProcessingController = new AudioProcessingController(); + + factoryBuilder.setAudioProcessingFactory(audioProcessingController.externalAudioProcessingFactory); + + mFactory = factoryBuilder + .setAudioDeviceModule(audioDeviceModule) + .createPeerConnectionFactory(); - mFactory = PeerConnectionFactory.builder() - .setOptions(new Options()) - .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglContext, false, true)) - .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglContext)) - 
.setAudioDeviceModule(audioDeviceModule) - .createPeerConnectionFactory(); } @Override public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { - ensureInitialized(); final AnyThreadResult result = new AnyThreadResult(notSafeResult); switch (call.method) { + case "initialize": { + int networkIgnoreMask = Options.ADAPTER_TYPE_UNKNOWN; + Map options = call.argument("options"); + ConstraintsMap constraintsMap = new ConstraintsMap(options); + if (constraintsMap.hasKey("networkIgnoreMask") + && constraintsMap.getType("networkIgnoreMask") == ObjectType.Array) { + final ConstraintsArray ignoredAdapters = constraintsMap.getArray("networkIgnoreMask"); + if (ignoredAdapters != null) { + for (Object adapter : ignoredAdapters.toArrayList()) { + switch (adapter.toString()) { + case "adapterTypeEthernet": + networkIgnoreMask += Options.ADAPTER_TYPE_ETHERNET; + break; + case "adapterTypeWifi": + networkIgnoreMask += Options.ADAPTER_TYPE_WIFI; + break; + case "adapterTypeCellular": + networkIgnoreMask += Options.ADAPTER_TYPE_CELLULAR; + break; + case "adapterTypeVpn": + networkIgnoreMask += Options.ADAPTER_TYPE_VPN; + break; + case "adapterTypeLoopback": + networkIgnoreMask += Options.ADAPTER_TYPE_LOOPBACK; + break; + case "adapterTypeAny": + networkIgnoreMask += Options.ADAPTER_TYPE_ANY; + break; + } + } + + } + } + boolean forceSWCodec = false; + if (constraintsMap.hasKey("forceSWCodec") + && constraintsMap.getType("forceSWCodec") == ObjectType.Boolean) { + final boolean v = constraintsMap.getBoolean("forceSWCodec"); + forceSWCodec = v; + } + List forceSWCodecList = new ArrayList<>(); + if(constraintsMap.hasKey("forceSWCodecList") + && constraintsMap.getType("forceSWCodecList") == ObjectType.Array) { + final List array = constraintsMap.getListArray("forceSWCodecList"); + for(Object v : array) { + forceSWCodecList.add(v.toString()); + } + } else { + // disable HW Codec for VP9 by default. 
+ forceSWCodecList.add("VP9"); + } + + ConstraintsMap androidAudioConfiguration = null; + if (constraintsMap.hasKey("androidAudioConfiguration") + && constraintsMap.getType("androidAudioConfiguration") == ObjectType.Map) { + androidAudioConfiguration = constraintsMap.getMap("androidAudioConfiguration"); + } + boolean enableBypassVoiceProcessing = false; + if(options.get("bypassVoiceProcessing") != null) { + enableBypassVoiceProcessing = (boolean)options.get("bypassVoiceProcessing"); + } + initialize(enableBypassVoiceProcessing, networkIgnoreMask, forceSWCodec, forceSWCodecList, androidAudioConfiguration); + result.success(null); + break; + } case "createPeerConnection": { Map constraints = call.argument("constraints"); Map configuration = call.argument("configuration"); String peerConnectionId = peerConnectionInit(new ConstraintsMap(configuration), - new ConstraintsMap((constraints))); + new ConstraintsMap((constraints))); ConstraintsMap res = new ConstraintsMap(); res.putString("peerConnectionId", peerConnectionId); result.success(res.toMap()); @@ -203,7 +387,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { List audioTracks = new ArrayList<>(); List videoTracks = new ArrayList<>(); for (AudioTrack track : stream.audioTracks) { - localTracks.put(track.id(), track); + localTracks.put(track.id(), new LocalAudioTrack(track)); Map trackMap = new HashMap<>(); trackMap.put("enabled", track.enabled()); trackMap.put("id", track.id()); @@ -214,7 +398,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { audioTracks.add(trackMap); } for (VideoTrack track : stream.videoTracks) { - localTracks.put(track.id(), track); + localTracks.put(track.id(), new LocalVideoTrack(track)); Map trackMap = new HashMap<>(); trackMap.put("enabled", track.enabled()); trackMap.put("id", track.id()); @@ -245,14 +429,14 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { String peerConnectionId = 
call.argument("peerConnectionId"); Map description = call.argument("description"); peerConnectionSetLocalDescription(new ConstraintsMap(description), peerConnectionId, - result); + result); break; } case "setRemoteDescription": { String peerConnectionId = call.argument("peerConnectionId"); Map description = call.argument("description"); peerConnectionSetRemoteDescription(new ConstraintsMap(description), peerConnectionId, - result); + result); break; } case "sendDtmf": { @@ -265,9 +449,9 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { RtpSender audioSender = null; for (RtpSender sender : peerConnection.getSenders()) { - if (sender.track().kind().equals("audio")) { - audioSender = sender; - } + if (sender != null && sender.track() != null && sender.track().kind().equals("audio")) { + audioSender = sender; + } } if (audioSender != null) { DtmfSender dtmfSender = audioSender.dtmf(); @@ -275,10 +459,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } result.success("success"); } else { - Log.d(TAG, "dtmf() peerConnection is null"); - result - .error("dtmf", "sendDtmf() peerConnection is null", - null); + resultError("dtmf", "peerConnection is null", result); } break; } @@ -301,23 +482,23 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { createDataChannel(peerConnectionId, label, new ConstraintsMap(dataChannelDict), result); break; } + case "dataChannelGetBufferedAmount": { + String peerConnectionId = call.argument("peerConnectionId"); + String dataChannelId = call.argument("dataChannelId"); + dataChannelGetBufferedAmount(peerConnectionId, dataChannelId, result); + break; + } case "dataChannelSend": { String peerConnectionId = call.argument("peerConnectionId"); - int dataChannelId = call.argument("dataChannelId"); + String dataChannelId = call.argument("dataChannelId"); String type = call.argument("type"); Boolean isBinary = type.equals("binary"); ByteBuffer byteBuffer; if (isBinary) 
{ byteBuffer = ByteBuffer.wrap(call.argument("data")); } else { - try { String data = call.argument("data"); - byteBuffer = ByteBuffer.wrap(data.getBytes("UTF-8")); - } catch (UnsupportedEncodingException e) { - Log.d(TAG, "Could not encode text string as UTF-8."); - result.error("dataChannelSendFailed", "Could not encode text string as UTF-8.", null); - return; - } + byteBuffer = ByteBuffer.wrap(data.getBytes(StandardCharsets.UTF_8)); } dataChannelSend(peerConnectionId, dataChannelId, byteBuffer, isBinary); result.success(null); @@ -325,24 +506,22 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } case "dataChannelClose": { String peerConnectionId = call.argument("peerConnectionId"); - int dataChannelId = call.argument("dataChannelId"); + String dataChannelId = call.argument("dataChannelId"); dataChannelClose(peerConnectionId, dataChannelId); result.success(null); break; } case "streamDispose": { String streamId = call.argument("streamId"); - mediaStreamRelease(streamId); + streamDispose(streamId); result.success(null); break; } case "mediaStreamTrackSetEnable": { String trackId = call.argument("trackId"); Boolean enabled = call.argument("enabled"); - MediaStreamTrack track = getTrackForId(trackId); - if (track != null) { - track.setEnabled(enabled); - } + String peerConnectionId = call.argument("peerConnectionId"); + mediaStreamTrackSetEnabled(trackId, enabled, peerConnectionId); result.success(null); break; } @@ -350,17 +529,33 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { String streamId = call.argument("streamId"); String trackId = call.argument("trackId"); mediaStreamAddTrack(streamId, trackId, result); + for (int i = 0; i < renders.size(); i++) { + FlutterRTCVideoRenderer renderer = renders.valueAt(i); + if (renderer.checkMediaStream(streamId, "local")) { + LocalTrack track = localTracks.get(trackId); + if(track != null) { + renderer.setVideoTrack((VideoTrack) track.track); + } + } + } break; } 
case "mediaStreamRemoveTrack": { String streamId = call.argument("streamId"); String trackId = call.argument("trackId"); mediaStreamRemoveTrack(streamId, trackId, result); + removeStreamForRendererById(streamId); break; } case "trackDispose": { String trackId = call.argument("trackId"); - localTracks.remove(trackId); + trackDispose(trackId); + result.success(null); + break; + } + case "restartIce": { + String peerConnectionId = call.argument("peerConnectionId"); + restartIce(peerConnectionId); result.success(null); break; } @@ -377,22 +572,21 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { break; } case "createVideoRenderer": { - SurfaceTextureEntry entry = textures.createSurfaceTexture(); - SurfaceTexture surfaceTexture = entry.surfaceTexture(); - FlutterRTCVideoRenderer render = new FlutterRTCVideoRenderer(surfaceTexture, entry); - renders.put(entry.id(), render); + TextureRegistry.SurfaceProducer producer = textures.createSurfaceProducer(); + FlutterRTCVideoRenderer render = new FlutterRTCVideoRenderer(producer); + renders.put(producer.id(), render); EventChannel eventChannel = - new EventChannel( - messenger, - "FlutterWebRTC/Texture" + entry.id()); + new EventChannel( + messenger, + "FlutterWebRTC/Texture" + producer.id()); eventChannel.setStreamHandler(render); render.setEventChannel(eventChannel); - render.setId((int) entry.id()); + render.setId((int) producer.id()); ConstraintsMap params = new ConstraintsMap(); - params.putInt("textureId", (int) entry.id()); + params.putInt("textureId", (int) producer.id()); result.success(params.toMap()); break; } @@ -400,8 +594,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { int textureId = call.argument("textureId"); FlutterRTCVideoRenderer render = renders.get(textureId); if (render == null) { - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", - null); + resultError("videoRendererDispose", "render [" + textureId + "] 
not found !", result); return; } render.Dispose(); @@ -412,29 +605,74 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { case "videoRendererSetSrcObject": { int textureId = call.argument("textureId"); String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("ownerTag"); + String ownerTag = call.argument("ownerTag"); + String trackId = call.argument("trackId"); FlutterRTCVideoRenderer render = renders.get(textureId); - if (render == null) { - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", - null); + resultError("videoRendererSetSrcObject", "render [" + textureId + "] not found !", result); return; } - - MediaStream stream = getStreamForId(streamId, peerConnectionId); - render.setStream(stream); + MediaStream stream = null; + if (ownerTag.equals("local")) { + stream = localStreams.get(streamId); + } else { + stream = getStreamForId(streamId, ownerTag); + } + if (trackId != null && !trackId.equals("0")){ + render.setStream(stream, trackId, ownerTag); + } else { + render.setStream(stream, ownerTag); + } result.success(null); break; } case "mediaStreamTrackHasTorch": { String trackId = call.argument("trackId"); - getUserMediaImpl.hasTorch(trackId, result); + cameraUtils.hasTorch(trackId, result); break; } case "mediaStreamTrackSetTorch": { String trackId = call.argument("trackId"); boolean torch = call.argument("torch"); - getUserMediaImpl.setTorch(trackId, torch, result); + cameraUtils.setTorch(trackId, torch, result); + break; + } + case "mediaStreamTrackSetZoom": { + String trackId = call.argument("trackId"); + double zoomLevel = call.argument("zoomLevel"); + cameraUtils.setZoom(trackId, zoomLevel, result); + break; + } + case "mediaStreamTrackSetFocusMode": { + cameraUtils.setFocusMode(call, result); + break; + } + case "mediaStreamTrackSetFocusPoint":{ + Map focusPoint = call.argument("focusPoint"); + Boolean reset = (Boolean)focusPoint.get("reset"); + Double x = 
null; + Double y = null; + if (reset == null || !reset) { + x = (Double)focusPoint.get("x"); + y = (Double)focusPoint.get("y"); + } + cameraUtils.setFocusPoint(call, new Point(x, y), result); + break; + } + case "mediaStreamTrackSetExposureMode": { + cameraUtils.setExposureMode(call, result); + break; + } + case "mediaStreamTrackSetExposurePoint": { + Map exposurePoint = call.argument("exposurePoint"); + Boolean reset = (Boolean)exposurePoint.get("reset"); + Double x = null; + Double y = null; + if (reset == null || !reset) { + x = (Double)exposurePoint.get("x"); + y = (Double)exposurePoint.get("y"); + } + cameraUtils.setExposurePoint(call, new Point(x, y), result); break; } case "mediaStreamTrackSwitchCamera": { @@ -445,20 +683,55 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { case "setVolume": { String trackId = call.argument("trackId"); double volume = call.argument("volume"); - mediaStreamTrackSetVolume(trackId, volume); + String peerConnectionId = call.argument("peerConnectionId"); + mediaStreamTrackSetVolume(trackId, volume, peerConnectionId); + result.success(null); + break; + } + case "selectAudioOutput": { + String deviceId = call.argument("deviceId"); + AudioSwitchManager.instance.selectAudioOutput(AudioDeviceKind.fromTypeName(deviceId)); + result.success(null); + break; + } + case "clearAndroidCommunicationDevice": { + AudioSwitchManager.instance.clearCommunicationDevice(); result.success(null); break; } case "setMicrophoneMute": boolean mute = call.argument("mute"); - audioManager.setMicrophoneMute(mute); + AudioSwitchManager.instance.setMicrophoneMute(mute); + result.success(null); + break; + case "selectAudioInput": + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) { + String deviceId = call.argument("deviceId"); + getUserMediaImpl.setPreferredInputDevice(deviceId); + result.success(null); + } else { + result.notImplemented(); + } + break; + case "setAndroidAudioConfiguration": { + Map configuration = 
call.argument("configuration"); + AudioSwitchManager.instance.setAudioConfiguration(configuration); result.success(null); break; + } case "enableSpeakerphone": boolean enable = call.argument("enable"); - audioManager.setSpeakerphoneOn(enable); + AudioSwitchManager.instance.enableSpeakerphone(enable); + result.success(null); + break; + case "enableSpeakerphoneButPreferBluetooth": + AudioSwitchManager.instance.enableSpeakerButPreferBluetooth(); result.success(null); break; + case "requestCapturePermission": { + getUserMediaImpl.requestCapturePermission(result); + break; + } case "getDisplayMedia": { Map constraints = call.argument("constraints"); ConstraintsMap constraintsMap = new ConstraintsMap(constraints); @@ -472,14 +745,16 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { String path = call.argument("path"); VideoTrack videoTrack = null; String videoTrackId = call.argument("videoTrackId"); + String peerConnectionId = call.argument("peerConnectionId"); if (videoTrackId != null) { - MediaStreamTrack track = getTrackForId(videoTrackId); + MediaStreamTrack track = getTrackForId(videoTrackId, peerConnectionId); if (track instanceof VideoTrack) { videoTrack = (VideoTrack) track; } } AudioChannel audioChannel = null; - if (call.hasArgument("audioChannel")) { + if (call.hasArgument("audioChannel") + && call.argument("audioChannel") != null) { audioChannel = AudioChannel.values()[(Integer) call.argument("audioChannel")]; } Integer recorderId = call.argument("recorderId"); @@ -487,31 +762,34 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { getUserMediaImpl.startRecordingToFile(path, recorderId, videoTrack, audioChannel); result.success(null); } else { - result.error("0", "No tracks", null); + resultError("startRecordToFile", "No tracks", result); } } catch (Exception e) { - result.error("-1", e.getMessage(), e); + resultError("startRecordToFile", e.getMessage(), result); } break; case "stopRecordToFile": Integer 
recorderId = call.argument("recorderId"); - getUserMediaImpl.stopRecording(recorderId); + String albumName = call.argument("albumName"); + getUserMediaImpl.stopRecording(recorderId, albumName); result.success(null); break; - case "captureFrame": + case "captureFrame": { String path = call.argument("path"); String videoTrackId = call.argument("trackId"); + String peerConnectionId = call.argument("peerConnectionId"); if (videoTrackId != null) { - MediaStreamTrack track = getTrackForId(videoTrackId); + MediaStreamTrack track = getTrackForId(videoTrackId, peerConnectionId); if (track instanceof VideoTrack) { new FrameCapturer((VideoTrack) track, new File(path), result); } else { - result.error(null, "It's not video track", null); + resultError("captureFrame", "It's not video track", result); } } else { - result.error(null, "Track is null", null); + resultError("captureFrame", "Track is null", result); } break; + } case "getLocalDescription": { String peerConnectionId = call.argument("peerConnectionId"); PeerConnection peerConnection = getPeerConnection(peerConnectionId); @@ -522,9 +800,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { params.putString("type", sdp.type.canonicalForm()); result.success(params.toMap()); } else { - Log.d(TAG, "getLocalDescription() peerConnection is null"); - result.error("getLocalDescriptionFailed", "getLocalDescription() peerConnection is null", - null); + resultError("getLocalDescription", "peerConnection is null", result); } break; } @@ -533,15 +809,16 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { PeerConnection peerConnection = getPeerConnection(peerConnectionId); if (peerConnection != null) { SessionDescription sdp = peerConnection.getRemoteDescription(); - ConstraintsMap params = new ConstraintsMap(); - params.putString("sdp", sdp.description); - params.putString("type", sdp.type.canonicalForm()); - result.success(params.toMap()); + if (null == sdp) { + 
result.success(null); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + result.success(params.toMap()); + } } else { - Log.d(TAG, "getRemoteDescription() peerConnection is null"); - result - .error("getRemoteDescriptionFailed", "getRemoteDescription() peerConnection is null", - null); + resultError("getRemoteDescription", "peerConnection is null", result); } break; } @@ -553,17 +830,236 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { peerConnectionSetConfiguration(new ConstraintsMap(configuration), peerConnection); result.success(null); } else { - Log.d(TAG, "setConfiguration() peerConnection is null"); - result.error("setConfigurationFailed", "setConfiguration() peerConnection is null", null); + resultError("setConfiguration", "peerConnection is null", result); + } + break; + } + case "addTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String trackId = call.argument("trackId"); + List streamIds = call.argument("streamIds"); + addTrack(peerConnectionId, trackId, streamIds, result); + break; + } + case "removeTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String senderId = call.argument("senderId"); + + removeTrack(peerConnectionId, senderId, result); + break; + } + case "addTransceiver": { + String peerConnectionId = call.argument("peerConnectionId"); + Map transceiverInit = call.argument("transceiverInit"); + if (call.hasArgument("trackId")) { + String trackId = call.argument("trackId"); + addTransceiver(peerConnectionId, trackId, transceiverInit, result); + } else if (call.hasArgument("mediaType")) { + String mediaType = call.argument("mediaType"); + addTransceiverOfType(peerConnectionId, mediaType, transceiverInit, result); + } else { + resultError("addTransceiver", "Incomplete parameters", result); + } + break; + } + case "rtpTransceiverSetDirection": { + String 
peerConnectionId = call.argument("peerConnectionId"); + String direction = call.argument("direction"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverSetDirection(peerConnectionId, direction, transceiverId, result); + break; + } + case "rtpTransceiverGetDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverGetDirection(peerConnectionId, transceiverId, result); + break; + } + case "rtpTransceiverGetCurrentDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverGetCurrentDirection(peerConnectionId, transceiverId, result); + break; + } + case "rtpTransceiverStop": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverStop(peerConnectionId, transceiverId, result); + break; + } + case "rtpSenderSetParameters": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + Map parameters = call.argument("parameters"); + rtpSenderSetParameters(peerConnectionId, rtpSenderId, parameters, result); + break; + } + case "rtpSenderReplaceTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, true, result); + break; + } + case "rtpSenderSetTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, false, result); + break; + } + case "rtpSenderSetStreams": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + List streamIds 
= call.argument("streamIds"); + rtpSenderSetStreams(peerConnectionId, rtpSenderId, streamIds, result); + break; + } + case "getSenders": { + String peerConnectionId = call.argument("peerConnectionId"); + getSenders(peerConnectionId, result); + break; + } + case "getReceivers": { + String peerConnectionId = call.argument("peerConnectionId"); + getReceivers(peerConnectionId, result); + break; + } + case "getTransceivers": { + String peerConnectionId = call.argument("peerConnectionId"); + getTransceivers(peerConnectionId, result); + break; + } + case "setPreferredInputDevice": { + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) { + String deviceId = call.argument("deviceId"); + getUserMediaImpl.setPreferredInputDevice(deviceId); + result.success(null); + } else { + result.notImplemented(); + } + break; + } + case "getRtpSenderCapabilities": { + String kind = call.argument("kind"); + MediaStreamTrack.MediaType mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if (kind.equals("video")) { + mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + } + RtpCapabilities capabilities = mFactory.getRtpSenderCapabilities(mediaType); + result.success(capabilitiestoMap(capabilities).toMap()); + break; + } + case "getRtpReceiverCapabilities": { + String kind = call.argument("kind"); + MediaStreamTrack.MediaType mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if (kind.equals("video")) { + mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + } + RtpCapabilities capabilities = mFactory.getRtpReceiverCapabilities(mediaType); + result.success(capabilitiestoMap(capabilities).toMap()); + break; + } + case "setCodecPreferences": { + String peerConnectionId = call.argument("peerConnectionId"); + List<Map<String, Object>> codecs = call.argument("codecs"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverSetCodecPreferences(peerConnectionId, transceiverId, codecs, result); + break; + } + case "getSignalingState": { + String peerConnectionId = 
call.argument("peerConnectionId"); + PeerConnection pc = getPeerConnection(peerConnectionId); + if (pc == null) { + resultError("getSignalingState", "peerConnection is null", result); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("state", Utils.signalingStateString(pc.signalingState())); + result.success(params.toMap()); + } + break; + } + case "getIceGatheringState": { + String peerConnectionId = call.argument("peerConnectionId"); + PeerConnection pc = getPeerConnection(peerConnectionId); + if (pc == null) { + resultError("getIceGatheringState", "peerConnection is null", result); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("state", Utils.iceGatheringStateString(pc.iceGatheringState())); + result.success(params.toMap()); + } + break; + } + case "getIceConnectionState": { + String peerConnectionId = call.argument("peerConnectionId"); + PeerConnection pc = getPeerConnection(peerConnectionId); + if (pc == null) { + resultError("getIceConnectionState", "peerConnection is null", result); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("state", Utils.iceConnectionStateString(pc.iceConnectionState())); + result.success(params.toMap()); + } + break; + } + case "getConnectionState": { + String peerConnectionId = call.argument("peerConnectionId"); + PeerConnection pc = getPeerConnection(peerConnectionId); + if (pc == null) { + resultError("getConnectionState", "peerConnection is null", result); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("state", Utils.connectionStateString(pc.connectionState())); + result.success(params.toMap()); } break; } default: + if(frameCryptor.handleMethodCall(call, result)) { + break; + } result.notImplemented(); break; } } + private ConstraintsMap capabilitiestoMap(RtpCapabilities capabilities) { + ConstraintsMap capabilitiesMap = new ConstraintsMap(); + ConstraintsArray codecArr = new ConstraintsArray(); + 
for(RtpCapabilities.CodecCapability codec : capabilities.codecs){ + ConstraintsMap codecMap = new ConstraintsMap(); + codecMap.putString("mimeType", codec.mimeType); + codecMap.putInt("clockRate", codec.clockRate); + if(codec.numChannels != null) + codecMap.putInt("channels", codec.numChannels); + List<String> sdpFmtpLineArr = new ArrayList<>(); + for(Map.Entry<String, String> entry : codec.parameters.entrySet()) { + if(entry.getKey().length() > 0) { + sdpFmtpLineArr.add(entry.getKey() + "=" + entry.getValue()); + } else { + sdpFmtpLineArr.add(entry.getValue()); + } + } + if(sdpFmtpLineArr.size() > 0) + codecMap.putString("sdpFmtpLine", String.join(";", sdpFmtpLineArr)); + codecArr.pushMap(codecMap); + } + ConstraintsArray headerExtensionsArr = new ConstraintsArray(); + for(RtpCapabilities.HeaderExtensionCapability headerExtension : capabilities.headerExtensions){ + ConstraintsMap headerExtensionMap = new ConstraintsMap(); + headerExtensionMap.putString("uri", headerExtension.getUri()); + headerExtensionMap.putInt("id", headerExtension.getPreferredId()); + headerExtensionMap.putBoolean("encrypted", headerExtension.getPreferredEncrypted()); + headerExtensionsArr.pushMap(headerExtensionMap); + } + capabilitiesMap.putArray("codecs", codecArr.toArrayList()); + capabilitiesMap.putArray("headerExtensions", headerExtensionsArr.toArrayList()); + ConstraintsArray fecMechanismsArr = new ConstraintsArray(); + capabilitiesMap.putArray("fecMechanisms", fecMechanismsArr.toArrayList()); + return capabilitiesMap; + } + private PeerConnection getPeerConnection(String id) { PeerConnectionObserver pco = mPeerConnectionObservers.get(id); return (pco == null) ? 
null : pco.getPeerConnection(); @@ -575,26 +1071,26 @@ private List createIceServers(ConstraintsArray iceServersArray) { for (int i = 0; i < size; i++) { ConstraintsMap iceServerMap = iceServersArray.getMap(i); boolean hasUsernameAndCredential = - iceServerMap.hasKey("username") && iceServerMap.hasKey("credential"); + iceServerMap.hasKey("username") && iceServerMap.hasKey("credential"); if (iceServerMap.hasKey("url")) { if (hasUsernameAndCredential) { iceServers.add(IceServer.builder(iceServerMap.getString("url")) - .setUsername(iceServerMap.getString("username")) - .setPassword(iceServerMap.getString("credential")).createIceServer()); + .setUsername(iceServerMap.getString("username")) + .setPassword(iceServerMap.getString("credential")).createIceServer()); } else { iceServers.add( - IceServer.builder(iceServerMap.getString("url")).createIceServer()); + IceServer.builder(iceServerMap.getString("url")).createIceServer()); } } else if (iceServerMap.hasKey("urls")) { switch (iceServerMap.getType("urls")) { case String: if (hasUsernameAndCredential) { iceServers.add(IceServer.builder(iceServerMap.getString("urls")) - .setUsername(iceServerMap.getString("username")) - .setPassword(iceServerMap.getString("credential")).createIceServer()); + .setUsername(iceServerMap.getString("username")) + .setPassword(iceServerMap.getString("credential")).createIceServer()); } else { iceServers.add(IceServer.builder(iceServerMap.getString("urls")) - .createIceServer()); + .createIceServer()); } break; case Array: @@ -609,8 +1105,8 @@ private List createIceServers(ConstraintsArray iceServersArray) { if (hasUsernameAndCredential) { builder - .setUsername(iceServerMap.getString("username")) - .setPassword(iceServerMap.getString("credential")); + .setUsername(iceServerMap.getString("username")) + .setPassword(iceServerMap.getString("credential")); } iceServers.add(builder.createIceServer()); @@ -635,7 +1131,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // 
iceTransportPolicy (public api) if (map.hasKey("iceTransportPolicy") - && map.getType("iceTransportPolicy") == ObjectType.String) { + && map.getType("iceTransportPolicy") == ObjectType.String) { final String v = map.getString("iceTransportPolicy"); if (v != null) { switch (v) { @@ -657,7 +1153,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // bundlePolicy (public api) if (map.hasKey("bundlePolicy") - && map.getType("bundlePolicy") == ObjectType.String) { + && map.getType("bundlePolicy") == ObjectType.String) { final String v = map.getString("bundlePolicy"); if (v != null) { switch (v) { @@ -676,7 +1172,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // rtcpMuxPolicy (public api) if (map.hasKey("rtcpMuxPolicy") - && map.getType("rtcpMuxPolicy") == ObjectType.String) { + && map.getType("rtcpMuxPolicy") == ObjectType.String) { final String v = map.getString("rtcpMuxPolicy"); if (v != null) { switch (v) { @@ -695,7 +1191,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // iceCandidatePoolSize of type unsigned short, defaulting to 0 if (map.hasKey("iceCandidatePoolSize") - && map.getType("iceCandidatePoolSize") == ObjectType.Number) { + && map.getType("iceCandidatePoolSize") == ObjectType.Number) { final int v = map.getInt("iceCandidatePoolSize"); if (v > 0) { conf.iceCandidatePoolSize = v; @@ -704,7 +1200,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // sdpSemantics if (map.hasKey("sdpSemantics") - && map.getType("sdpSemantics") == ObjectType.String) { + && map.getType("sdpSemantics") == ObjectType.String) { final String v = map.getString("sdpSemantics"); if (v != null) { switch (v) { @@ -718,11 +1214,17 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { } } + // maxIPv6Networks + if (map.hasKey("maxIPv6Networks") + && map.getType("maxIPv6Networks") == ObjectType.Number) { + conf.maxIPv6Networks = map.getInt("maxIPv6Networks"); + } + // === 
below is private api in webrtc === // tcpCandidatePolicy (private api) if (map.hasKey("tcpCandidatePolicy") - && map.getType("tcpCandidatePolicy") == ObjectType.String) { + && map.getType("tcpCandidatePolicy") == ObjectType.String) { final String v = map.getString("tcpCandidatePolicy"); if (v != null) { switch (v) { @@ -738,7 +1240,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // candidateNetworkPolicy (private api) if (map.hasKey("candidateNetworkPolicy") - && map.getType("candidateNetworkPolicy") == ObjectType.String) { + && map.getType("candidateNetworkPolicy") == ObjectType.String) { final String v = map.getString("candidateNetworkPolicy"); if (v != null) { switch (v) { @@ -754,7 +1256,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // KeyType (private api) if (map.hasKey("keyType") - && map.getType("keyType") == ObjectType.String) { + && map.getType("keyType") == ObjectType.String) { final String v = map.getString("keyType"); if (v != null) { switch (v) { @@ -770,7 +1272,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // continualGatheringPolicy (private api) if (map.hasKey("continualGatheringPolicy") - && map.getType("continualGatheringPolicy") == ObjectType.String) { + && map.getType("continualGatheringPolicy") == ObjectType.String) { final String v = map.getString("continualGatheringPolicy"); if (v != null) { switch (v) { @@ -786,7 +1288,7 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // audioJitterBufferMaxPackets (private api) if (map.hasKey("audioJitterBufferMaxPackets") - && map.getType("audioJitterBufferMaxPackets") == ObjectType.Number) { + && map.getType("audioJitterBufferMaxPackets") == ObjectType.Number) { final int v = map.getInt("audioJitterBufferMaxPackets"); if (v > 0) { conf.audioJitterBufferMaxPackets = v; @@ -795,66 +1297,100 @@ private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { // iceConnectionReceivingTimeout (private 
api) if (map.hasKey("iceConnectionReceivingTimeout") - && map.getType("iceConnectionReceivingTimeout") == ObjectType.Number) { + && map.getType("iceConnectionReceivingTimeout") == ObjectType.Number) { final int v = map.getInt("iceConnectionReceivingTimeout"); conf.iceConnectionReceivingTimeout = v; } // iceBackupCandidatePairPingInterval (private api) if (map.hasKey("iceBackupCandidatePairPingInterval") - && map.getType("iceBackupCandidatePairPingInterval") == ObjectType.Number) { + && map.getType("iceBackupCandidatePairPingInterval") == ObjectType.Number) { final int v = map.getInt("iceBackupCandidatePairPingInterval"); conf.iceBackupCandidatePairPingInterval = v; } // audioJitterBufferFastAccelerate (private api) if (map.hasKey("audioJitterBufferFastAccelerate") - && map.getType("audioJitterBufferFastAccelerate") == ObjectType.Boolean) { + && map.getType("audioJitterBufferFastAccelerate") == ObjectType.Boolean) { final boolean v = map.getBoolean("audioJitterBufferFastAccelerate"); conf.audioJitterBufferFastAccelerate = v; } // pruneTurnPorts (private api) if (map.hasKey("pruneTurnPorts") - && map.getType("pruneTurnPorts") == ObjectType.Boolean) { + && map.getType("pruneTurnPorts") == ObjectType.Boolean) { final boolean v = map.getBoolean("pruneTurnPorts"); conf.pruneTurnPorts = v; } // presumeWritableWhenFullyRelayed (private api) if (map.hasKey("presumeWritableWhenFullyRelayed") - && map.getType("presumeWritableWhenFullyRelayed") == ObjectType.Boolean) { + && map.getType("presumeWritableWhenFullyRelayed") == ObjectType.Boolean) { final boolean v = map.getBoolean("presumeWritableWhenFullyRelayed"); conf.presumeWritableWhenFullyRelayed = v; } - + // cryptoOptions + if (map.hasKey("cryptoOptions") + && map.getType("cryptoOptions") == ObjectType.Map) { + final ConstraintsMap cryptoOptions = map.getMap("cryptoOptions"); + conf.cryptoOptions = CryptoOptions.builder() + .setEnableGcmCryptoSuites(cryptoOptions.hasKey("enableGcmCryptoSuites") && 
cryptoOptions.getBoolean("enableGcmCryptoSuites")) + .setRequireFrameEncryption(cryptoOptions.hasKey("requireFrameEncryption") && cryptoOptions.getBoolean("requireFrameEncryption")) + .setEnableEncryptedRtpHeaderExtensions(cryptoOptions.hasKey("enableEncryptedRtpHeaderExtensions") && cryptoOptions.getBoolean("enableEncryptedRtpHeaderExtensions")) + .setEnableAes128Sha1_32CryptoCipher(cryptoOptions.hasKey("enableAes128Sha1_32CryptoCipher") && cryptoOptions.getBoolean("enableAes128Sha1_32CryptoCipher")) + .createCryptoOptions(); + } + if (map.hasKey("enableCpuOveruseDetection") + && map.getType("enableCpuOveruseDetection") == ObjectType.Boolean) { + final boolean v = map.getBoolean("enableCpuOveruseDetection"); + conf.enableCpuOveruseDetection = v; + } return conf; } public String peerConnectionInit(ConstraintsMap configuration, ConstraintsMap constraints) { String peerConnectionId = getNextStreamUUID(); - PeerConnectionObserver observer = new PeerConnectionObserver(this, messenger, peerConnectionId); + RTCConfiguration conf = parseRTCConfiguration(configuration); + PeerConnectionObserver observer = new PeerConnectionObserver(conf, this, messenger, peerConnectionId); PeerConnection peerConnection - = mFactory.createPeerConnection( - parseRTCConfiguration(configuration), - parseMediaConstraints(constraints), - observer); + = mFactory.createPeerConnection( + conf, + parseMediaConstraints(constraints), + observer); observer.setPeerConnection(peerConnection); - if (mPeerConnectionObservers.size() == 0) { - audioManager.onAudioManagerRequested(true); - } mPeerConnectionObservers.put(peerConnectionId, observer); return peerConnectionId; } @Override - public Map getLocalStreams() { - return localStreams; + public boolean putLocalStream(String streamId, MediaStream stream) { + localStreams.put(streamId, stream); + return true; } @Override - public Map getLocalTracks() { - return localTracks; + public boolean putLocalTrack(String trackId, LocalTrack track) { + 
localTracks.put(trackId, track); + return true; + } + + @Override + public LocalTrack getLocalTrack(String trackId) { + return localTracks.get(trackId); + } + + public MediaStreamTrack getRemoteTrack(String trackId) { + for (Entry entry : mPeerConnectionObservers.entrySet()) { + PeerConnectionObserver pco = entry.getValue(); + MediaStreamTrack track = pco.remoteTracks.get(trackId); + if (track == null) { + track = pco.getTransceiversTrack(trackId); + } + if (track != null) { + return track; + } + } + return null; } @Override @@ -874,7 +1410,7 @@ public String getNextTrackUUID() { do { uuid = UUID.randomUUID().toString(); - } while (getTrackForId(uuid) != null); + } while (getTrackForId(uuid, null) != null); return uuid; } @@ -884,48 +1420,76 @@ public PeerConnectionFactory getPeerConnectionFactory() { return mFactory; } + @Override + public PeerConnectionObserver getPeerConnectionObserver(String peerConnectionId) { + return mPeerConnectionObservers.get(peerConnectionId); + } + @Nullable @Override public Activity getActivity() { return activity; } - MediaStream getStreamForId(String id, String peerConnectionId) { - MediaStream stream = localStreams.get(id); + @Nullable + @Override + public Context getApplicationContext() { + return context; + } - if (stream == null) { - if (peerConnectionId.length() > 0) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + @Override + public BinaryMessenger getMessenger() { + return messenger; + } + + MediaStream getStreamForId(String id, String peerConnectionId) { + MediaStream stream = null; + if (peerConnectionId.length() > 0) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco != null) { stream = pco.remoteStreams.get(id); - } else { - for (Entry entry : mPeerConnectionObservers - .entrySet()) { - PeerConnectionObserver pco = entry.getValue(); - stream = pco.remoteStreams.get(id); - if (stream != null) { - break; - } + } + } else { + for (Entry entry : 
mPeerConnectionObservers + .entrySet()) { + PeerConnectionObserver pco = entry.getValue(); + stream = pco.remoteStreams.get(id); + if (stream != null) { + break; } } } + if (stream == null) { + stream = localStreams.get(id); + } return stream; } - private MediaStreamTrack getTrackForId(String trackId) { - MediaStreamTrack track = localTracks.get(trackId); - - if (track == null) { + public MediaStreamTrack getTrackForId(String trackId, String peerConnectionId) { + LocalTrack localTrack = localTracks.get(trackId); + MediaStreamTrack mediaStreamTrack = null; + if (localTrack == null) { for (Entry entry : mPeerConnectionObservers.entrySet()) { + if (peerConnectionId != null && entry.getKey().compareTo(peerConnectionId) != 0) + continue; + PeerConnectionObserver pco = entry.getValue(); - track = pco.remoteTracks.get(trackId); - if (track != null) { + mediaStreamTrack = pco.remoteTracks.get(trackId); + + if (mediaStreamTrack == null) { + mediaStreamTrack = pco.getTransceiversTrack(trackId); + } + + if (mediaStreamTrack != null) { break; } } + } else { + mediaStreamTrack = localTrack.track; } - return track; + return mediaStreamTrack; } @@ -938,9 +1502,7 @@ public void getUserMedia(ConstraintsMap constraints, Result result) { // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. - result.error( - /* type */ "getUserMediaFailed", - "Failed to create new media stream", null); + resultError("getUserMediaFailed", "Failed to create new media stream", result); return; } @@ -956,9 +1518,7 @@ public void getDisplayMedia(ConstraintsMap constraints, Result result) { // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. 
- result.error( - /* type */ "getDisplayMedia", - "Failed to create new media stream", null); + resultError("getDisplayMedia", "Failed to create new media stream", result); return; } @@ -976,16 +1536,45 @@ public void getSources(Result result) { } } - ConstraintsMap audio = new ConstraintsMap(); - audio.putString("label", "Audio"); - audio.putString("deviceId", "audio-1"); - audio.putString("facing", ""); - audio.putString("kind", "audioinput"); - array.pushMap(audio); - + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + ConstraintsMap audio = new ConstraintsMap(); + audio.putString("label", "Audio"); + audio.putString("deviceId", "audio-1"); + audio.putString("kind", "audioinput"); + audio.putString("groupId", "microphone"); + array.pushMap(audio); + } else { + android.media.AudioManager audioManager = ((android.media.AudioManager) context + .getSystemService(Context.AUDIO_SERVICE)); + final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS); + for (int i = 0; i < devices.length; i++) { + AudioDeviceInfo device = devices[i]; + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC || device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO || + device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + ConstraintsMap audio = new ConstraintsMap(); + audio.putString("label", AudioUtils.getAudioDeviceLabel(device)); + audio.putString("deviceId", AudioUtils.getAudioDeviceId(device)); + audio.putString("groupId", AudioUtils.getAudioGroupId(device)); + audio.putString("kind", "audioinput"); + array.pushMap(audio); + } + } + } + + List audioOutputs = AudioSwitchManager.instance.availableAudioDevices(); + + for (AudioDevice audioOutput : audioOutputs) { + ConstraintsMap audioOutputMap = new ConstraintsMap(); + audioOutputMap.putString("label", audioOutput.getName()); + audioOutputMap.putString("deviceId", AudioDeviceKind.fromAudioDevice(audioOutput).typeName); + audioOutputMap.putString("groupId", "" + 
AudioDeviceKind.fromAudioDevice(audioOutput).typeName); + audioOutputMap.putString("kind", "audiooutput"); + array.pushMap(audioOutputMap); + } + ConstraintsMap map = new ConstraintsMap(); map.putArray("sources", array.toArrayList()); - + result.success(map.toMap()); } @@ -995,7 +1584,7 @@ private void createLocalMediaStream(Result result) { localStreams.put(streamId, mediaStream); if (mediaStream == null) { - result.error(/* type */ "createLocalMediaStream", "Failed to create new media stream", null); + resultError("createLocalMediaStream", "Failed to create new media stream", result); return; } Map resultMap = new HashMap<>(); @@ -1003,25 +1592,23 @@ private void createLocalMediaStream(Result result) { result.success(resultMap); } - public void mediaStreamTrackStop(final String id) { - // Is this functionality equivalent to `mediaStreamTrackRelease()` ? - // if so, we should merge this two and remove track from stream as well. - MediaStreamTrack track = localTracks.get(id); + public void trackDispose(final String trackId) { + LocalTrack track = localTracks.get(trackId); if (track == null) { - Log.d(TAG, "mediaStreamTrackStop() track is null"); + Log.d(TAG, "trackDispose() track is null"); return; } + removeTrackForRendererById(trackId); track.setEnabled(false); - if (track.kind().equals("video")) { - getUserMediaImpl.removeVideoCapturer(id); + if (track instanceof LocalVideoTrack) { + getUserMediaImpl.removeVideoCapturer(trackId); } - localTracks.remove(id); - // What exactly does `detached` mean in doc? 
- // see: https://www.w3.org/TR/mediacapture-streams/#track-detached + localTracks.remove(trackId); } - public void mediaStreamTrackSetEnabled(final String id, final boolean enabled) { - MediaStreamTrack track = localTracks.get(id); + public void mediaStreamTrackSetEnabled(final String id, final boolean enabled, String peerConnectionId) { + MediaStreamTrack track = getTrackForId(id, peerConnectionId); + if (track == null) { Log.d(TAG, "mediaStreamTrackSetEnabled() track is null"); return; @@ -1031,9 +1618,9 @@ public void mediaStreamTrackSetEnabled(final String id, final boolean enabled) { track.setEnabled(enabled); } - public void mediaStreamTrackSetVolume(final String id, final double volume) { - MediaStreamTrack track = localTracks.get(id); - if (track != null && track instanceof AudioTrack) { + public void mediaStreamTrackSetVolume(final String id, final double volume, String peerConnectionId) { + MediaStreamTrack track = getTrackForId(id, null); + if (track instanceof AudioTrack) { Log.d(TAG, "setVolume(): " + id + "," + volume); try { ((AudioTrack) track).setVolume(volume); @@ -1045,50 +1632,50 @@ public void mediaStreamTrackSetVolume(final String id, final double volume) { } } - public void mediaStreamAddTrack(final String streaemId, final String trackId, Result result) { - MediaStream mediaStream = localStreams.get(streaemId); + public void mediaStreamAddTrack(final String streamId, final String trackId, Result result) { + MediaStream mediaStream = localStreams.get(streamId); if (mediaStream != null) { - MediaStreamTrack track = localTracks.get(trackId); + MediaStreamTrack track = getTrackForId(trackId, null);//localTracks.get(trackId); if (track != null) { - if (track.kind().equals("audio")) { + String kind = track.kind(); + if (kind.equals("audio")) { mediaStream.addTrack((AudioTrack) track); - } else if (track.kind().equals("video")) { + result.success(null); + } else if (kind.equals("video")) { mediaStream.addTrack((VideoTrack) track); + 
result.success(null); + } else { + resultError("mediaStreamAddTrack", "mediaStreamAddTrack() track [" + trackId + "] has unsupported type: " + kind, result); } } else { - String errorMsg = "mediaStreamAddTrack() track [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); + resultError("mediaStreamAddTrack", "mediaStreamAddTrack() track [" + trackId + "] is null", result); } } else { - String errorMsg = "mediaStreamAddTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); + resultError("mediaStreamAddTrack", "mediaStreamAddTrack() stream [" + streamId + "] is null", result); } - result.success(null); } - public void mediaStreamRemoveTrack(final String streaemId, final String trackId, Result result) { - MediaStream mediaStream = localStreams.get(streaemId); + public void mediaStreamRemoveTrack(final String streamId, final String trackId, Result result) { + MediaStream mediaStream = localStreams.get(streamId); if (mediaStream != null) { - MediaStreamTrack track = localTracks.get(trackId); + LocalTrack track = localTracks.get(trackId); if (track != null) { - if (track.kind().equals("audio")) { - mediaStream.removeTrack((AudioTrack) track); - } else if (track.kind().equals("video")) { - mediaStream.removeTrack((VideoTrack) track); + String kind = track.kind(); + if (kind.equals("audio")) { + mediaStream.removeTrack((AudioTrack) track.track); + result.success(null); + } else if (kind.equals("video")) { + mediaStream.removeTrack((VideoTrack) track.track); + result.success(null); + } else { + resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() track [" + trackId + "] has unsupported type: " + kind, result); } } else { - String errorMsg = "mediaStreamRemoveTrack() track [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); + resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() 
track [" + trackId + "] is null", result); } } else { - String errorMsg = "mediaStreamRemoveTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); + resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() stream [" + streamId + "] is null", result); } - result.success(null); } public void mediaStreamTrackRelease(final String streamId, final String _trackId) { @@ -1097,7 +1684,7 @@ public void mediaStreamTrackRelease(final String streamId, final String _trackId Log.d(TAG, "mediaStreamTrackRelease() stream is null"); return; } - MediaStreamTrack track = localTracks.get(_trackId); + LocalTrack track = localTracks.get(_trackId); if (track == null) { Log.d(TAG, "mediaStreamTrackRelease() track is null"); return; @@ -1105,9 +1692,9 @@ public void mediaStreamTrackRelease(final String streamId, final String _trackId track.setEnabled(false); // should we do this? localTracks.remove(_trackId); if (track.kind().equals("audio")) { - stream.removeTrack((AudioTrack) track); + stream.removeTrack((AudioTrack) track.track); } else if (track.kind().equals("video")) { - stream.removeTrack((VideoTrack) track); + stream.removeTrack((VideoTrack) track.track); getUserMediaImpl.removeVideoCapturer(_trackId); } } @@ -1124,10 +1711,11 @@ public ConstraintsMap getCameraInfo(int index) { ConstraintsMap params = new ConstraintsMap(); String facing = info.facing == 1 ? 
"front" : "back"; params.putString("label", - "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation); + "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation); params.putString("deviceId", "" + index); params.putString("facing", facing); params.putString("kind", "videoinput"); + params.putString("groupId", "camera"); return params; } @@ -1141,7 +1729,7 @@ private MediaConstraints defaultConstraints() { } public void peerConnectionSetConfiguration(ConstraintsMap configuration, - PeerConnection peerConnection) { + PeerConnection peerConnection) { if (peerConnection == null) { Log.d(TAG, "peerConnectionSetConfiguration() peerConnection is null"); return; @@ -1161,9 +1749,7 @@ public void peerConnectionAddStream(final String streamId, final String id, Resu Log.d(TAG, "addStream" + result); result.success(res); } else { - Log.d(TAG, "peerConnectionAddStream() peerConnection is null"); - result.error("peerConnectionAddStreamFailed", - "peerConnectionAddStream() peerConnection is null", null); + resultError("peerConnectionAddStream", "peerConnection is null", result); } } @@ -1178,23 +1764,21 @@ public void peerConnectionRemoveStream(final String streamId, final String id, R peerConnection.removeStream(mediaStream); result.success(null); } else { - Log.d(TAG, "peerConnectionRemoveStream() peerConnection is null"); - result.error("peerConnectionRemoveStreamFailed", - "peerConnectionAddStream() peerConnection is null", null); + resultError("peerConnectionRemoveStream", "peerConnection is null", result); } } public void peerConnectionCreateOffer( - String id, - ConstraintsMap constraints, - final Result result) { + String id, + ConstraintsMap constraints, + final Result result) { PeerConnection peerConnection = getPeerConnection(id); if (peerConnection != null) { peerConnection.createOffer(new SdpObserver() { @Override public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_OFFER_ERROR", s, null); + 
resultError("peerConnectionCreateOffer", "WEBRTC_CREATE_OFFER_ERROR: " + s, result); } @Override @@ -1214,22 +1798,21 @@ public void onSetSuccess() { } }, parseMediaConstraints(constraints)); } else { - Log.d(TAG, "peerConnectionCreateOffer() peerConnection is null"); - result.error("WEBRTC_CREATE_OFFER_ERROR", "peerConnection is null", null); + resultError("peerConnectionCreateOffer", "WEBRTC_CREATE_OFFER_ERROR", result); } } public void peerConnectionCreateAnswer( - String id, - ConstraintsMap constraints, - final Result result) { + String id, + ConstraintsMap constraints, + final Result result) { PeerConnection peerConnection = getPeerConnection(id); if (peerConnection != null) { peerConnection.createAnswer(new SdpObserver() { @Override public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_ANSWER_ERROR", s, null); + resultError("peerConnectionCreateAnswer", "WEBRTC_CREATE_ANSWER_ERROR: " + s, result); } @Override @@ -1249,20 +1832,17 @@ public void onSetSuccess() { } }, parseMediaConstraints(constraints)); } else { - Log.d(TAG, "peerConnectionCreateAnswer() peerConnection is null"); - result.error("WEBRTC_CREATE_ANSWER_ERROR", "peerConnection is null", null); + resultError("peerConnectionCreateAnswer", "peerConnection is null", result); } } public void peerConnectionSetLocalDescription(ConstraintsMap sdpMap, final String id, - final Result result) { + final Result result) { PeerConnection peerConnection = getPeerConnection(id); - - Log.d(TAG, "peerConnectionSetLocalDescription() start"); if (peerConnection != null) { SessionDescription sdp = new SessionDescription( - Type.fromCanonicalForm(sdpMap.getString("type")), - sdpMap.getString("sdp") + Type.fromCanonicalForm(sdpMap.getString("type")), + sdpMap.getString("sdp") ); peerConnection.setLocalDescription(new SdpObserver() { @@ -1281,26 +1861,21 @@ public void onCreateFailure(String s) { @Override public void onSetFailure(String s) { - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", s, null); 
+ resultError("peerConnectionSetLocalDescription", "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: " + s, result); } }, sdp); } else { - Log.d(TAG, "peerConnectionSetLocalDescription() peerConnection is null"); - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", "peerConnection is null", null); + resultError("peerConnectionSetLocalDescription", "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: peerConnection is null", result); } - Log.d(TAG, "peerConnectionSetLocalDescription() end"); } public void peerConnectionSetRemoteDescription(final ConstraintsMap sdpMap, final String id, - final Result result) { + final Result result) { PeerConnection peerConnection = getPeerConnection(id); - // final String d = sdpMap.getString("type"); - - Log.d(TAG, "peerConnectionSetRemoteDescription() start"); if (peerConnection != null) { SessionDescription sdp = new SessionDescription( - Type.fromCanonicalForm(sdpMap.getString("type")), - sdpMap.getString("sdp") + Type.fromCanonicalForm(sdpMap.getString("type")), + sdpMap.getString("sdp") ); peerConnection.setRemoteDescription(new SdpObserver() { @@ -1319,43 +1894,53 @@ public void onCreateFailure(String s) { @Override public void onSetFailure(String s) { - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", s, null); + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: " + s, result); } }, sdp); } else { - Log.d(TAG, "peerConnectionSetRemoteDescription() peerConnection is null"); - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", "peerConnection is null", null); + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: peerConnection is null", result); } - Log.d(TAG, "peerConnectionSetRemoteDescription() end"); } public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final String id, - final Result result) { + final Result result) { boolean res = false; PeerConnection peerConnection = getPeerConnection(id); - Log.d(TAG, "peerConnectionAddICECandidate() start"); if 
(peerConnection != null) { + int sdpMLineIndex = 0; + if (!candidateMap.isNull("sdpMLineIndex")) { + sdpMLineIndex = candidateMap.getInt("sdpMLineIndex"); + } IceCandidate candidate = new IceCandidate( candidateMap.getString("sdpMid"), - candidateMap.getInt("sdpMLineIndex"), - candidateMap.getString("candidate") - ); + sdpMLineIndex, + candidateMap.getString("candidate")); res = peerConnection.addIceCandidate(candidate); } else { - Log.d(TAG, "peerConnectionAddICECandidate() peerConnection is null"); - result.error("peerConnectionAddICECandidateFailed", - "peerConnectionAddICECandidate() peerConnection is null", null); + resultError("peerConnectionAddICECandidate", "peerConnection is null", result); } result.success(res); - Log.d(TAG, "peerConnectionAddICECandidate() end"); } public void peerConnectionGetStats(String trackId, String id, final Result result) { PeerConnectionObserver pco = mPeerConnectionObservers.get(id); if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionGetStats() peerConnection is null"); + resultError("peerConnectionGetStats", "peerConnection is null", result); + } else { + if(trackId == null || trackId.isEmpty()) { + pco.getStats(result); + } else { + pco.getStatsForTrack(trackId, result); + } + } + } + + public void restartIce(final String id) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "restartIce() peerConnection is null"); } else { - pco.getStats(trackId, result); + pco.restartIce(); } } @@ -1370,39 +1955,78 @@ public void peerConnectionClose(final String id) { public void peerConnectionDispose(final String id) { PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - if (pco == null || pco.getPeerConnection() == null) { + if (pco != null) { + if (peerConnectionDispose(pco)) { + + mPeerConnectionObservers.remove(id); + } + } else { + Log.d(TAG, "peerConnectionDispose() peerConnectionObserver is null"); + } + if 
(mPeerConnectionObservers.size() == 0) { + AudioSwitchManager.instance.stop(); + } + } + + public boolean peerConnectionDispose(final PeerConnectionObserver pco) { + if (pco.getPeerConnection() == null) { Log.d(TAG, "peerConnectionDispose() peerConnection is null"); } else { pco.dispose(); - mPeerConnectionObservers.remove(id); + return true; } - if (mPeerConnectionObservers.size() == 0) { - audioManager.onAudioManagerRequested(false); + return false; + } + + public void streamDispose(final String streamId) { + MediaStream stream = localStreams.get(streamId); + if (stream != null) { + streamDispose(stream); + localStreams.remove(streamId); + removeStreamForRendererById(streamId); + } else { + Log.d(TAG, "streamDispose() mediaStream is null"); } } - public void mediaStreamRelease(final String id) { - MediaStream mediaStream = localStreams.get(id); - if (mediaStream != null) { - for (VideoTrack track : mediaStream.videoTracks) { - localTracks.remove(track.id()); - getUserMediaImpl.removeVideoCapturer(track.id()); + public void streamDispose(final MediaStream stream) { + List videoTracks = stream.videoTracks; + for (VideoTrack track : videoTracks) { + localTracks.remove(track.id()); + getUserMediaImpl.removeVideoCapturer(track.id()); + stream.removeTrack(track); + } + List audioTracks = stream.audioTracks; + for (AudioTrack track : audioTracks) { + localTracks.remove(track.id()); + stream.removeTrack(track); + } + } + + private void removeStreamForRendererById(String streamId) { + for (int i = 0; i < renders.size(); i++) { + FlutterRTCVideoRenderer renderer = renders.valueAt(i); + if (renderer.checkMediaStream(streamId, "local")) { + renderer.setStream(null, ""); } - for (AudioTrack track : mediaStream.audioTracks) { - localTracks.remove(track.id()); + } + } + + private void removeTrackForRendererById(String trackId) { + for (int i = 0; i < renders.size(); i++) { + FlutterRTCVideoRenderer renderer = renders.valueAt(i); + if (renderer.checkVideoTrack(trackId, "local")) 
{ + renderer.setStream(null, null); } - localStreams.remove(id); - } else { - Log.d(TAG, "mediaStreamRelease() mediaStream is null"); } } public void createDataChannel(final String peerConnectionId, String label, ConstraintsMap config, - Result result) { + Result result) { // Forward to PeerConnectionObserver which deals with DataChannels // because DataChannel is owned by PeerConnection. PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); + = mPeerConnectionObservers.get(peerConnectionId); if (pco == null || pco.getPeerConnection() == null) { Log.d(TAG, "createDataChannel() peerConnection is null"); } else { @@ -1410,12 +2034,12 @@ public void createDataChannel(final String peerConnectionId, String label, Const } } - public void dataChannelSend(String peerConnectionId, int dataChannelId, ByteBuffer bytebuffer, - Boolean isBinary) { + public void dataChannelSend(String peerConnectionId, String dataChannelId, ByteBuffer bytebuffer, + Boolean isBinary) { // Forward to PeerConnectionObserver which deals with DataChannels // because DataChannel is owned by PeerConnection. 
PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); + = mPeerConnectionObservers.get(peerConnectionId); if (pco == null || pco.getPeerConnection() == null) { Log.d(TAG, "dataChannelSend() peerConnection is null"); } else { @@ -1423,11 +2047,22 @@ public void dataChannelSend(String peerConnectionId, int dataChannelId, ByteBuff } } - public void dataChannelClose(String peerConnectionId, int dataChannelId) { + public void dataChannelGetBufferedAmount(String peerConnectionId, String dataChannelId, Result result) { + PeerConnectionObserver pco + = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "dataChannelGetBufferedAmount() peerConnection is null"); + resultError("dataChannelGetBufferedAmount", "peerConnection is null", result); + } else { + pco.dataChannelGetBufferedAmount(dataChannelId, result); + } + } + + public void dataChannelClose(String peerConnectionId, String dataChannelId) { // Forward to PeerConnectionObserver which deals with DataChannels // because DataChannel is owned by PeerConnection. 
PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); + = mPeerConnectionObservers.get(peerConnectionId); if (pco == null || pco.getPeerConnection() == null) { Log.d(TAG, "dataChannelClose() peerConnection is null"); } else { @@ -1438,4 +2073,221 @@ public void dataChannelClose(String peerConnectionId, int dataChannelId) { public void setActivity(Activity activity) { this.activity = activity; } + + public void addTrack(String peerConnectionId, String trackId, List streamIds, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + LocalTrack track = localTracks.get(trackId); + if (track == null) { + resultError("addTrack", "track is null", result); + return; + } + if (pco == null || pco.getPeerConnection() == null) { + resultError("addTrack", "peerConnection is null", result); + } else { + pco.addTrack(track.track, streamIds, result); + } + } + + public void removeTrack(String peerConnectionId, String senderId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("removeTrack", "peerConnection is null", result); + } else { + pco.removeTrack(senderId, result); + } + } + + public void addTransceiver(String peerConnectionId, String trackId, Map transceiverInit, + Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + LocalTrack track = localTracks.get(trackId); + if (track == null) { + resultError("addTransceiver", "track is null", result); + return; + } + if (pco == null || pco.getPeerConnection() == null) { + resultError("addTransceiver", "peerConnection is null", result); + } else { + pco.addTransceiver(track.track, transceiverInit, result); + } + } + + public void addTransceiverOfType(String peerConnectionId, String mediaType, Map transceiverInit, + Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + 
if (pco == null || pco.getPeerConnection() == null) { + resultError("addTransceiverOfType", "peerConnection is null", result); + } else { + pco.addTransceiverOfType(mediaType, transceiverInit, result); + } + } + + public void rtpTransceiverSetDirection(String peerConnectionId, String direction, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverSetDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverSetDirection(direction, transceiverId, result); + } + } + + public void rtpTransceiverSetCodecPreferences(String peerConnectionId, String transceiverId, List> codecs, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("setCodecPreferences", "peerConnection is null", result); + } else { + pco.rtpTransceiverSetCodecPreferences(transceiverId, codecs, result); + } + } + + public void rtpTransceiverGetDirection(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverSetDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverGetDirection(transceiverId, result); + } + } + + public void rtpTransceiverGetCurrentDirection(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverSetDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverGetCurrentDirection(transceiverId, result); + } + } + + public void rtpTransceiverStop(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = 
mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverStop", "peerConnection is null", result); + } else { + pco.rtpTransceiverStop(transceiverId, result); + } + } + + public void rtpSenderSetParameters(String peerConnectionId, String rtpSenderId, Map parameters, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderSetParameters", "peerConnection is null", result); + } else { + pco.rtpSenderSetParameters(rtpSenderId, parameters, result); + } + } + + public void getSenders(String peerConnectionId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("getSenders", "peerConnection is null", result); + } else { + pco.getSenders(result); + } + } + + public void getReceivers(String peerConnectionId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("getReceivers", "peerConnection is null", result); + } else { + pco.getReceivers(result); + } + } + + public void getTransceivers(String peerConnectionId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("getTransceivers", "peerConnection is null", result); + } else { + pco.getTransceivers(result); + } + } + + public void rtpSenderSetTrack(String peerConnectionId, String rtpSenderId, String trackId, boolean replace, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderSetTrack", "peerConnection is null", result); + } else { + MediaStreamTrack mediaStreamTrack = null; + 
LocalTrack track = localTracks.get(trackId); + if (trackId.length() > 0) { + if (track == null) { + resultError("rtpSenderSetTrack", "track is null", result); + return; + } + } + + if(track != null) { + mediaStreamTrack = track.track; + } + pco.rtpSenderSetTrack(rtpSenderId, mediaStreamTrack, result, replace); + } + } + + public void rtpSenderSetStreams(String peerConnectionId, String rtpSenderId, List streamIds, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderSetStreams", "peerConnection is null", result); + } else { + pco.rtpSenderSetStreams(rtpSenderId, streamIds, result); + } + } + + + public void reStartCamera() { + if (null == getUserMediaImpl) { + return; + } + getUserMediaImpl.reStartCamera(new GetUserMediaImpl.IsCameraEnabled() { + @Override + public boolean isEnabled(String id) { + if (!localTracks.containsKey(id)) { + return false; + } + return localTracks.get(id).enabled(); + } + }); + } + + @RequiresApi(api = Build.VERSION_CODES.M) + void requestPermissions( + final ArrayList permissions, + final Callback successCallback, + final Callback errorCallback) { + PermissionUtils.Callback callback = + (permissions_, grantResults) -> { + List grantedPermissions = new ArrayList<>(); + List deniedPermissions = new ArrayList<>(); + + for (int i = 0; i < permissions_.length; ++i) { + String permission = permissions_[i]; + int grantResult = grantResults[i]; + + if (grantResult == PackageManager.PERMISSION_GRANTED) { + grantedPermissions.add(permission); + } else { + deniedPermissions.add(permission); + } + } + + // Success means that all requested permissions were granted. + for (String p : permissions) { + if (!grantedPermissions.contains(p)) { + // According to step 6 of the getUserMedia() algorithm + // "if the result is denied, jump to the step Permission + // Failure." 
+ errorCallback.invoke(deniedPermissions); + return; + } + } + successCallback.invoke(grantedPermissions); + }; + + final Activity activity = getActivity(); + final Context context = getApplicationContext(); + PermissionUtils.requestPermissions( + context, + activity, + permissions.toArray(new String[permissions.size()]), callback); + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java b/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java new file mode 100644 index 0000000000..7bee5d0dc2 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java @@ -0,0 +1,231 @@ +package com.cloudwebrtc.webrtc; + +import org.webrtc.SurfaceTextureHelper; +import org.webrtc.CapturerObserver; +import org.webrtc.ThreadUtils; +import org.webrtc.VideoCapturer; +import org.webrtc.VideoFrame; +import org.webrtc.VideoSink; + +import android.annotation.TargetApi; +import android.content.Context; +import android.content.Intent; +import android.media.projection.MediaProjection; +import android.view.Surface; +import android.view.WindowManager; +import android.app.Activity; +import android.hardware.display.DisplayManager; +import android.util.DisplayMetrics; +import android.hardware.display.VirtualDisplay; +import android.media.projection.MediaProjectionManager; +import android.os.Looper; +import android.os.Handler; +import android.os.Build; +import android.view.Display; + +/** + * An copy of ScreenCapturerAndroid to capture the screen content while being aware of device orientation + */ +@TargetApi(21) +public class OrientationAwareScreenCapturer implements VideoCapturer, VideoSink { + private static final int DISPLAY_FLAGS = + DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION; + // DPI for VirtualDisplay, does not seem to matter for us. 
+ private static final int VIRTUAL_DISPLAY_DPI = 400; + private final Intent mediaProjectionPermissionResultData; + private final MediaProjection.Callback mediaProjectionCallback; + private int width; + private int height; + private int oldWidth; + private int oldHeight; + private VirtualDisplay virtualDisplay; + private SurfaceTextureHelper surfaceTextureHelper; + private CapturerObserver capturerObserver; + private long numCapturedFrames = 0; + private MediaProjection mediaProjection; + private boolean isDisposed = false; + private MediaProjectionManager mediaProjectionManager; + private WindowManager windowManager; + private boolean isPortrait; + + /** + * Constructs a new Screen Capturer. + * + * @param mediaProjectionPermissionResultData the result data of MediaProjection permission + * activity; the calling app must validate that result code is Activity.RESULT_OK before + * calling this method. + * @param mediaProjectionCallback MediaProjection callback to implement application specific + * logic in events such as when the user revokes a previously granted capture permission. 
+ **/ + public OrientationAwareScreenCapturer(Intent mediaProjectionPermissionResultData, + MediaProjection.Callback mediaProjectionCallback) { + this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData; + this.mediaProjectionCallback = mediaProjectionCallback; + } + + public void onFrame(VideoFrame frame) { + checkNotDisposed(); + this.isPortrait = isDeviceOrientationPortrait(); + final int max = Math.max(this.height, this.width); + final int min = Math.min(this.height, this.width); + if (this.isPortrait) { + changeCaptureFormat(min, max, 15); + } else { + changeCaptureFormat(max, min, 15); + } + capturerObserver.onFrameCaptured(frame); + } + + private boolean isDeviceOrientationPortrait() { + final Display display = windowManager.getDefaultDisplay(); + final DisplayMetrics metrics = new DisplayMetrics(); + display.getRealMetrics(metrics); + + return metrics.heightPixels > metrics.widthPixels; + } + + + private void checkNotDisposed() { + if (isDisposed) { + throw new RuntimeException("capturer is disposed."); + } + } + + public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper, + final Context applicationContext, final CapturerObserver capturerObserver) { + checkNotDisposed(); + if (capturerObserver == null) { + throw new RuntimeException("capturerObserver not set."); + } + this.capturerObserver = capturerObserver; + if (surfaceTextureHelper == null) { + throw new RuntimeException("surfaceTextureHelper not set."); + } + this.surfaceTextureHelper = surfaceTextureHelper; + + this.windowManager = (WindowManager) applicationContext.getSystemService( + Context.WINDOW_SERVICE); + this.mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService( + Context.MEDIA_PROJECTION_SERVICE); + } + + @Override + public synchronized void startCapture( + final int width, final int height, final int ignoredFramerate) { + //checkNotDisposed(); + + this.isPortrait = isDeviceOrientationPortrait(); + if 
(this.isPortrait) { + this.width = width; + this.height = height; + } else { + this.height = width; + this.width = height; + } + + mediaProjection = mediaProjectionManager.getMediaProjection( + Activity.RESULT_OK, mediaProjectionPermissionResultData); + + // Let MediaProjection callback use the SurfaceTextureHelper thread. + mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler()); + + createVirtualDisplay(); + capturerObserver.onCapturerStarted(true); + surfaceTextureHelper.startListening(this); + } + + @Override + public synchronized void stopCapture() { + checkNotDisposed(); + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + surfaceTextureHelper.stopListening(); + capturerObserver.onCapturerStopped(); + if (virtualDisplay != null) { + virtualDisplay.release(); + virtualDisplay = null; + } + if (mediaProjection != null) { + // Unregister the callback before stopping, otherwise the callback recursively + // calls this method. + mediaProjection.unregisterCallback(mediaProjectionCallback); + mediaProjection.stop(); + mediaProjection = null; + } + } + }); + } + + @Override + public synchronized void dispose() { + isDisposed = true; + } + + /** + * Changes output video format. This method can be used to scale the output + * video, or to change orientation when the captured screen is rotated for example. 
+ * + * @param width new output video width + * @param height new output video height + * @param ignoredFramerate ignored + */ + @Override + public synchronized void changeCaptureFormat( + final int width, final int height, final int ignoredFramerate) { + checkNotDisposed(); + if (this.oldWidth != width || this.oldHeight != height) { + this.oldWidth = width; + this.oldHeight = height; + + if (oldHeight > oldWidth) { + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + if (virtualDisplay != null && surfaceTextureHelper != null) { + virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); + surfaceTextureHelper.setTextureSize(oldWidth, oldHeight); + virtualDisplay.resize(oldWidth, oldHeight, VIRTUAL_DISPLAY_DPI); + } + } + }); + } + + if (oldWidth > oldHeight) { + surfaceTextureHelper.setTextureSize(oldWidth, oldHeight); + virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); + final Handler handler = new Handler(Looper.getMainLooper()); + handler.postDelayed(new Runnable() { + @Override + public void run() { + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + if (virtualDisplay != null && surfaceTextureHelper != null) { + virtualDisplay.resize(oldWidth, oldHeight, VIRTUAL_DISPLAY_DPI); + } + } + }); + } + }, 700); + } + } + } + + private void createVirtualDisplay() { + surfaceTextureHelper.setTextureSize(width, height); + surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height); + virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height, + VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()), + null /* callback */, null /* callback handler */); + } + + @Override + public boolean isScreencast() { + return true; + } + + public long getNumCapturedFrames() { + return 
numCapturedFrames; + } +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 532718ac7b..9c36dce354 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -1,53 +1,79 @@ package com.cloudwebrtc.webrtc; import android.util.Log; -import android.util.SparseArray; + import androidx.annotation.Nullable; + +import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsArray; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import com.cloudwebrtc.webrtc.utils.Utils; + import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.MethodChannel.Result; + +import java.lang.reflect.Field; +import java.math.BigInteger; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.UUID; + import org.webrtc.AudioTrack; +import org.webrtc.CandidatePairChangeEvent; import org.webrtc.DataChannel; +import org.webrtc.DtmfSender; import org.webrtc.IceCandidate; import org.webrtc.MediaStream; import org.webrtc.MediaStreamTrack; import org.webrtc.PeerConnection; +import org.webrtc.RTCStats; +import org.webrtc.RTCStatsReport; +import org.webrtc.RtpCapabilities; +import org.webrtc.RtpParameters; import org.webrtc.RtpReceiver; -import org.webrtc.StatsObserver; -import org.webrtc.StatsReport; +import org.webrtc.RtpSender; +import org.webrtc.RtpTransceiver; import org.webrtc.VideoTrack; class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.StreamHandler { - 
private final static String TAG = FlutterWebRTCPlugin.TAG; - - private final SparseArray dataChannels = new SparseArray<>(); - private BinaryMessenger messenger; + private final Map dataChannels = new HashMap<>(); + private final BinaryMessenger messenger; private final String id; private PeerConnection peerConnection; + private final PeerConnection.RTCConfiguration configuration; final Map remoteStreams = new HashMap<>(); final Map remoteTracks = new HashMap<>(); + final Map transceivers = new HashMap<>(); private final StateProvider stateProvider; - private final EventChannel eventChannel; private EventChannel.EventSink eventSink; - PeerConnectionObserver(StateProvider stateProvider, BinaryMessenger messenger, String id) { + PeerConnectionObserver(PeerConnection.RTCConfiguration configuration, StateProvider stateProvider, BinaryMessenger messenger, String id) { + this.configuration = configuration; this.stateProvider = stateProvider; this.messenger = messenger; this.id = id; - eventChannel = new EventChannel(messenger, "FlutterWebRTC/peerConnectoinEvent" + id); + eventChannel = new EventChannel(messenger, "FlutterWebRTC/peerConnectionEvent" + id); eventChannel.setStreamHandler(this); } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + @Override public void onListen(Object o, EventChannel.EventSink sink) { eventSink = new AnyThreadSink(sink); @@ -58,7 +84,7 @@ public void onCancel(Object o) { eventSink = null; } - PeerConnection getPeerConnection() { + public PeerConnection getPeerConnection() { return peerConnection; } @@ -66,6 +92,10 @@ void setPeerConnection(PeerConnection peerConnection) { this.peerConnection = peerConnection; } + void restartIce() { + peerConnection.restartIce(); + } + void close() { peerConnection.close(); remoteStreams.clear(); @@ -75,7 +105,6 @@ void close() { void dispose() { this.close(); - 
peerConnection.dispose(); eventChannel.setStreamHandler(null); } @@ -89,9 +118,6 @@ void createDataChannel(String label, ConstraintsMap config, Result result) { if (config.hasKey("ordered")) { init.ordered = config.getBoolean("ordered"); } - if (config.hasKey("maxRetransmitTime")) { - init.maxRetransmitTimeMs = config.getInt("maxRetransmitTime"); - } if (config.hasKey("maxRetransmits")) { init.maxRetransmits = config.getInt("maxRetransmits"); } @@ -107,23 +133,22 @@ void createDataChannel(String label, ConstraintsMap config, Result result) { // been deprecated in Chromium, and Google have decided (in 2015) to no // longer support them (in the face of multiple reported issues of // breakages). - int dataChannelId = init.id; - if (dataChannel != null && -1 != dataChannelId) { - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); + String flutterId = getNextDataChannelUUID(); + if (dataChannel != null) { + dataChannels.put(flutterId, dataChannel); + registerDataChannelObserver(flutterId, dataChannel); ConstraintsMap params = new ConstraintsMap(); params.putInt("id", dataChannel.id()); params.putString("label", dataChannel.label()); + params.putString("flutterId", flutterId); result.success(params.toMap()); } else { - result.error("createDataChannel", - "Can't create data-channel for id: " + dataChannelId, - null); + resultError("createDataChannel", "Can't create data-channel for id: " + init.id, result); } } - void dataChannelClose(int dataChannelId) { + void dataChannelClose(String dataChannelId) { DataChannel dataChannel = dataChannels.get(dataChannelId); if (dataChannel != null) { dataChannel.close(); @@ -133,7 +158,7 @@ void dataChannelClose(int dataChannelId) { } } - void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { + void dataChannelSend(String dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { DataChannel dataChannel = dataChannels.get(dataChannelId); if (dataChannel != 
null) { DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); @@ -143,63 +168,171 @@ void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) } } - void getStats(String trackId, final Result result) { - MediaStreamTrack track = null; - if (trackId == null - || trackId.isEmpty() - || (track = stateProvider.getLocalTracks().get(trackId)) != null - || (track = remoteTracks.get(trackId)) != null) { - peerConnection.getStats( - new StatsObserver() { - @Override - public void onComplete(StatsReport[] reports) { - - final int reportCount = reports.length; - ConstraintsMap params = new ConstraintsMap(); - ConstraintsArray stats = new ConstraintsArray(); - - for (int i = 0; i < reportCount; ++i) { - StatsReport report = reports[i]; - ConstraintsMap report_map = new ConstraintsMap(); - - report_map.putString("id", report.id); - report_map.putString("type", report.type); - report_map.putDouble("timestamp", report.timestamp); - - StatsReport.Value[] values = report.values; - ConstraintsMap v_map = new ConstraintsMap(); - final int valueCount = values.length; - for (int j = 0; j < valueCount; ++j) { - StatsReport.Value v = values[j]; - v_map.putString(v.name, v.value); - } + void dataChannelGetBufferedAmount(String dataChannelId, Result result) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + ConstraintsMap params = new ConstraintsMap(); + params.putLong("bufferedAmount", dataChannel.bufferedAmount()); + result.success(params.toMap()); + } else { + Log.d(TAG, "dataChannelGetBufferedAmount() dataChannel is null"); + resultError("dataChannelGetBufferedAmount", "DataChannel is null", result); + } + } + + RtpTransceiver getRtpTransceiverById(String id) { + RtpTransceiver transceiver = transceivers.get(id); + if (null == transceiver) { + List transceivers = peerConnection.getTransceivers(); + for (RtpTransceiver t : transceivers) { + if (id.equals(t.getMid())) { + transceiver = t; + } + } + } + 
return transceiver; + } + + RtpSender getRtpSenderById(String id) { + List senders = peerConnection.getSenders(); + for (RtpSender sender : senders) { + if (id.equals(sender.id())) { + return sender; + } + } + return null; + } + + RtpReceiver getRtpReceiverById(String id) { + List receivers = peerConnection.getReceivers(); + for (RtpReceiver receiver : receivers) { + if (id.equals(receiver.id())) { + return receiver; + } + } + return null; + } - report_map.putMap("values", v_map.toMap()); - stats.pushMap(report_map); + void handleStatsReport(RTCStatsReport rtcStatsReport, Result result) { + Map reports = rtcStatsReport.getStatsMap(); + ConstraintsMap params = new ConstraintsMap(); + ConstraintsArray stats = new ConstraintsArray(); + + for (RTCStats report : reports.values()) { + ConstraintsMap report_map = new ConstraintsMap(); + + report_map.putString("id", report.getId()); + report_map.putString("type", report.getType()); + report_map.putDouble("timestamp", report.getTimestampUs()); + + Map values = report.getMembers(); + ConstraintsMap v_map = new ConstraintsMap(); + for (String key : values.keySet()) { + Object v = values.get(key); + if(v instanceof String) { + v_map.putString(key, (String)v); + } else if(v instanceof String[]) { + ConstraintsArray arr = new ConstraintsArray(); + for(String s : (String[])v) { + arr.pushString(s); + } + v_map.putArray(key, arr.toArrayList()); + } else if(v instanceof Integer) { + v_map.putInt(key, (Integer)v); + } else if(v instanceof Long) { + v_map.putLong(key, (Long)v); + } else if(v instanceof Double) { + v_map.putDouble(key, (Double)v); + } else if(v instanceof Boolean) { + v_map.putBoolean(key, (Boolean)v); + } else if(v instanceof BigInteger){ + v_map.putLong(key, ((BigInteger)v).longValue()); + } else if(v instanceof LinkedHashMap) { + ConstraintsMap m = new ConstraintsMap(); + for(Map.Entry entry : ((LinkedHashMap)v).entrySet()) { + Object value = entry.getValue(); + if(value instanceof String) { + 
m.putString(entry.getKey(), (String)value); + } else if(value instanceof Integer) { + m.putInt(entry.getKey(), (Integer)value); + } else if(value instanceof Long) { + m.putLong(entry.getKey(), (Long)value); + } else if(value instanceof Double) { + m.putDouble(entry.getKey(), (Double)value); + } else if(value instanceof Boolean) { + m.putBoolean(entry.getKey(), (Boolean)value); + } else if(value instanceof BigInteger) { + m.putLong(entry.getKey(), ((BigInteger)value).longValue()); + } else { + Log.d(TAG, "getStats() unknown type: " + value.getClass().getName() + " for [" + entry.getKey() + "] value: " + value); + } + } + v_map.putMap(key, m.toMap()); + } else { + Log.d(TAG, "getStats() unknown type: " + v.getClass().getName() + " for [" + key + "] value: " + v); } + } + report_map.putMap("values", v_map.toMap()); + stats.pushMap(report_map); + } - params.putArray("stats", stats.toArrayList()); - result.success(params.toMap()); - } - }, - track); + params.putArray("stats", stats.toArrayList()); + result.success(params.toMap()); + } + + void getStatsForTrack(String trackId, Result result) { + if (trackId == null || trackId.isEmpty()) { + resultError("peerConnectionGetStats", "MediaStreamTrack not found for id: " + trackId, result); + return; + } + + RtpSender sender = null; + RtpReceiver receiver = null; + for (RtpSender s : peerConnection.getSenders()) { + if (s.track() != null && trackId.equals(s.track().id())) { + sender = s; + break; + } + } + for (RtpReceiver r : peerConnection.getReceivers()) { + if (r.track() != null && trackId.equals(r.track().id())) { + receiver = r; + break; + } + } + if (sender != null) { + peerConnection.getStats(sender, rtcStatsReport -> handleStatsReport(rtcStatsReport, result)); + } else if (receiver != null) { + peerConnection.getStats(receiver, rtcStatsReport -> handleStatsReport(rtcStatsReport, result)); } else { - Log.e(TAG, "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId); - 
result.error("peerConnectionGetStats", - "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId, - null); + resultError("peerConnectionGetStats", "MediaStreamTrack not found for id: " + trackId, result); } } + void getStats(final Result result) { + peerConnection.getStats( + rtcStatsReport -> handleStatsReport(rtcStatsReport, result)); + } + @Override public void onIceCandidate(final IceCandidate candidate) { Log.d(TAG, "onIceCandidate"); ConstraintsMap params = new ConstraintsMap(); params.putString("event", "onCandidate"); + params.putMap("candidate", candidateToMap(candidate)); + sendEvent(params); + } + + @Override + public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) { + Log.d(TAG, "onSelectedCandidatePairChanged"); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onSelectedCandidatePairChanged"); ConstraintsMap candidateParams = new ConstraintsMap(); - candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); - candidateParams.putString("sdpMid", candidate.sdpMid); - candidateParams.putString("candidate", candidate.sdp); + candidateParams.putInt("lastDataReceivedMs", event.lastDataReceivedMs); + candidateParams.putMap("local", candidateToMap(event.local)); + candidateParams.putMap("remote", candidateToMap(event.remote)); + candidateParams.putString("reason", event.reason); params.putMap("candidate", candidateParams.toMap()); sendEvent(params); } @@ -213,10 +346,15 @@ public void onIceCandidatesRemoved(final IceCandidate[] candidates) { public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "iceConnectionState"); - params.putString("state", iceConnectionStateString(iceConnectionState)); + params.putString("state", Utils.iceConnectionStateString(iceConnectionState)); sendEvent(params); } + @Override + public void onStandardizedIceConnectionChange(PeerConnection.IceConnectionState 
newState) { + + } + @Override public void onIceConnectionReceivingChange(boolean var1) { } @@ -226,14 +364,14 @@ public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringSt Log.d(TAG, "onIceGatheringChange" + iceGatheringState.name()); ConstraintsMap params = new ConstraintsMap(); params.putString("event", "iceGatheringState"); - params.putString("state", iceGatheringStateString(iceGatheringState)); + params.putString("state", Utils.iceGatheringStateString(iceGatheringState)); sendEvent(params); } private String getUIDForStream(MediaStream mediaStream) { for (Iterator> i - = remoteStreams.entrySet().iterator(); - i.hasNext(); ) { + = remoteStreams.entrySet().iterator(); + i.hasNext(); ) { Map.Entry e = i.next(); if (e.getValue().equals(mediaStream)) { return e.getKey(); @@ -267,6 +405,7 @@ public void onAddStream(MediaStream mediaStream) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "onAddStream"); params.putString("streamId", streamId); + params.putString("ownerTag", id); ConstraintsArray audioTracks = new ConstraintsArray(); ConstraintsArray videoTracks = new ConstraintsArray(); @@ -307,7 +446,6 @@ public void onAddStream(MediaStream mediaStream) { sendEvent(params); } - void sendEvent(ConstraintsMap event) { if (eventSink != null) { eventSink.success(event.toMap()); @@ -326,22 +464,27 @@ public void onRemoveStream(MediaStream mediaStream) { this.remoteTracks.remove(track.id()); } - this.remoteStreams.remove(streamId); ConstraintsMap params = new ConstraintsMap(); params.putString("event", "onRemoveStream"); params.putString("streamId", streamId); sendEvent(params); } + @Override + public void onTrack(RtpTransceiver transceiver) { + } + @Override public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { Log.d(TAG, "onAddTrack"); + // for plan-b for (MediaStream stream : mediaStreams) { String streamId = stream.getId(); MediaStreamTrack track = receiver.track(); ConstraintsMap params = new 
ConstraintsMap(); params.putString("event", "onAddTrack"); params.putString("streamId", streamId); + params.putString("ownerTag", id); params.putString("trackId", track.id()); String trackId = track.id(); @@ -354,46 +497,77 @@ public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { trackInfo.putBoolean("remote", true); params.putMap("track", trackInfo.toMap()); sendEvent(params); + + if ("audio".equals(track.kind())) { + AudioSwitchManager.instance.start(); + } + } + + // For unified-plan + ConstraintsMap params = new ConstraintsMap(); + ConstraintsArray streams = new ConstraintsArray(); + for (int i = 0; i < mediaStreams.length; i++) { + MediaStream stream = mediaStreams[i]; + streams.pushMap(new ConstraintsMap(mediaStreamToMap(stream))); } + + params.putString("event", "onTrack"); + params.putArray("streams", streams.toArrayList()); + params.putMap("track", mediaTrackToMap(receiver.track())); + params.putMap("receiver", rtpReceiverToMap(receiver)); + + if (this.configuration.sdpSemantics == PeerConnection.SdpSemantics.UNIFIED_PLAN) { + List transceivers = peerConnection.getTransceivers(); + for (RtpTransceiver transceiver : transceivers) { + if (transceiver.getReceiver() != null && receiver.id().equals(transceiver.getReceiver().id())) { + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = stateProvider.getNextStreamUUID(); + this.transceivers.put(transceiverId,transceiver); + } + params.putMap("transceiver", transceiverToMap(transceiverId, transceiver)); + } + } + } + sendEvent(params); + } + + @Override + public void onRemoveTrack(RtpReceiver rtpReceiver) { + Log.d(TAG, "onRemoveTrack"); + + MediaStreamTrack track = rtpReceiver.track(); + String trackId = track.id(); + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", track.kind()); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + 
trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onRemoveTrack"); + params.putString("trackId", track.id()); + params.putMap("track", trackInfo.toMap()); + sendEvent(params); } @Override public void onDataChannel(DataChannel dataChannel) { - // XXX Unfortunately, the Java WebRTC API doesn't expose the id - // of the underlying C++/native DataChannel (even though the - // WebRTC standard defines the DataChannel.id property). As a - // workaround, generated an id which will surely not clash with - // the ids of the remotely-opened (and standard-compliant - // locally-opened) DataChannels. - int dataChannelId = -1; - // The RTCDataChannel.id space is limited to unsigned short by - // the standard: - // https://www.w3.org/TR/webrtc/#dom-datachannel-id. - // Additionally, 65535 is reserved due to SCTP INIT and - // INIT-ACK chunks only allowing a maximum of 65535 streams to - // be negotiated (as defined by the WebRTC Data Channel - // Establishment Protocol). 
- for (int i = 65536; i <= Integer.MAX_VALUE; ++i) { - if (null == dataChannels.get(i, null)) { - dataChannelId = i; - break; - } - } - if (-1 == dataChannelId) { - return; - } + String flutterId = getNextDataChannelUUID(); ConstraintsMap params = new ConstraintsMap(); params.putString("event", "didOpenDataChannel"); - params.putInt("id", dataChannelId); + params.putInt("id", dataChannel.id()); params.putString("label", dataChannel.label()); + params.putString("flutterId", flutterId); - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); + dataChannels.put(flutterId, dataChannel); + registerDataChannelObserver(flutterId, dataChannel); sendEvent(params); } - private void registerDataChannelObserver(int dcId, DataChannel dataChannel) { + private void registerDataChannelObserver(String dcId, DataChannel dataChannel) { // DataChannel.registerObserver implementation does not allow to // unregister, so the observer is registered here and is never // unregistered @@ -412,60 +586,574 @@ public void onRenegotiationNeeded() { public void onSignalingChange(PeerConnection.SignalingState signalingState) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "signalingState"); - params.putString("state", signalingStateString(signalingState)); + params.putString("state", Utils.signalingStateString(signalingState)); + sendEvent(params); + } + + @Override + public void onConnectionChange(PeerConnection.PeerConnectionState connectionState) { + Log.d(TAG, "onConnectionChange" + connectionState.name()); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "peerConnectionState"); + params.putString("state", Utils.connectionStateString(connectionState)); sendEvent(params); } @Nullable - private String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { - switch (iceConnectionState) { - case NEW: - return "new"; - case CHECKING: - return "checking"; - case 
CONNECTED: - return "connected"; - case COMPLETED: - return "completed"; - case FAILED: - return "failed"; - case DISCONNECTED: - return "disconnected"; - case CLOSED: - return "closed"; + private String transceiverDirectionString(RtpTransceiver.RtpTransceiverDirection direction) { + switch (direction) { + case SEND_RECV: + return "sendrecv"; + case SEND_ONLY: + return "sendonly"; + case RECV_ONLY: + return "recvonly"; + case INACTIVE: + return "inactive"; + case STOPPED: + return "stopped"; } return null; } + private RtpTransceiver.RtpTransceiverDirection stringToTransceiverDirection(String direction) { + switch (direction) { + case "sendrecv": + return RtpTransceiver.RtpTransceiverDirection.SEND_RECV; + case "sendonly": + return RtpTransceiver.RtpTransceiverDirection.SEND_ONLY; + case "recvonly": + return RtpTransceiver.RtpTransceiverDirection.RECV_ONLY; + case "inactive": + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + case "stopped": + return RtpTransceiver.RtpTransceiverDirection.STOPPED; + } + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + } + + private MediaStreamTrack.MediaType stringToMediaType(String mediaType) { + MediaStreamTrack.MediaType type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if (mediaType.equals("audio")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + else if (mediaType.equals("video")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + return type; + } + + private RtpParameters.Encoding mapToEncoding(Map parameters) { + RtpParameters.Encoding encoding = new RtpParameters.Encoding((String) parameters.get("rid"), true, 1.0); + + if (parameters.get("active") != null) { + encoding.active = (Boolean) parameters.get("active"); + } + + if (parameters.get("ssrc") != null) { + encoding.ssrc = ((Integer) parameters.get("ssrc")).longValue(); + } + + if (parameters.get("minBitrate") != null) { + encoding.minBitrateBps = (Integer) parameters.get("minBitrate"); + } + + if (parameters.get("maxBitrate") != null) { 
+ encoding.maxBitrateBps = (Integer) parameters.get("maxBitrate"); + } + + if (parameters.get("maxFramerate") != null) { + encoding.maxFramerate = (Integer) parameters.get("maxFramerate"); + } + + if (parameters.get("numTemporalLayers") != null) { + encoding.numTemporalLayers = (Integer) parameters.get("numTemporalLayers"); + } + + if (parameters.get("scaleResolutionDownBy") != null) { + encoding.scaleResolutionDownBy = (Double) parameters.get("scaleResolutionDownBy"); + } + + if (parameters.get("scalabilityMode") != null) { + encoding.scalabilityMode = (String) parameters.get("scalabilityMode"); + } + + return encoding; + } + + private RtpTransceiver.RtpTransceiverInit mapToRtpTransceiverInit(Map parameters) { + List streamIds = (List) parameters.get("streamIds"); + List> encodingsParams = (List>) parameters.get("sendEncodings"); + String direction = (String) parameters.get("direction"); + List sendEncodings = new ArrayList<>(); + RtpTransceiver.RtpTransceiverInit init = null; + + if (streamIds == null) { + streamIds = new ArrayList(); + } + + if (direction == null) { + direction = "sendrecv"; + } + + if (encodingsParams != null) { + for (int i = 0; i < encodingsParams.size(); i++) { + Map params = encodingsParams.get(i); + sendEncodings.add(mapToEncoding(params)); + } + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction), streamIds, sendEncodings); + } else { + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction), streamIds); + } + return init; + } + + private RtpParameters updateRtpParameters(RtpParameters parameters, Map newParameters) { + // new + final List> encodings = (List>) newParameters.get("encodings"); + // current + final List nativeEncodings = parameters.encodings; + + String degradationPreference = (String) newParameters.get("degradationPreference"); + if (degradationPreference != null) { + parameters.degradationPreference = 
RtpParameters.DegradationPreference.valueOf(degradationPreference.toUpperCase().replace("-", "_")); + } + + for (Map encoding : encodings) { + RtpParameters.Encoding currentParams = null; + String rid = (String) encoding.get("rid"); + + // find by rid + if (rid != null) { + for (RtpParameters.Encoding x : nativeEncodings) { + if (rid.equals(x.rid)) { + currentParams = x; + break; + } + } + } + + // fall back to index + if (currentParams == null) { + int idx = encodings.indexOf(encoding); + if (idx < nativeEncodings.size()) { + currentParams = nativeEncodings.get(idx); + } + } + + if (currentParams != null) { + Boolean active = (Boolean) encoding.get("active"); + if (active != null) currentParams.active = active; + Integer maxBitrate = (Integer) encoding.get("maxBitrate"); + if (maxBitrate != null) currentParams.maxBitrateBps = maxBitrate; + Integer minBitrate = (Integer) encoding.get("minBitrate"); + if (minBitrate != null) currentParams.minBitrateBps = minBitrate; + Integer maxFramerate = (Integer) encoding.get("maxFramerate"); + if (maxFramerate != null) currentParams.maxFramerate = maxFramerate; + Integer numTemporalLayers = (Integer) encoding.get("numTemporalLayers"); + if (numTemporalLayers != null) currentParams.numTemporalLayers = numTemporalLayers; + Double scaleResolutionDownBy = (Double) encoding.get("scaleResolutionDownBy"); + if (scaleResolutionDownBy != null) + currentParams.scaleResolutionDownBy = scaleResolutionDownBy; + } + } + + return parameters; + } + + private Map rtpParametersToMap(RtpParameters rtpParameters) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("transactionId", rtpParameters.transactionId); + if(rtpParameters.degradationPreference != null) { + info.putString("degradationPreference", rtpParameters.degradationPreference.name().toLowerCase().replace("_", "-")); + } + ConstraintsMap rtcp = new ConstraintsMap(); + rtcp.putString("cname", rtpParameters.getRtcp().getCname()); + rtcp.putBoolean("reducedSize", 
rtpParameters.getRtcp().getReducedSize()); + info.putMap("rtcp", rtcp.toMap()); + + ConstraintsArray headerExtensions = new ConstraintsArray(); + for (RtpParameters.HeaderExtension extension : rtpParameters.getHeaderExtensions()) { + ConstraintsMap map = new ConstraintsMap(); + map.putString("uri", extension.getUri()); + map.putInt("id", extension.getId()); + map.putBoolean("encrypted", extension.getEncrypted()); + headerExtensions.pushMap(map); + } + info.putArray("headerExtensions", headerExtensions.toArrayList()); + + ConstraintsArray encodings = new ConstraintsArray(); + for (RtpParameters.Encoding encoding : rtpParameters.encodings) { + ConstraintsMap map = new ConstraintsMap(); + map.putBoolean("active", encoding.active); + if (encoding.rid != null) { + map.putString("rid", encoding.rid); + } + if (encoding.maxBitrateBps != null) { + map.putInt("maxBitrate", encoding.maxBitrateBps); + } + if (encoding.minBitrateBps != null) { + map.putInt("minBitrate", encoding.minBitrateBps); + } + if (encoding.maxFramerate != null) { + map.putInt("maxFramerate", encoding.maxFramerate); + } + if (encoding.numTemporalLayers != null) { + map.putInt("numTemporalLayers", encoding.numTemporalLayers); + } + if (encoding.scaleResolutionDownBy != null) { + map.putDouble("scaleResolutionDownBy", encoding.scaleResolutionDownBy); + } + if (encoding.ssrc != null) { + map.putLong("ssrc", encoding.ssrc); + } + encodings.pushMap(map); + } + info.putArray("encodings", encodings.toArrayList()); + + ConstraintsArray codecs = new ConstraintsArray(); + for (RtpParameters.Codec codec : rtpParameters.codecs) { + ConstraintsMap map = new ConstraintsMap(); + map.putString("name", codec.name); + map.putInt("payloadType", codec.payloadType); + map.putInt("clockRate", codec.clockRate); + if (codec.numChannels != null) { + map.putInt("numChannels", codec.numChannels); + } + map.putMap("parameters", new HashMap(codec.parameters)); + try { + Field field = codec.getClass().getDeclaredField("kind"); + 
field.setAccessible(true); + if (field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO)) { + map.putString("kind", "audio"); + } else if (field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO)) { + map.putString("kind", "video"); + } + } catch (NoSuchFieldException e1) { + e1.printStackTrace(); + } catch (IllegalArgumentException e1) { + e1.printStackTrace(); + } catch (IllegalAccessException e1) { + e1.printStackTrace(); + } + codecs.pushMap(map); + } + + info.putArray("codecs", codecs.toArrayList()); + return info.toMap(); + } + @Nullable - private String iceGatheringStateString(PeerConnection.IceGatheringState iceGatheringState) { - switch (iceGatheringState) { - case NEW: - return "new"; - case GATHERING: - return "gathering"; - case COMPLETE: - return "complete"; + private Map mediaStreamToMap(MediaStream stream) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("streamId", stream.getId()); + params.putString("ownerTag", id); + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + + for (MediaStreamTrack track : stream.audioTracks) { + audioTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); } - return null; + + for (MediaStreamTrack track : stream.videoTracks) { + videoTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); + } + + params.putArray("audioTracks", audioTracks.toArrayList()); + params.putArray("videoTracks", videoTracks.toArrayList()); + return params.toMap(); } @Nullable - private String signalingStateString(PeerConnection.SignalingState signalingState) { - switch (signalingState) { - case STABLE: - return "stable"; - case HAVE_LOCAL_OFFER: - return "have-local-offer"; - case HAVE_LOCAL_PRANSWER: - return "have-local-pranswer"; - case HAVE_REMOTE_OFFER: - return "have-remote-offer"; - case HAVE_REMOTE_PRANSWER: - return "have-remote-pranswer"; - case CLOSED: - return "closed"; + private Map mediaTrackToMap(MediaStreamTrack 
track) { + ConstraintsMap info = new ConstraintsMap(); + if (track != null) { + info.putString("id", track.id()); + info.putString("label", track.getClass() == VideoTrack.class ? "video" : "audio"); + info.putString("kind", track.kind()); + info.putBoolean("enabled", track.enabled()); + info.putString("readyState", track.state().toString()); } - return null; + return info.toMap(); + } + + private Map dtmfSenderToMap(DtmfSender dtmfSender, String id) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("dtmfSenderId", id); + if (dtmfSender != null) { + info.putInt("interToneGap", dtmfSender.interToneGap()); + info.putInt("duration", dtmfSender.duration()); + } + return info.toMap(); + } + + private Map rtpSenderToMap(RtpSender sender) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("senderId", sender.id()); + info.putBoolean("ownsTrack", true); + info.putMap("dtmfSender", dtmfSenderToMap(sender.dtmf(), sender.id())); + info.putMap("rtpParameters", rtpParametersToMap(sender.getParameters())); + info.putMap("track", mediaTrackToMap(sender.track())); + return info.toMap(); + } + + private Map rtpReceiverToMap(RtpReceiver receiver) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("receiverId", receiver.id()); + info.putMap("rtpParameters", rtpParametersToMap(receiver.getParameters())); + info.putMap("track", mediaTrackToMap(receiver.track())); + return info.toMap(); + } + + Map transceiverToMap(String transceiverId, RtpTransceiver transceiver) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("transceiverId", transceiverId); + if (transceiver.getMid() == null) { + info.putString("mid", ""); + } else { + info.putString("mid", transceiver.getMid()); + } + info.putString("direction", transceiverDirectionString(transceiver.getDirection())); + info.putMap("sender", rtpSenderToMap(transceiver.getSender())); + info.putMap("receiver", rtpReceiverToMap(transceiver.getReceiver())); + return info.toMap(); + } + + Map 
candidateToMap(IceCandidate candidate) { + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); + candidateParams.putString("sdpMid", candidate.sdpMid); + candidateParams.putString("candidate", candidate.sdp); + return candidateParams.toMap(); + } + + public void addTrack(MediaStreamTrack track, List streamIds, Result result) { + RtpSender sender = peerConnection.addTrack(track, streamIds); + result.success(rtpSenderToMap(sender)); + } + + public void removeTrack(String senderId, Result result) { + RtpSender sender = getRtpSenderById(senderId); + if (sender == null) { + resultError("removeTrack", "sender is null", result); + return; + } + boolean res = peerConnection.removeTrack(sender); + Map params = new HashMap<>(); + params.put("result", res); + result.success(params); + } + + public void addTransceiver(MediaStreamTrack track, Map transceiverInit, Result result) { + RtpTransceiver transceiver; + if (transceiverInit != null) { + transceiver = peerConnection.addTransceiver(track, mapToRtpTransceiverInit(transceiverInit)); + } else { + transceiver = peerConnection.addTransceiver(track); + } + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = stateProvider.getNextStreamUUID(); + } + transceivers.put(transceiverId, transceiver); + result.success(transceiverToMap(transceiverId, transceiver)); + } + + public void addTransceiverOfType(String mediaType, Map transceiverInit, Result result) { + RtpTransceiver transceiver; + if (transceiverInit != null) { + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType), mapToRtpTransceiverInit(transceiverInit)); + } else { + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType)); + } + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = stateProvider.getNextStreamUUID(); + } + transceivers.put(transceiverId, transceiver); + 
result.success(transceiverToMap(transceiverId, transceiver)); + } + + public void rtpTransceiverSetDirection(String direction, String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverSetDirection", "transceiver is null", result); + return; + } + transceiver.setDirection(stringToTransceiverDirection(direction)); + result.success(null); + } + + public void rtpTransceiverSetCodecPreferences(String transceiverId, List> codecs, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverSetCodecPreferences", "transceiver is null", result); + return; + } + List preferedCodecs = new ArrayList<>(); + for(Map codec : codecs) { + RtpCapabilities.CodecCapability codecCapability = new RtpCapabilities.CodecCapability(); + String mimeType = (String) codec.get("mimeType"); + List mimeTypeParts = Arrays.asList(mimeType.split("/")); + codecCapability.name = mimeTypeParts.get(1); + codecCapability.kind = stringToMediaType(mimeTypeParts.get(0)); + codecCapability.mimeType = mimeType; + codecCapability.clockRate = (int) codec.get("clockRate"); + if(codec.get("numChannels") != null) + codecCapability.numChannels = (int) codec.get("numChannels"); + if(codec.get("sdpFmtpLine") != null && codec.get("sdpFmtpLine") != "") { + String sdpFmtpLine = (String) codec.get("sdpFmtpLine"); + codecCapability.parameters = new HashMap<>(); + String[] parameters = sdpFmtpLine.split(";"); + for(String parameter : parameters) { + if(parameter.contains("=")) { + List parameterParts = Arrays.asList(parameter.split("=")); + codecCapability.parameters.put(parameterParts.get(0), parameterParts.get(1)); + } else { + codecCapability.parameters.put("", parameter); + } + } + } else { + codecCapability.parameters = new HashMap<>(); + } + preferedCodecs.add(codecCapability); + } + 
transceiver.setCodecPreferences(preferedCodecs); + result.success(null); + } + + public void rtpTransceiverGetDirection(String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverGetDirection", "transceiver is null", result); + return; + } + ConstraintsMap params = new ConstraintsMap(); + params.putString("result", transceiverDirectionString(transceiver.getDirection())); + result.success(params.toMap()); + } + + public void rtpTransceiverGetCurrentDirection(String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverGetCurrentDirection", "transceiver is null", result); + return; + } + RtpTransceiver.RtpTransceiverDirection direction = transceiver.getCurrentDirection(); + if (direction == null) { + result.success(null); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("result", transceiverDirectionString(direction)); + result.success(params.toMap()); + } + } + + public void rtpTransceiverStop(String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverStop", "transceiver is null", result); + return; + } + transceiver.stop(); + result.success(null); + } + + public void rtpSenderSetParameters(String rtpSenderId, Map parameters, Result result) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetParameters", "sender is null", result); + return; + } + final RtpParameters updatedParameters = updateRtpParameters(sender.getParameters(), parameters); + final Boolean success = sender.setParameters(updatedParameters); + ConstraintsMap params = new ConstraintsMap(); + params.putBoolean("result", success); + result.success(params.toMap()); + } + + public void rtpSenderSetTrack(String 
rtpSenderId, MediaStreamTrack track, Result result, boolean replace) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetTrack", "sender is null", result); + return; + } + sender.setTrack(track, false); + result.success(null); + } + + public void rtpSenderSetStreams(String rtpSenderId, List streamIds, Result result) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetStream", "sender is null", result); + return; + } + sender.setStreams(streamIds); + result.success(null); } + + public void getSenders(Result result) { + List senders = peerConnection.getSenders(); + ConstraintsArray sendersParams = new ConstraintsArray(); + for (RtpSender sender : senders) { + sendersParams.pushMap(new ConstraintsMap(rtpSenderToMap(sender))); + } + ConstraintsMap params = new ConstraintsMap(); + params.putArray("senders", sendersParams.toArrayList()); + result.success(params.toMap()); + } + + public void getReceivers(Result result) { + List receivers = peerConnection.getReceivers(); + ConstraintsArray receiversParams = new ConstraintsArray(); + for (RtpReceiver receiver : receivers) { + receiversParams.pushMap(new ConstraintsMap(rtpReceiverToMap(receiver))); + } + ConstraintsMap params = new ConstraintsMap(); + params.putArray("receivers", receiversParams.toArrayList()); + result.success(params.toMap()); + } + + public void getTransceivers(Result result) { + List transceivers = peerConnection.getTransceivers(); + ConstraintsArray transceiversParams = new ConstraintsArray(); + for (RtpTransceiver transceiver : transceivers) { + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = stateProvider.getNextStreamUUID(); + this.transceivers.put(transceiverId,transceiver); + } + transceiversParams.pushMap(new ConstraintsMap(transceiverToMap(transceiverId, transceiver))); + } + ConstraintsMap params = new ConstraintsMap(); + 
params.putArray("transceivers", transceiversParams.toArrayList()); + result.success(params.toMap()); + } + + protected MediaStreamTrack getTransceiversTrack(String trackId) { + if (this.configuration.sdpSemantics != PeerConnection.SdpSemantics.UNIFIED_PLAN) { + return null; + } + MediaStreamTrack track = null; + List transceivers = peerConnection.getTransceivers(); + for (RtpTransceiver transceiver : transceivers) { + RtpReceiver receiver = transceiver.getReceiver(); + if (receiver != null) { + if (receiver.track() != null && receiver.track().id().equals(trackId)) { + track = receiver.track(); + break; + } + } + } + return track; + } + + public String getNextDataChannelUUID() { + String uuid; + + do { + uuid = UUID.randomUUID().toString(); + } while (dataChannels.get(uuid) != null); + + return uuid; + } + } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt b/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt new file mode 100644 index 0000000000..7ad366d387 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt @@ -0,0 +1,237 @@ +package com.cloudwebrtc.webrtc + +import org.webrtc.* +import java.util.concurrent.Callable +import java.util.concurrent.ExecutorService +import java.util.concurrent.Executors + +/* +Copyright 2017, Lyo Kato (Original Author) +Copyright 2017-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ */ +internal class SimulcastVideoEncoderFactoryWrapper( + sharedContext: EglBase.Context?, + enableIntelVp8Encoder: Boolean, + enableH264HighProfile: Boolean +) : VideoEncoderFactory { + + /** + * Factory that prioritizes software encoder. + * + * When the selected codec can't be handled by the software encoder, + * it uses the hardware encoder as a fallback. However, this class is + * primarily used to address an issue in libwebrtc, and does not have + * purposeful usecase itself. + * + * To use simulcast in libwebrtc, SimulcastEncoderAdapter is used. + * SimulcastEncoderAdapter takes in a primary and fallback encoder. + * If HardwareVideoEncoderFactory and SoftwareVideoEncoderFactory are + * passed in directly as primary and fallback, when H.264 is used, + * libwebrtc will crash. + * + * This is because SoftwareVideoEncoderFactory does not handle H.264, + * so [SoftwareVideoEncoderFactory.createEncoder] returns null, and + * the libwebrtc side does not handle nulls, regardless of whether the + * fallback is actually used or not. + * + * To avoid nulls, we simply pass responsibility over to the HardwareVideoEncoderFactory. + * This results in HardwareVideoEncoderFactory being both the primary and fallback, + * but there aren't any specific problems in doing so. + */ + private class FallbackFactory(private val hardwareVideoEncoderFactory: VideoEncoderFactory) : + VideoEncoderFactory { + + private val softwareVideoEncoderFactory: VideoEncoderFactory = SoftwareVideoEncoderFactory() + + override fun createEncoder(info: VideoCodecInfo): VideoEncoder? 
{ + val softwareEncoder = softwareVideoEncoderFactory.createEncoder(info) + val hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info) + return if (hardwareEncoder != null && softwareEncoder != null) { + VideoEncoderFallback(hardwareEncoder, softwareEncoder) + } else { + softwareEncoder ?: hardwareEncoder + } + } + + override fun getSupportedCodecs(): Array { + val supportedCodecInfos: MutableList = mutableListOf() + supportedCodecInfos.addAll(softwareVideoEncoderFactory.supportedCodecs) + supportedCodecInfos.addAll(hardwareVideoEncoderFactory.supportedCodecs) + return supportedCodecInfos.toTypedArray() + } + + } + + /** + * Wraps each stream encoder and performs the following: + * - Starts up a single thread + * - When the width/height from [initEncode] doesn't match the frame buffer's, + * scales the frame prior to encoding. + * - Always calls the encoder on the thread. + */ + private class StreamEncoderWrapper(private val encoder: VideoEncoder) : VideoEncoder { + + val executor: ExecutorService = Executors.newSingleThreadExecutor() + var streamSettings: VideoEncoder.Settings? = null + + override fun initEncode( + settings: VideoEncoder.Settings, + callback: VideoEncoder.Callback? 
+ ): VideoCodecStatus { + streamSettings = settings + val future = executor.submit(Callable { + // LKLog.i { + // """initEncode() thread=${Thread.currentThread().name} [${Thread.currentThread().id}] + // | encoder=${encoder.implementationName} + // | streamSettings: + // | numberOfCores=${settings.numberOfCores} + // | width=${settings.width} + // | height=${settings.height} + // | startBitrate=${settings.startBitrate} + // | maxFramerate=${settings.maxFramerate} + // | automaticResizeOn=${settings.automaticResizeOn} + // | numberOfSimulcastStreams=${settings.numberOfSimulcastStreams} + // | lossNotification=${settings.capabilities.lossNotification} + // """.trimMargin() + // } + return@Callable encoder.initEncode(settings, callback) + }) + return future.get() + } + + override fun release(): VideoCodecStatus { + val future = executor.submit(Callable { return@Callable encoder.release() }) + return future.get() + } + + override fun encode( + frame: VideoFrame, + encodeInfo: VideoEncoder.EncodeInfo? + ): VideoCodecStatus { + val future = executor.submit(Callable { + //LKLog.d { "encode() buffer=${frame.buffer}, thread=${Thread.currentThread().name} " + + // "[${Thread.currentThread().id}]" } + if (streamSettings == null) { + return@Callable encoder.encode(frame, encodeInfo) + } else if (frame.buffer.width == streamSettings!!.width) { + return@Callable encoder.encode(frame, encodeInfo) + } else { + // The incoming buffer is different than the streamSettings received in initEncode() + // Need to scale. + val originalBuffer = frame.buffer + // TODO: Do we need to handle when the scale factor is weird? 
+ val adaptedBuffer = originalBuffer.cropAndScale( + 0, 0, originalBuffer.width, originalBuffer.height, + streamSettings!!.width, streamSettings!!.height + ) + val adaptedFrame = VideoFrame(adaptedBuffer, frame.rotation, frame.timestampNs) + val result = encoder.encode(adaptedFrame, encodeInfo) + adaptedBuffer.release() + return@Callable result + } + }) + return future.get() + } + + override fun setRateAllocation( + allocation: VideoEncoder.BitrateAllocation?, + frameRate: Int + ): VideoCodecStatus { + val future = executor.submit(Callable { + return@Callable encoder.setRateAllocation( + allocation, + frameRate + ) + }) + return future.get() + } + + override fun getScalingSettings(): VideoEncoder.ScalingSettings { + val future = executor.submit(Callable { return@Callable encoder.scalingSettings }) + return future.get() + } + + override fun getImplementationName(): String { + val future = executor.submit(Callable { return@Callable encoder.implementationName }) + return future.get() + } + + override fun createNative(webrtcEnvRef: Long): Long { + val future = executor.submit(Callable { return@Callable encoder.createNative(webrtcEnvRef) }) + return future.get() + } + + override fun isHardwareEncoder(): Boolean { + val future = executor.submit(Callable { return@Callable encoder.isHardwareEncoder }) + return future.get() + } + + override fun setRates(rcParameters: VideoEncoder.RateControlParameters?): VideoCodecStatus { + val future = executor.submit(Callable { return@Callable encoder.setRates(rcParameters) }) + return future.get() + } + + override fun getResolutionBitrateLimits(): Array { + val future = executor.submit(Callable { return@Callable encoder.resolutionBitrateLimits }) + return future.get() + } + + override fun getEncoderInfo(): VideoEncoder.EncoderInfo { + val future = executor.submit(Callable { return@Callable encoder.encoderInfo }) + return future.get() + } + } + + private class StreamEncoderWrapperFactory(private val factory: VideoEncoderFactory) : + 
VideoEncoderFactory { + override fun createEncoder(videoCodecInfo: VideoCodecInfo?): VideoEncoder? { + val encoder = factory.createEncoder(videoCodecInfo) + if (encoder == null) { + return null + } + if (encoder is WrappedNativeVideoEncoder) { + return encoder + } + return StreamEncoderWrapper(encoder) + } + + override fun getSupportedCodecs(): Array { + return factory.supportedCodecs + } + } + + + private val primary: VideoEncoderFactory + private val fallback: VideoEncoderFactory + private val native: SimulcastVideoEncoderFactory + + init { + val hardwareVideoEncoderFactory = HardwareVideoEncoderFactory( + sharedContext, enableIntelVp8Encoder, enableH264HighProfile + ) + primary = StreamEncoderWrapperFactory(hardwareVideoEncoderFactory) + fallback = StreamEncoderWrapperFactory(FallbackFactory(primary)) + native = SimulcastVideoEncoderFactory(primary, fallback) + } + + override fun createEncoder(info: VideoCodecInfo?): VideoEncoder? { + return native.createEncoder(info) + } + + override fun getSupportedCodecs(): Array { + return native.supportedCodecs + } + +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java b/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java index 6c0c9f3a5b..0471f21983 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java @@ -1,12 +1,16 @@ package com.cloudwebrtc.webrtc; import android.app.Activity; +import android.content.Context; + import androidx.annotation.Nullable; import java.util.Map; import org.webrtc.MediaStream; import org.webrtc.MediaStreamTrack; import org.webrtc.PeerConnectionFactory; +import io.flutter.plugin.common.BinaryMessenger; + /** * Provides interested components with access to the current application state. 
* @@ -14,9 +18,11 @@ */ public interface StateProvider { - Map getLocalStreams(); + boolean putLocalStream(String streamId, MediaStream stream); - Map getLocalTracks(); + boolean putLocalTrack(String trackId, LocalTrack track); + + LocalTrack getLocalTrack(String trackId); String getNextStreamUUID(); @@ -24,6 +30,13 @@ public interface StateProvider { PeerConnectionFactory getPeerConnectionFactory(); + PeerConnectionObserver getPeerConnectionObserver(String peerConnectionId); + @Nullable Activity getActivity(); + + @Nullable + Context getApplicationContext(); + + BinaryMessenger getMessenger(); } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java index 32b4745b75..faa783a351 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java @@ -1,6 +1,7 @@ package com.cloudwebrtc.webrtc; import android.graphics.SurfaceTexture; +import android.view.Surface; import org.webrtc.EglBase; import org.webrtc.EglRenderer; @@ -11,6 +12,8 @@ import java.util.concurrent.CountDownLatch; +import io.flutter.view.TextureRegistry; + /** * Display the video stream on a Surface. * renderFrame() is asynchronous to avoid blocking the calling thread. @@ -95,16 +98,35 @@ public void pauseVideo() { // VideoSink interface. 
@Override public void onFrame(VideoFrame frame) { + if(surface == null) { + producer.setSize(frame.getRotatedWidth(),frame.getRotatedHeight()); + surface = producer.getSurface(); + createEglSurface(surface); + } updateFrameDimensionsAndReportEvents(frame); super.onFrame(frame); } - private SurfaceTexture texture; + private Surface surface = null; - public void surfaceCreated(final SurfaceTexture texture) { + private TextureRegistry.SurfaceProducer producer; + + public void surfaceCreated(final TextureRegistry.SurfaceProducer producer) { ThreadUtils.checkIsOnMainThread(); - this.texture = texture; - createEglSurface(texture); + this.producer = producer; + this.producer.setCallback( + new TextureRegistry.SurfaceProducer.Callback() { + @Override + public void onSurfaceAvailable() { + // Do surface initialization here, and draw the current frame. + } + + @Override + public void onSurfaceCleanup() { + surfaceDestroyed(); + } + } + ); } public void surfaceDestroyed() { @@ -112,6 +134,7 @@ public void surfaceDestroyed() { final CountDownLatch completionLatch = new CountDownLatch(1); releaseEglSurface(completionLatch::countDown); ThreadUtils.awaitUninterruptibly(completionLatch); + surface = null; } // Update frame dimensions and report any changes to |rendererEvents|. 
@@ -135,7 +158,7 @@ private void updateFrameDimensionsAndReportEvents(VideoFrame frame) { } rotatedFrameWidth = frame.getRotatedWidth(); rotatedFrameHeight = frame.getRotatedHeight(); - texture.setDefaultBufferSize(rotatedFrameWidth, rotatedFrameHeight); + producer.setSize(rotatedFrameWidth, rotatedFrameHeight); frameRotation = frame.getRotation(); } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java new file mode 100644 index 0000000000..df6a7cd7c8 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java @@ -0,0 +1,40 @@ +package com.cloudwebrtc.webrtc.audio; + +import androidx.annotation.Nullable; + +import com.twilio.audioswitch.AudioDevice; + +public enum AudioDeviceKind { + BLUETOOTH("bluetooth", AudioDevice.BluetoothHeadset.class), + WIRED_HEADSET("wired-headset", AudioDevice.WiredHeadset.class), + SPEAKER("speaker", AudioDevice.Speakerphone.class), + EARPIECE("earpiece", AudioDevice.Earpiece.class); + + public final String typeName; + public final Class audioDeviceClass; + + AudioDeviceKind(String typeName, Class audioDeviceClass) { + this.typeName = typeName; + this.audioDeviceClass = audioDeviceClass; + } + + @Nullable + public static AudioDeviceKind fromAudioDevice(AudioDevice audioDevice) { + for (AudioDeviceKind kind : values()) { + if (kind.audioDeviceClass.equals(audioDevice.getClass())) { + return kind; + } + } + return null; + } + + @Nullable + public static AudioDeviceKind fromTypeName(String typeName) { + for (AudioDeviceKind kind : values()) { + if (kind.typeName.equals(typeName)) { + return kind; + } + } + return null; + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingAdapter.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingAdapter.java new file mode 100644 index 0000000000..b91409cb7b --- /dev/null +++ 
b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingAdapter.java @@ -0,0 +1,59 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.ExternalAudioProcessingFactory; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +public class AudioProcessingAdapter implements ExternalAudioProcessingFactory.AudioProcessing { + public interface ExternalAudioFrameProcessing { + void initialize(int sampleRateHz, int numChannels); + + void reset(int newRate); + + void process(int numBands, int numFrames, ByteBuffer buffer); + } + + public AudioProcessingAdapter() {} + List audioProcessors = new ArrayList<>(); + + public void addProcessor(ExternalAudioFrameProcessing audioProcessor) { + synchronized (audioProcessors) { + audioProcessors.add(audioProcessor); + } + } + + public void removeProcessor(ExternalAudioFrameProcessing audioProcessor) { + synchronized (audioProcessors) { + audioProcessors.remove(audioProcessor); + } + } + + @Override + public void initialize(int sampleRateHz, int numChannels) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.initialize(sampleRateHz, numChannels); + } + } + } + + @Override + public void reset(int newRate) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.reset(newRate); + } + } + } + + @Override + public void process(int numBands, int numFrames, ByteBuffer buffer) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.process(numBands, numFrames, buffer); + } + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingController.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingController.java new file mode 100644 index 0000000000..a84589b600 --- /dev/null +++ 
b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingController.java @@ -0,0 +1,24 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.ExternalAudioProcessingFactory; + +public class AudioProcessingController { + /** + * This is the audio processing module that will be applied to the audio stream after it is captured from the microphone. + * This is useful for adding echo cancellation, noise suppression, etc. + */ + public final AudioProcessingAdapter capturePostProcessing = new AudioProcessingAdapter(); + /** + * This is the audio processing module that will be applied to the audio stream before it is rendered to the speaker. + */ + public final AudioProcessingAdapter renderPreProcessing = new AudioProcessingAdapter(); + + public ExternalAudioProcessingFactory externalAudioProcessingFactory; + + public AudioProcessingController() { + this.externalAudioProcessingFactory = new ExternalAudioProcessingFactory(); + this.externalAudioProcessingFactory.setCapturePostProcessing(capturePostProcessing); + this.externalAudioProcessingFactory.setRenderPreProcessing(renderPreProcessing); + } + +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java new file mode 100644 index 0000000000..a2da4c088e --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java @@ -0,0 +1,410 @@ +package com.cloudwebrtc.webrtc.audio; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.media.AudioAttributes; +import android.media.AudioManager; +import android.os.Build; +import android.os.Handler; +import android.os.Looper; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +import com.twilio.audioswitch.AudioDevice; +import com.twilio.audioswitch.AudioSwitch; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; 
+ +import kotlin.Unit; +import kotlin.jvm.functions.Function2; + +public class AudioSwitchManager { + + public static final String TAG = "AudioSwitchManager"; + + @SuppressLint("StaticFieldLeak") + public static AudioSwitchManager instance; + @NonNull + private final Context context; + @NonNull + private final AudioManager audioManager; + + public boolean loggingEnabled; + private boolean isActive = false; + @NonNull + public Function2< + ? super List, + ? super AudioDevice, + Unit> audioDeviceChangeListener = (devices, currentDevice) -> null; + + @NonNull + public AudioManager.OnAudioFocusChangeListener audioFocusChangeListener = (i -> { + }); + + @NonNull + public List> preferredDeviceList; + + // AudioSwitch is not threadsafe, so all calls should be done on the main thread. + private final Handler handler = new Handler(Looper.getMainLooper()); + + @Nullable + private AudioSwitch audioSwitch; + + /** + * When true, AudioSwitchManager will request audio focus on start and abandon on stop. + *
+ * Defaults to true. + */ + private boolean manageAudioFocus = true; + + /** + * The audio focus mode to use while started. + *
+ * Defaults to AudioManager.AUDIOFOCUS_GAIN. + */ + private int focusMode = AudioManager.AUDIOFOCUS_GAIN; + + /** + * The audio mode to use while started. + *
+ * Defaults to AudioManager.MODE_NORMAL. + */ + private int audioMode = AudioManager.MODE_IN_COMMUNICATION; + + /** + * The audio stream type to use when requesting audio focus on pre-O devices. + *
+ * Defaults to AudioManager.STREAM_VOICE_CALL. + *
+ * Refer to this compatibility table + * to ensure that your values match between android versions. + *
+ * Note: Manual audio routing may not work appropriately when using non-default values. + */ + private int audioStreamType = AudioManager.STREAM_VOICE_CALL; + + /** + * The audio attribute usage type to use when requesting audio focus on devices O and beyond. + *
+ * Defaults to AudioAttributes.USAGE_VOICE_COMMUNICATION. + *
+ * Refer to this compatibility table + * to ensure that your values match between android versions. + *
+ * Note: Manual audio routing may not work appropriately when using non-default values. + */ + private int audioAttributeUsageType = AudioAttributes.USAGE_VOICE_COMMUNICATION; + + /** + * The audio attribute content type to use when requesting audio focus on devices O and beyond. + *
+ * Defaults to AudioAttributes.CONTENT_TYPE_SPEECH. + *
+ * Refer to this compatibility table + * to ensure that your values match between android versions. + *
+ * Note: Manual audio routing may not work appropriately when using non-default values. + */ + private int audioAttributeContentType = AudioAttributes.CONTENT_TYPE_SPEECH; + + /** + * On certain Android devices, audio routing does not function properly and bluetooth microphones will not work + * unless audio mode is set to [AudioManager.MODE_IN_COMMUNICATION] or [AudioManager.MODE_IN_CALL]. + * + * AudioSwitchManager by default will not handle audio routing in those cases to avoid audio issues. + * + * If this set to true, AudioSwitchManager will attempt to do audio routing, though behavior is undefined. + */ + private boolean forceHandleAudioRouting = false; + + public AudioSwitchManager(@NonNull Context context) { + this.context = context; + this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); + + preferredDeviceList = new ArrayList<>(); + preferredDeviceList.add(AudioDevice.BluetoothHeadset.class); + preferredDeviceList.add(AudioDevice.WiredHeadset.class); + preferredDeviceList.add(AudioDevice.Speakerphone.class); + preferredDeviceList.add(AudioDevice.Earpiece.class); + initAudioSwitch(); + } + + private void initAudioSwitch() { + if (audioSwitch == null) { + handler.removeCallbacksAndMessages(null); + handler.postAtFrontOfQueue(() -> { + audioSwitch = new AudioSwitch( + context, + loggingEnabled, + audioFocusChangeListener, + preferredDeviceList + ); + audioSwitch.setManageAudioFocus(manageAudioFocus); + audioSwitch.setFocusMode(focusMode); + audioSwitch.setAudioMode(audioMode); + audioSwitch.setAudioStreamType(audioStreamType); + audioSwitch.setAudioAttributeContentType(audioAttributeContentType); + audioSwitch.setAudioAttributeUsageType(audioAttributeUsageType); + audioSwitch.setForceHandleAudioRouting(forceHandleAudioRouting); + audioSwitch.start(audioDeviceChangeListener); + }); + } + } + + public void start() { + if (audioSwitch != null) { + handler.removeCallbacksAndMessages(null); + handler.postAtFrontOfQueue(() -> { + if 
(!isActive) { + Objects.requireNonNull(audioSwitch).activate(); + isActive = true; + } + }); + } + } + + public void stop() { + if (audioSwitch != null) { + handler.removeCallbacksAndMessages(null); + handler.postAtFrontOfQueue(() -> { + if (isActive) { + Objects.requireNonNull(audioSwitch).deactivate(); + isActive = false; + } + }); + } + } + + public void setMicrophoneMute(boolean mute) { + audioManager.setMicrophoneMute(mute); + } + + @Nullable + public AudioDevice selectedAudioDevice() { + return Objects.requireNonNull(audioSwitch).getSelectedAudioDevice(); + } + + @NonNull + public List availableAudioDevices() { + return Objects.requireNonNull(audioSwitch).getAvailableAudioDevices(); + } + + public void selectAudioOutput(@NonNull Class audioDeviceClass) { + handler.post(() -> { + List devices = availableAudioDevices(); + AudioDevice audioDevice = null; + for (AudioDevice device : devices) { + if (device.getClass().equals(audioDeviceClass)) { + audioDevice = device; + break; + } + } + if (audioDevice != null) { + Objects.requireNonNull(audioSwitch).selectDevice(audioDevice); + } + }); + } + + private void updatePreferredDeviceList(boolean speakerOn) { + preferredDeviceList = new ArrayList<>(); + preferredDeviceList.add(AudioDevice.BluetoothHeadset.class); + preferredDeviceList.add(AudioDevice.WiredHeadset.class); + if (speakerOn) { + preferredDeviceList.add(AudioDevice.Speakerphone.class); + preferredDeviceList.add(AudioDevice.Earpiece.class); + } else { + preferredDeviceList.add(AudioDevice.Earpiece.class); + preferredDeviceList.add(AudioDevice.Speakerphone.class); + } + handler.post(() -> { + Objects.requireNonNull(audioSwitch).setPreferredDeviceList(preferredDeviceList); + }); + } + + public void enableSpeakerphone(boolean enable) { + updatePreferredDeviceList(enable); + if (enable) { + selectAudioOutput(AudioDevice.Speakerphone.class); + } else { + List devices = availableAudioDevices(); + AudioDevice audioDevice = null; + for (AudioDevice device : devices) 
{ + if (device.getClass().equals(AudioDevice.BluetoothHeadset.class)) { + audioDevice = device; + break; + } else if (device.getClass().equals(AudioDevice.WiredHeadset.class)) { + audioDevice = device; + break; + } else if (device.getClass().equals(AudioDevice.Earpiece.class)) { + audioDevice = device; + break; + } + } + if (audioDevice != null) { + selectAudioOutput(audioDevice.getClass()); + } else { + handler.post(() -> { + Objects.requireNonNull(audioSwitch).selectDevice(null); + }); + } + } + } + + public void enableSpeakerButPreferBluetooth() { + List devices = availableAudioDevices(); + AudioDevice audioDevice = null; + for (AudioDevice device : devices) { + if (device.getClass().equals(AudioDevice.BluetoothHeadset.class)) { + audioDevice = device; + break; + } else if (device.getClass().equals(AudioDevice.WiredHeadset.class)) { + audioDevice = device; + break; + } + } + + if (audioDevice == null) { + selectAudioOutput(AudioDevice.Speakerphone.class); + } else { + selectAudioOutput(audioDevice.getClass()); + } + } + + public void selectAudioOutput(@Nullable AudioDeviceKind kind) { + if (kind != null) { + selectAudioOutput(kind.audioDeviceClass); + } + } + + public void setAudioConfiguration(Map configuration) { + if (configuration == null) { + return; + } + + Boolean manageAudioFocus = null; + if (configuration.get("manageAudioFocus") instanceof Boolean) { + manageAudioFocus = (Boolean) configuration.get("manageAudioFocus"); + } + setManageAudioFocus(manageAudioFocus); + + String audioMode = null; + if (configuration.get("androidAudioMode") instanceof String) { + audioMode = (String) configuration.get("androidAudioMode"); + } + setAudioMode(audioMode); + + String focusMode = null; + if (configuration.get("androidAudioFocusMode") instanceof String) { + focusMode = (String) configuration.get("androidAudioFocusMode"); + } + setFocusMode(focusMode); + + String streamType = null; + if (configuration.get("androidAudioStreamType") instanceof String) { + streamType 
= (String) configuration.get("androidAudioStreamType"); + } + setAudioStreamType(streamType); + + String usageType = null; + if (configuration.get("androidAudioAttributesUsageType") instanceof String) { + usageType = (String) configuration.get("androidAudioAttributesUsageType"); + } + setAudioAttributesUsageType(usageType); + + String contentType = null; + if (configuration.get("androidAudioAttributesContentType") instanceof String) { + contentType = (String) configuration.get("androidAudioAttributesContentType"); + } + setAudioAttributesContentType(contentType); + + Boolean forceHandleAudioRouting = null; + if (configuration.get("forceHandleAudioRouting") instanceof Boolean) { + forceHandleAudioRouting = (Boolean) configuration.get("forceHandleAudioRouting"); + } + setForceHandleAudioRouting(forceHandleAudioRouting); + } + + public void setManageAudioFocus(@Nullable Boolean manage) { + if (manage != null && audioSwitch != null) { + this.manageAudioFocus = manage; + Objects.requireNonNull(audioSwitch).setManageAudioFocus(this.manageAudioFocus); + } + } + + public void setAudioMode(@Nullable String audioModeString) { + Integer audioMode = AudioUtils.getAudioModeForString(audioModeString); + + if (audioMode == null) { + return; + } + this.audioMode = audioMode; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setAudioMode(audioMode); + } + } + + public void setFocusMode(@Nullable String focusModeString) { + Integer focusMode = AudioUtils.getFocusModeForString(focusModeString); + + if (focusMode == null) { + return; + } + this.focusMode = focusMode; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setFocusMode(focusMode); + } + } + + public void setAudioStreamType(@Nullable String streamTypeString) { + Integer streamType = AudioUtils.getStreamTypeForString(streamTypeString); + + if (streamType == null) { + return; + } + this.audioStreamType = streamType; + if (audioSwitch != null) { + 
Objects.requireNonNull(audioSwitch).setAudioStreamType(this.audioStreamType); + } + } + + public void setAudioAttributesUsageType(@Nullable String usageTypeString) { + Integer usageType = AudioUtils.getAudioAttributesUsageTypeForString(usageTypeString); + + if (usageType == null) { + return; + } + this.audioAttributeUsageType = usageType; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setAudioAttributeUsageType(this.audioAttributeUsageType); + } + } + + public void setAudioAttributesContentType(@Nullable String contentTypeString) { + Integer contentType = AudioUtils.getAudioAttributesContentTypeFromString(contentTypeString); + + if (contentType == null) { + return; + } + this.audioAttributeContentType = contentType; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setAudioAttributeContentType(this.audioAttributeContentType); + } + } + + public void setForceHandleAudioRouting(@Nullable Boolean force) { + if (force != null && audioSwitch != null) { + this.forceHandleAudioRouting = force; + Objects.requireNonNull(audioSwitch).setForceHandleAudioRouting(this.forceHandleAudioRouting); + } + } + + public void clearCommunicationDevice() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + audioManager.clearCommunicationDevice(); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java new file mode 100644 index 0000000000..13dd4ba233 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java @@ -0,0 +1,264 @@ +package com.cloudwebrtc.webrtc.audio; + +import android.media.AudioAttributes; +import android.media.AudioDeviceInfo; +import android.media.AudioManager; +import android.os.Build; +import android.util.Log; + +import androidx.annotation.Nullable; + +public class AudioUtils { + + private static final String TAG = "AudioUtils"; + + @Nullable + public static Integer getAudioModeForString(@Nullable 
String audioModeString) { + if (audioModeString == null) { + return null; + } + + Integer audioMode = null; + switch (audioModeString) { + case "normal": + audioMode = AudioManager.MODE_NORMAL; + break; + case "callScreening": + audioMode = AudioManager.MODE_CALL_SCREENING; + break; + case "inCall": + audioMode = AudioManager.MODE_IN_CALL; + break; + case "inCommunication": + audioMode = AudioManager.MODE_IN_COMMUNICATION; + break; + case "ringtone": + audioMode = AudioManager.MODE_RINGTONE; + break; + default: + Log.w(TAG, "Unknown audio mode: " + audioModeString); + break; + } + + return audioMode; + } + + @Nullable + public static Integer getFocusModeForString(@Nullable String focusModeString) { + if (focusModeString == null) { + return null; + } + + Integer focusMode = null; + switch (focusModeString) { + case "gain": + focusMode = AudioManager.AUDIOFOCUS_GAIN; + break; + case "gainTransient": + focusMode = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT; + break; + case "gainTransientExclusive": + focusMode = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE; + break; + case "gainTransientMayDuck": + focusMode = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; + break; + case "loss": + focusMode = AudioManager.AUDIOFOCUS_LOSS; + break; + default: + Log.w(TAG, "Unknown audio focus mode: " + focusModeString); + break; + } + + return focusMode; + } + + @Nullable + public static Integer getStreamTypeForString(@Nullable String streamTypeString) { + if (streamTypeString == null) { + return null; + } + + Integer streamType = null; + switch (streamTypeString) { + case "accessibility": + streamType = AudioManager.STREAM_ACCESSIBILITY; + break; + case "alarm": + streamType = AudioManager.STREAM_ALARM; + break; + case "dtmf": + streamType = AudioManager.STREAM_DTMF; + break; + case "music": + streamType = AudioManager.STREAM_MUSIC; + break; + case "notification": + streamType = AudioManager.STREAM_NOTIFICATION; + break; + case "ring": + streamType = AudioManager.STREAM_RING; + 
break; + case "system": + streamType = AudioManager.STREAM_SYSTEM; + break; + case "voiceCall": + streamType = AudioManager.STREAM_VOICE_CALL; + break; + default: + Log.w(TAG, "Unknown audio stream type: " + streamTypeString); + break; + } + + return streamType; + } + + @Nullable + public static Integer getAudioAttributesUsageTypeForString(@Nullable String usageTypeString) { + + if (usageTypeString == null) { + return null; + } + + Integer usageType = null; + switch (usageTypeString) { + case "alarm": + usageType = AudioAttributes.USAGE_ALARM; + break; + case "assistanceAccessibility": + usageType = AudioAttributes.USAGE_ASSISTANCE_ACCESSIBILITY; + break; + case "assistanceNavigationGuidance": + usageType = AudioAttributes.USAGE_ASSISTANCE_NAVIGATION_GUIDANCE; + break; + case "assistanceSonification": + usageType = AudioAttributes.USAGE_ASSISTANCE_SONIFICATION; + break; + case "assistant": + usageType = AudioAttributes.USAGE_ASSISTANT; + break; + case "game": + usageType = AudioAttributes.USAGE_GAME; + break; + case "media": + usageType = AudioAttributes.USAGE_MEDIA; + break; + case "notification": + usageType = AudioAttributes.USAGE_NOTIFICATION; + break; + case "notificationEvent": + usageType = AudioAttributes.USAGE_NOTIFICATION_EVENT; + break; + case "notificationRingtone": + usageType = AudioAttributes.USAGE_NOTIFICATION_RINGTONE; + break; + case "unknown": + usageType = AudioAttributes.USAGE_UNKNOWN; + break; + case "voiceCommunication": + usageType = AudioAttributes.USAGE_VOICE_COMMUNICATION; + break; + case "voiceCommunicationSignalling": + usageType = AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING; + break; + default: + Log.w(TAG, "Unknown audio attributes usage type: " + usageTypeString); + break; + } + + return usageType; + } + + @Nullable + public static Integer getAudioAttributesContentTypeFromString(@Nullable String contentTypeString) { + + if (contentTypeString == null) { + return null; + } + + Integer contentType = null; + switch 
(contentTypeString) { + case "movie": + contentType = AudioAttributes.CONTENT_TYPE_MOVIE; + break; + case "music": + contentType = AudioAttributes.CONTENT_TYPE_MUSIC; + break; + case "sonification": + contentType = AudioAttributes.CONTENT_TYPE_SONIFICATION; + break; + case "speech": + contentType = AudioAttributes.CONTENT_TYPE_SPEECH; + break; + case "unknown": + contentType = AudioAttributes.CONTENT_TYPE_UNKNOWN; + break; + default: + Log.w(TAG, "Unknown audio attributes content type:" + contentTypeString); + break; + } + + return contentType; + + } + + static public String getAudioDeviceId(AudioDeviceInfo device) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + return "audio-1"; + } else { + + String address = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? "" : device.getAddress(); + String deviceId = "" + device.getId(); + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { + deviceId = "microphone-" + address; + } + if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + deviceId = "wired-headset"; + } + if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { + deviceId = "bluetooth"; + } + return deviceId; + } + } + + static public String getAudioGroupId(AudioDeviceInfo device) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + return "microphone"; + } else { + String groupId = "" + device.getType(); + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { + groupId = "microphone"; + } + if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + groupId = "wired-headset"; + } + if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { + groupId = "bluetooth"; + } + return groupId; + } + } + + static public String getAudioDeviceLabel(AudioDeviceInfo device) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + return "Audio"; + } else { + String address = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? 
"" : device.getAddress(); + String label = device.getProductName().toString(); + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { + label = "Built-in Microphone (" + address + ")"; + } + + if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + label = "Wired Headset Microphone"; + } + + if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { + label = device.getProductName().toString(); + } + return label; + } + } +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/LocalAudioTrack.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/LocalAudioTrack.java new file mode 100644 index 0000000000..165d15459f --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/LocalAudioTrack.java @@ -0,0 +1,73 @@ +package com.cloudwebrtc.webrtc.audio; + +import android.media.AudioFormat; +import android.os.SystemClock; + +import com.cloudwebrtc.webrtc.LocalTrack; + +import org.webrtc.AudioTrack; +import org.webrtc.AudioTrackSink; +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +/** + * LocalAudioTrack represents an audio track that is sourced from local audio capture. + */ +public class LocalAudioTrack + extends LocalTrack implements JavaAudioDeviceModule.SamplesReadyCallback { + public LocalAudioTrack(AudioTrack audioTrack) { + super(audioTrack); + } + + final List sinks = new ArrayList<>(); + + /** + * Add a sink to receive audio data from this track. + */ + public void addSink(AudioTrackSink sink) { + synchronized (sinks) { + sinks.add(sink); + } + } + + /** + * Remove a sink for this track. 
+ */ + public void removeSink(AudioTrackSink sink) { + synchronized (sinks) { + sinks.remove(sink); + } + } + + private int getBytesPerSample(int audioFormat) { + switch (audioFormat) { + case AudioFormat.ENCODING_PCM_8BIT: + return 1; + case AudioFormat.ENCODING_PCM_16BIT: + case AudioFormat.ENCODING_IEC61937: + case AudioFormat.ENCODING_DEFAULT: + return 2; + case AudioFormat.ENCODING_PCM_FLOAT: + return 4; + default: + throw new IllegalArgumentException("Bad audio format " + audioFormat); + } + } + + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + int bitsPerSample = getBytesPerSample(audioSamples.getAudioFormat()) * 8; + int numFrames = audioSamples.getSampleRate() / 100; + long timestamp = SystemClock.elapsedRealtime(); + synchronized (sinks) { + for (AudioTrackSink sink : sinks) { + ByteBuffer byteBuffer = ByteBuffer.wrap(audioSamples.getData()); + sink.onData(byteBuffer, bitsPerSample, audioSamples.getSampleRate(), + audioSamples.getChannelCount(), numFrames, timestamp); + } + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java new file mode 100644 index 0000000000..25195104fc --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java @@ -0,0 +1,32 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.util.ArrayList; +import java.util.List; + +public class PlaybackSamplesReadyCallbackAdapter + implements JavaAudioDeviceModule.PlaybackSamplesReadyCallback { + public PlaybackSamplesReadyCallbackAdapter() {} + + List callbacks = new ArrayList<>(); + + public void addCallback(JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.add(callback); + } + } + + public void 
removeCallback(JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.remove(callback); + } + } + + @Override + public void onWebRtcAudioTrackSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + for (JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback : callbacks) { + callback.onWebRtcAudioTrackSamplesReady(audioSamples); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/RecordSamplesReadyCallbackAdapter.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/RecordSamplesReadyCallbackAdapter.java new file mode 100644 index 0000000000..959062a930 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/RecordSamplesReadyCallbackAdapter.java @@ -0,0 +1,34 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.util.ArrayList; +import java.util.List; + +public class RecordSamplesReadyCallbackAdapter + implements JavaAudioDeviceModule.SamplesReadyCallback { + public RecordSamplesReadyCallbackAdapter() {} + + List callbacks = new ArrayList<>(); + + public void addCallback(JavaAudioDeviceModule.SamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.add(callback); + } + } + + public void removeCallback(JavaAudioDeviceModule.SamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.remove(callback); + } + } + + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + synchronized (callbacks) { + for (JavaAudioDeviceModule.SamplesReadyCallback callback : callbacks) { + callback.onWebRtcAudioRecordSamplesReady(audioSamples); + } + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java index 0700318e4b..fb48c68a15 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java +++ 
b/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java @@ -18,11 +18,12 @@ import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.Arrays; import io.flutter.plugin.common.MethodChannel; public class FrameCapturer implements VideoSink { - private VideoTrack videoTrack; + private final VideoTrack videoTrack; private File file; private final MethodChannel.Result callback; private boolean gotFrame = false; @@ -55,15 +56,28 @@ public void onFrame(VideoFrame videoFrame) { final int chromaWidth = (width + 1) / 2; final int chromaHeight = (height + 1) / 2; final int minSize = width * height + chromaWidth * chromaHeight * 2; + ByteBuffer yuvBuffer = ByteBuffer.allocateDirect(minSize); + // NV21 is the same as NV12, only that V and U are stored in the reverse order + // NV21 (YYYYYYYYY:VUVU) + // NV12 (YYYYYYYYY:UVUV) + // Therefore we can use the NV12 helper, but swap the U and V input buffers YuvHelper.I420ToNV12(y, strides[0], v, strides[2], u, strides[1], yuvBuffer, width, height); + + // For some reason the ByteBuffer may have leading 0. We remove them as + // otherwise the + // image will be shifted + byte[] cleanedArray = Arrays.copyOfRange(yuvBuffer.array(), yuvBuffer.arrayOffset(), minSize); + YuvImage yuvImage = new YuvImage( - yuvBuffer.array(), + cleanedArray, ImageFormat.NV21, width, height, - strides - ); + // We omit the strides here. If they were included, the resulting image would + // have its colors offset. 
+ null); + i420Buffer.release(); videoFrame.release(); new Handler(Looper.getMainLooper()).post(() -> { videoTrack.removeSink(this); @@ -111,5 +125,4 @@ public void onFrame(VideoFrame videoFrame) { file = null; } } - } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java index 3ec5dd3e0c..030c36bf16 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java @@ -1,3 +1,5 @@ +// Modifications by Signify, Copyright 2025, Signify Holding - SPDX-License-Identifier: MIT + package com.cloudwebrtc.webrtc.record; import android.media.MediaCodec; @@ -19,6 +21,7 @@ import java.io.IOException; import java.nio.ByteBuffer; +import java.util.concurrent.CountDownLatch; class VideoFileRenderer implements VideoSink, SamplesReadyCallback { private static final String TAG = "VideoFileRenderer"; @@ -32,7 +35,7 @@ class VideoFileRenderer implements VideoSink, SamplesReadyCallback { private ByteBuffer[] audioInputBuffers; private ByteBuffer[] audioOutputBuffers; private EglBase eglBase; - private EglBase.Context sharedContext; + private final EglBase.Context sharedContext; private VideoFrameDrawer frameDrawer; // TODO: these ought to be configurable as well @@ -40,9 +43,10 @@ class VideoFileRenderer implements VideoSink, SamplesReadyCallback { private static final int FRAME_RATE = 30; // 30fps private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames - private MediaMuxer mediaMuxer; + private final MediaMuxer mediaMuxer; private MediaCodec encoder; - private MediaCodec.BufferInfo bufferInfo, audioBufferInfo; + private final MediaCodec.BufferInfo bufferInfo; + private MediaCodec.BufferInfo audioBufferInfo; private int trackIndex = -1; private int audioTrackIndex; private boolean isRunning = true; @@ -126,27 +130,50 @@ private void 
renderFrameOnRenderThread(VideoFrame frame) { /** * Release all resources. All already posted frames will be rendered first. */ + // Start Signify modification void release() { isRunning = false; - if (audioThreadHandler != null) + CountDownLatch latch = new CountDownLatch(audioThreadHandler != null ? 2 : 1); + if (audioThreadHandler != null) { audioThreadHandler.post(() -> { - if (audioEncoder != null) { - audioEncoder.stop(); - audioEncoder.release(); + try{ + if (audioEncoder != null) { + audioEncoder.stop(); + audioEncoder.release(); + } + audioThread.quit(); + } finally { + latch.countDown(); } - audioThread.quit(); }); + } + renderThreadHandler.post(() -> { - if (encoder != null) { - encoder.stop(); - encoder.release(); + try { + if (encoder != null) { + encoder.stop(); + encoder.release(); + } + eglBase.release(); + if (muxerStarted) { + mediaMuxer.stop(); + mediaMuxer.release(); + muxerStarted = false; + } + renderThread.quit(); + } finally { + latch.countDown(); } - eglBase.release(); - mediaMuxer.stop(); - mediaMuxer.release(); - renderThread.quit(); }); + + try { + latch.await(); + } catch (InterruptedException e) { + Log.e(TAG, "Release interrupted", e); + Thread.currentThread().interrupt(); + } } + // End Signify modification private boolean encoderStarted = false; private volatile boolean muxerStarted = false; @@ -173,7 +200,7 @@ private void drainEncoder() { Log.e(TAG, "encoder output format changed: " + newFormat); trackIndex = mediaMuxer.addTrack(newFormat); - if (audioTrackIndex != -1 && !muxerStarted) { + if (trackIndex != -1 && !muxerStarted) { mediaMuxer.start(); muxerStarted = true; } @@ -229,7 +256,7 @@ private void drainAudio() { Log.w(TAG, "encoder output format changed: " + newFormat); audioTrackIndex = mediaMuxer.addTrack(newFormat); - if (trackIndex != -1 && !muxerStarted) { + if (audioTrackIndex != -1 && !muxerStarted) { mediaMuxer.start(); muxerStarted = true; } diff --git 
a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java index eb16e20697..5a6d8b6d3b 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java @@ -72,7 +72,7 @@ public ObjectType getType(String name) { } else if (value instanceof Byte) { return ObjectType.Byte; } else { - throw new IllegalArgumentException("Invalid value " + value.toString() + " for key " + name + + throw new IllegalArgumentException("Invalid value " + value + " for key " + name + "contained in ConstraintsMap"); } } @@ -89,6 +89,10 @@ public void putInt(String key, int value) { mMap.put(key, value); } + public void putLong(String key, long value) { + mMap.put(key, value); + } + public void putString(String key, String value) { mMap.put(key, value); } @@ -124,4 +128,11 @@ public ConstraintsArray getArray(String name){ public ArrayList getListArray(String name){ return (ArrayList) mMap.get(name); } + + @Override + public String toString() { + return "ConstraintsMap{" + + "mMap=" + mMap + + '}'; + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java index ce41031a8c..3ba4ae9824 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java @@ -24,6 +24,11 @@ public class MediaConstraintsUtils { public static MediaConstraints parseMediaConstraints(ConstraintsMap constraints) { MediaConstraints mediaConstraints = new MediaConstraints(); + // TODO: change getUserMedia constraints format to support new syntax + // constraint format seems changed, and there is no mandatory any more. 
+ // and has a new syntax/attrs to specify resolution + // should change `parseConstraints()` according + // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints if (constraints.hasKey("mandatory") && constraints.getType("mandatory") == ObjectType.Map) { parseConstraints(constraints.getMap("mandatory"), diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java index c86d9c3a1c..5e9c8f6033 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java @@ -3,6 +3,7 @@ import android.app.Activity; import android.app.Fragment; import android.app.FragmentTransaction; +import android.content.Context; import android.content.pm.PackageManager; import android.os.Build; import android.os.Build.VERSION_CODES; @@ -12,6 +13,8 @@ import android.os.ResultReceiver; import androidx.annotation.NonNull; import androidx.annotation.RequiresApi; +import androidx.core.app.ActivityCompat; + import java.util.ArrayList; /** Helper module for dealing with dynamic permissions, introduced in Android M (API level 23). */ @@ -30,7 +33,7 @@ public class PermissionUtils { private static int requestCode; private static void requestPermissions( - Activity activity, String[] permissions, ResultReceiver resultReceiver) { + Context context, Activity activity, String[] permissions, ResultReceiver resultReceiver) { // Ask the Context whether we have already been granted the requested // permissions. 
int size = permissions.length; @@ -42,7 +45,11 @@ private static void requestPermissions( // No need to ask for permission on pre-Marshmallow if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) grantResult = PackageManager.PERMISSION_GRANTED; - else grantResult = activity.checkSelfPermission(permissions[i]); + else if (activity != null){ + grantResult = activity.checkSelfPermission(permissions[i]); + } else { + grantResult = ActivityCompat.checkSelfPermission(context, permissions[i]); + } grantResults[i] = grantResult; if (grantResult != PackageManager.PERMISSION_GRANTED) { @@ -64,7 +71,7 @@ private static void requestPermissions( // must still use old permissions model, regardless of the // Android version on the device. || Build.VERSION.SDK_INT < Build.VERSION_CODES.M - || activity.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.M) { + || context.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.M) { send(resultReceiver, requestCode, permissions, grantResults); return; } @@ -77,23 +84,29 @@ private static void requestPermissions( RequestPermissionsFragment fragment = new RequestPermissionsFragment(); fragment.setArguments(args); - FragmentTransaction transaction = - activity - .getFragmentManager() - .beginTransaction() - .add(fragment, fragment.getClass().getName() + "-" + requestCode); - - try { - transaction.commit(); - } catch (IllegalStateException ise) { - // Context is a Plugin, just send result back. - send(resultReceiver, requestCode, permissions, grantResults); + if(activity != null){ + FragmentTransaction transaction = + activity + .getFragmentManager() + .beginTransaction() + .add(fragment, fragment.getClass().getName() + "-" + requestCode); + + try { + transaction.commit(); + } catch (IllegalStateException ise) { + // Context is a Plugin, just send result back. 
+ send(resultReceiver, requestCode, permissions, grantResults); + } } } public static void requestPermissions( - final Activity activity, final String[] permissions, final Callback callback) { + final Context context, + final Activity activity, + final String[] permissions, + final Callback callback) { requestPermissions( + context, activity, permissions, new ResultReceiver(new Handler(Looper.getMainLooper())) { @@ -191,9 +204,10 @@ public void onRequestPermissionsResult( // the invocation so we have to redo the permission request. finish(); PermissionUtils.requestPermissions( - getActivity(), - args.getStringArray(PERMISSIONS), - (ResultReceiver) args.getParcelable(RESULT_RECEIVER)); + getContext(), + getActivity(), + args.getStringArray(PERMISSIONS), + (ResultReceiver) args.getParcelable(RESULT_RECEIVER)); } else { // We did not ask for all requested permissions, just the denied // ones. But when we send the result, we have to answer about diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCAudioManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCAudioManager.java deleted file mode 100644 index 0ea81cf5ce..0000000000 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCAudioManager.java +++ /dev/null @@ -1,590 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -package com.cloudwebrtc.webrtc.utils; - -import android.content.BroadcastReceiver; -import android.content.Context; -import android.content.Intent; -import android.content.IntentFilter; -import android.content.SharedPreferences; -import android.content.pm.PackageManager; -import android.media.AudioDeviceInfo; -import android.media.AudioManager; -import android.os.Build; -import android.preference.PreferenceManager; -import android.util.Log; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import com.cloudwebrtc.webrtc.utils.RTCUtils; -import org.webrtc.ThreadUtils; - -/** - * RTCAudioManager manages all audio related parts of the plugin. - */ -public class RTCAudioManager { - private static final String TAG = "RTCAudioManager"; - private static final String SPEAKERPHONE_AUTO = "auto"; - private static final String SPEAKERPHONE_TRUE = "true"; - private static final String SPEAKERPHONE_FALSE = "false"; - - /** - * AudioDevice is the names of possible audio devices that we currently - * support. - */ - public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE } - - /** AudioManager state. */ - public enum AudioManagerState { - UNINITIALIZED, - PREINITIALIZED, - RUNNING, - } - - /** Selected audio device change event. */ - public interface AudioManagerEvents { - // Callback fired once audio device is changed or list of available audio devices changed. - void onAudioDeviceChanged( - AudioDevice selectedAudioDevice, Set availableAudioDevices); - } - - private final Context appContext; - private AudioManager audioManager; - - private AudioManagerEvents audioManagerEvents; - private AudioManagerState amState; - private int savedAudioMode = AudioManager.MODE_INVALID; - private boolean savedIsSpeakerPhoneOn; - private boolean savedIsMicrophoneMute; - private boolean hasWiredHeadset; - - // Default audio device; speaker phone for video calls or earpiece for audio - // only calls. 
- private AudioDevice defaultAudioDevice; - - // Contains the currently selected audio device. - // This device is changed automatically using a certain scheme where e.g. - // a wired headset "wins" over speaker phone. It is also possible for a - // user to explicitly select a device (and overrid any predefined scheme). - // See |userSelectedAudioDevice| for details. - private AudioDevice selectedAudioDevice; - - // Contains the user-selected audio device which overrides the predefined - // selection scheme. - // TODO(henrika): always set to AudioDevice.NONE today. Add support for - // explicit selection based on choice by userSelectedAudioDevice. - private AudioDevice userSelectedAudioDevice; - - // Contains speakerphone setting: auto, true or false - private final String useSpeakerphone; - - // Proximity sensor object. It measures the proximity of an object in cm - // relative to the view screen of a device and can therefore be used to - // assist device switching (close to ear <=> use headset earpiece if - // available, far from ear <=> use speaker phone). - private RTCProximitySensor proximitySensor; - - // Handles all tasks related to Bluetooth headset devices. - private final RTCBluetoothManager bluetoothManager; - - // Contains a list of available audio devices. A Set collection is used to - // avoid duplicate elements. - private Set audioDevices = new HashSet<>(); - - // Broadcast receiver for wired headset intent broadcasts. - private BroadcastReceiver wiredHeadsetReceiver; - - // Callback method for changes in audio focus. - - private AudioManager.OnAudioFocusChangeListener audioFocusChangeListener; - - /** - * This method is called when the proximity sensor reports a state change, - * e.g. from "NEAR to FAR" or from "FAR to NEAR". 
- */ - private void onProximitySensorChangedState() { - if (!useSpeakerphone.equals(SPEAKERPHONE_AUTO)) { - return; - } - - // The proximity sensor should only be activated when there are exactly two - // available audio devices. - if (audioDevices.size() == 2 && audioDevices.contains(RTCAudioManager.AudioDevice.EARPIECE) - && audioDevices.contains(RTCAudioManager.AudioDevice.SPEAKER_PHONE)) { - if (proximitySensor.sensorReportsNearState()) { - // Sensor reports that a "handset is being held up to a person's ear", - // or "something is covering the light sensor". - setAudioDeviceInternal(RTCAudioManager.AudioDevice.EARPIECE); - } else { - // Sensor reports that a "handset is removed from a person's ear", or - // "the light sensor is no longer covered". - setAudioDeviceInternal(RTCAudioManager.AudioDevice.SPEAKER_PHONE); - } - } - } - - /* Receiver which handles changes in wired headset availability. */ - private class WiredHeadsetReceiver extends BroadcastReceiver { - private static final int STATE_UNPLUGGED = 0; - private static final int STATE_PLUGGED = 1; - private static final int HAS_NO_MIC = 0; - private static final int HAS_MIC = 1; - - @Override - public void onReceive(Context context, Intent intent) { - int state = intent.getIntExtra("state", STATE_UNPLUGGED); - int microphone = intent.getIntExtra("microphone", HAS_NO_MIC); - String name = intent.getStringExtra("name"); - Log.d(TAG, "WiredHeadsetReceiver.onReceive" + RTCUtils.getThreadInfo() + ": " - + "a=" + intent.getAction() + ", s=" - + (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m=" - + (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb=" - + isInitialStickyBroadcast()); - hasWiredHeadset = (state == STATE_PLUGGED); - updateAudioDeviceState(); - } - } - - /** Construction. 
*/ - public static RTCAudioManager create(Context context) { - return new RTCAudioManager(context); - } - - private RTCAudioManager(Context context) { - Log.d(TAG, "ctor"); - ThreadUtils.checkIsOnMainThread(); - appContext = context; - audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE)); - bluetoothManager = RTCBluetoothManager.create(context, this); - wiredHeadsetReceiver = new WiredHeadsetReceiver(); - amState = AudioManagerState.UNINITIALIZED; - - useSpeakerphone = SPEAKERPHONE_AUTO; - - Log.d(TAG, "useSpeakerphone: " + useSpeakerphone); - if (useSpeakerphone.equals(SPEAKERPHONE_FALSE)) { - defaultAudioDevice = AudioDevice.EARPIECE; - } else { - defaultAudioDevice = AudioDevice.SPEAKER_PHONE; - } - - // Create and initialize the proximity sensor. - // Tablet devices (e.g. Nexus 7) does not support proximity sensors. - // Note that, the sensor will not be active until start() has been called. - proximitySensor = RTCProximitySensor.create(context, - // This method will be called each time a state change is detected. - // Example: user holds his hand over the device (closer than ~5 cm), - // or removes his hand from the device. - this ::onProximitySensorChangedState); - - Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice); - RTCUtils.logDeviceInfo(TAG); - } - - @SuppressWarnings("deprecation") // TODO(henrika): audioManager.requestAudioFocus() is deprecated. - public void start(AudioManagerEvents audioManagerEvents) { - Log.d(TAG, "start"); - ThreadUtils.checkIsOnMainThread(); - if (amState == AudioManagerState.RUNNING) { - Log.e(TAG, "AudioManager is already active"); - return; - } - // TODO(henrika): perhaps call new method called preInitAudio() here if UNINITIALIZED. - - Log.d(TAG, "AudioManager starts..."); - this.audioManagerEvents = audioManagerEvents; - amState = AudioManagerState.RUNNING; - - // Store current audio state so we can restore it when stop() is called. 
- savedAudioMode = audioManager.getMode(); - savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn(); - savedIsMicrophoneMute = audioManager.isMicrophoneMute(); - hasWiredHeadset = hasWiredHeadset(); - - // Create an AudioManager.OnAudioFocusChangeListener instance. - audioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() { - // Called on the listener to notify if the audio focus for this listener has been changed. - // The |focusChange| value indicates whether the focus was gained, whether the focus was lost, - // and whether that loss is transient, or whether the new focus holder will hold it for an - // unknown amount of time. - // TODO(henrika): possibly extend support of handling audio-focus changes. Only contains - // logging for now. - @Override - public void onAudioFocusChange(int focusChange) { - final String typeOfChange; - switch (focusChange) { - case AudioManager.AUDIOFOCUS_GAIN: - typeOfChange = "AUDIOFOCUS_GAIN"; - break; - case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT: - typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT"; - break; - case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE: - typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE"; - break; - case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK: - typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK"; - break; - case AudioManager.AUDIOFOCUS_LOSS: - typeOfChange = "AUDIOFOCUS_LOSS"; - break; - case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT: - typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT"; - break; - case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK: - typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK"; - break; - default: - typeOfChange = "AUDIOFOCUS_INVALID"; - break; - } - Log.d(TAG, "onAudioFocusChange: " + typeOfChange); - } - }; - - // Request audio playout focus (without ducking) and install listener for changes in focus. 
- int result = audioManager.requestAudioFocus(audioFocusChangeListener, - AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT); - if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) { - Log.d(TAG, "Audio focus request granted for VOICE_CALL streams"); - } else { - Log.e(TAG, "Audio focus request failed"); - } - - // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is - // required to be in this mode when playout and/or recording starts for - // best possible VoIP performance. - audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION); - - // Always disable microphone mute during a WebRTC call. - setMicrophoneMute(false); - - // Set initial device states. - userSelectedAudioDevice = AudioDevice.NONE; - selectedAudioDevice = AudioDevice.NONE; - audioDevices.clear(); - - // Initialize and start Bluetooth if a BT device is available or initiate - // detection of new (enabled) BT devices. - bluetoothManager.start(); - - // Do initial selection of audio device. This setting can later be changed - // either by adding/removing a BT or wired headset or by covering/uncovering - // the proximity sensor. - updateAudioDeviceState(); - - // Register receiver for broadcast intents related to adding/removing a - // wired headset. - registerReceiver(wiredHeadsetReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG)); - Log.d(TAG, "AudioManager started"); - } - - @SuppressWarnings("deprecation") // TODO(henrika): audioManager.abandonAudioFocus() is deprecated. - public void stop() { - Log.d(TAG, "stop"); - ThreadUtils.checkIsOnMainThread(); - if (amState != AudioManagerState.RUNNING) { - Log.e(TAG, "Trying to stop AudioManager in incorrect state: " + amState); - return; - } - amState = AudioManagerState.UNINITIALIZED; - - unregisterReceiver(wiredHeadsetReceiver); - - bluetoothManager.stop(); - - // Restore previously stored audio states. 
- setSpeakerphoneOn(savedIsSpeakerPhoneOn); - setMicrophoneMute(savedIsMicrophoneMute); - audioManager.setMode(savedAudioMode); - - // Abandon audio focus. Gives the previous focus owner, if any, focus. - audioManager.abandonAudioFocus(audioFocusChangeListener); - audioFocusChangeListener = null; - Log.d(TAG, "Abandoned audio focus for VOICE_CALL streams"); - - if (proximitySensor != null) { - proximitySensor.stop(); - proximitySensor = null; - } - - audioManagerEvents = null; - Log.d(TAG, "AudioManager stopped"); - } - - /** Changes selection of the currently active audio device. */ - private void setAudioDeviceInternal(AudioDevice device) { - Log.d(TAG, "setAudioDeviceInternal(device=" + device + ")"); - RTCUtils.assertIsTrue(audioDevices.contains(device)); - - switch (device) { - case SPEAKER_PHONE: - setSpeakerphoneOn(true); - break; - case EARPIECE: - setSpeakerphoneOn(false); - break; - case WIRED_HEADSET: - setSpeakerphoneOn(false); - break; - case BLUETOOTH: - setSpeakerphoneOn(false); - break; - default: - Log.e(TAG, "Invalid audio device selection"); - break; - } - selectedAudioDevice = device; - } - - /** - * Changes default audio device. - * TODO(henrika): add usage of this method in the RTCMobile client. - */ - public void setDefaultAudioDevice(AudioDevice defaultDevice) { - ThreadUtils.checkIsOnMainThread(); - switch (defaultDevice) { - case SPEAKER_PHONE: - defaultAudioDevice = defaultDevice; - break; - case EARPIECE: - if (hasEarpiece()) { - defaultAudioDevice = defaultDevice; - } else { - defaultAudioDevice = AudioDevice.SPEAKER_PHONE; - } - break; - default: - Log.e(TAG, "Invalid default audio device selection"); - break; - } - Log.d(TAG, "setDefaultAudioDevice(device=" + defaultAudioDevice + ")"); - updateAudioDeviceState(); - } - - /** Changes selection of the currently active audio device. 
*/ - public void selectAudioDevice(AudioDevice device) { - ThreadUtils.checkIsOnMainThread(); - if (!audioDevices.contains(device)) { - Log.e(TAG, "Can not select " + device + " from available " + audioDevices); - } - userSelectedAudioDevice = device; - updateAudioDeviceState(); - } - - /** Returns current set of available/selectable audio devices. */ - public Set getAudioDevices() { - ThreadUtils.checkIsOnMainThread(); - return Collections.unmodifiableSet(new HashSet<>(audioDevices)); - } - - /** Returns the currently selected audio device. */ - public AudioDevice getSelectedAudioDevice() { - ThreadUtils.checkIsOnMainThread(); - return selectedAudioDevice; - } - - /** Helper method for receiver registration. */ - private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) { - appContext.registerReceiver(receiver, filter); - } - - /** Helper method for unregistration of an existing receiver. */ - private void unregisterReceiver(BroadcastReceiver receiver) { - appContext.unregisterReceiver(receiver); - } - - /** Sets the speaker phone mode. */ - public void setSpeakerphoneOn(boolean on) { - boolean wasOn = audioManager.isSpeakerphoneOn(); - if (wasOn == on) { - return; - } - audioManager.setSpeakerphoneOn(on); - } - - /** Sets the microphone mute state. */ - public void setMicrophoneMute(boolean on) { - boolean wasMuted = audioManager.isMicrophoneMute(); - if (wasMuted == on) { - return; - } - audioManager.setMicrophoneMute(on); - } - - /** Gets the current earpiece state. */ - private boolean hasEarpiece() { - return appContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY); - } - - /** - * Checks whether a wired headset is connected or not. - * This is not a valid indication that audio playback is actually over - * the wired headset as audio routing depends on other conditions. We - * only use it as an early indicator (during initialization) of an attached - * wired headset. 
- */ - @Deprecated - private boolean hasWiredHeadset() { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { - return audioManager.isWiredHeadsetOn(); - } else { - final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL); - for (AudioDeviceInfo device : devices) { - final int type = device.getType(); - if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) { - Log.d(TAG, "hasWiredHeadset: found wired headset"); - return true; - } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) { - Log.d(TAG, "hasWiredHeadset: found USB audio device"); - return true; - } - } - return false; - } - } - - /** - * Updates list of possible audio devices and make new device selection. - * TODO(henrika): add unit test to verify all state transitions. - */ - public void updateAudioDeviceState() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "--- updateAudioDeviceState: " - + "wired headset=" + hasWiredHeadset + ", " - + "BT state=" + bluetoothManager.getState()); - Log.d(TAG, "Device status: " - + "available=" + audioDevices + ", " - + "selected=" + selectedAudioDevice + ", " - + "user selected=" + userSelectedAudioDevice); - - // Check if any Bluetooth headset is connected. The internal BT state will - // change accordingly. - // TODO(henrika): perhaps wrap required state into BT manager. - if (bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE - || bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_UNAVAILABLE - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_DISCONNECTING) { - bluetoothManager.updateDevice(); - } - - // Update the set of available audio devices. 
- Set newAudioDevices = new HashSet<>(); - - if (bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTING - || bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE) { - newAudioDevices.add(AudioDevice.BLUETOOTH); - } - - if (hasWiredHeadset) { - // If a wired headset is connected, then it is the only possible option. - newAudioDevices.add(AudioDevice.WIRED_HEADSET); - } else { - // No wired headset, hence the audio-device list can contain speaker - // phone (on a tablet), or speaker phone and earpiece (on mobile phone). - newAudioDevices.add(AudioDevice.SPEAKER_PHONE); - if (hasEarpiece()) { - newAudioDevices.add(AudioDevice.EARPIECE); - } - } - // Store state which is set to true if the device list has changed. - boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices); - // Update the existing audio device set. - audioDevices = newAudioDevices; - // Correct user selected audio devices if needed. - if (bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_UNAVAILABLE - && userSelectedAudioDevice == AudioDevice.BLUETOOTH) { - // If BT is not available, it can't be the user selection. - userSelectedAudioDevice = AudioDevice.NONE; - } - if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) { - // If user selected speaker phone, but then plugged wired headset then make - // wired headset as user selected device. - userSelectedAudioDevice = AudioDevice.WIRED_HEADSET; - } - if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) { - // If user selected wired headset, but then unplugged wired headset then make - // speaker phone as user selected device. - userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE; - } - - // Need to start Bluetooth if it is available and user either selected it explicitly or - // user did not select any output device. 
- boolean needBluetoothAudioStart = - bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE - && (userSelectedAudioDevice == AudioDevice.NONE - || userSelectedAudioDevice == AudioDevice.BLUETOOTH); - - // Need to stop Bluetooth audio if user selected different device and - // Bluetooth SCO connection is established or in the process. - boolean needBluetoothAudioStop = - (bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTING) - && (userSelectedAudioDevice != AudioDevice.NONE - && userSelectedAudioDevice != AudioDevice.BLUETOOTH); - - if (bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTING - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED) { - Log.d(TAG, "Need BT audio: start=" + needBluetoothAudioStart + ", " - + "stop=" + needBluetoothAudioStop + ", " - + "BT state=" + bluetoothManager.getState()); - } - - // Start or stop Bluetooth SCO connection given states set earlier. - if (needBluetoothAudioStop) { - bluetoothManager.stopScoAudio(); - bluetoothManager.updateDevice(); - } - - if (needBluetoothAudioStart && !needBluetoothAudioStop) { - // Attempt to start Bluetooth SCO audio (takes a few second to start). - if (!bluetoothManager.startScoAudio()) { - // Remove BLUETOOTH from list of available devices since SCO failed. - audioDevices.remove(AudioDevice.BLUETOOTH); - audioDeviceSetUpdated = true; - } - } - - // Update selected audio device. - final AudioDevice newAudioDevice; - - if (bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED) { - // If a Bluetooth is connected, then it should be used as output audio - // device. Note that it is not sufficient that a headset is available; - // an active SCO channel must also be up and running. 
- newAudioDevice = AudioDevice.BLUETOOTH; - } else if (hasWiredHeadset) { - // If a wired headset is connected, but Bluetooth is not, then wired headset is used as - // audio device. - newAudioDevice = AudioDevice.WIRED_HEADSET; - } else { - // No wired headset and no Bluetooth, hence the audio-device list can contain speaker - // phone (on a tablet), or speaker phone and earpiece (on mobile phone). - // |defaultAudioDevice| contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE - // depending on the user's selection. - newAudioDevice = defaultAudioDevice; - } - // Switch to new device but only if there has been any changes. - if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) { - // Do the required device switch. - setAudioDeviceInternal(newAudioDevice); - Log.d(TAG, "New device status: " - + "available=" + audioDevices + ", " - + "selected=" + newAudioDevice); - if (audioManagerEvents != null) { - // Notify a listening client that audio device has been changed. - audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices); - } - } - Log.d(TAG, "--- updateAudioDeviceState done"); - } -} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCBluetoothManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCBluetoothManager.java deleted file mode 100644 index 2cd72d6987..0000000000 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCBluetoothManager.java +++ /dev/null @@ -1,531 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -package com.cloudwebrtc.webrtc.utils; - -import android.annotation.SuppressLint; -import android.bluetooth.BluetoothAdapter; -import android.bluetooth.BluetoothDevice; -import android.bluetooth.BluetoothHeadset; -import android.bluetooth.BluetoothProfile; -import android.content.BroadcastReceiver; -import android.content.Context; -import android.content.Intent; -import android.content.IntentFilter; -import android.content.pm.PackageManager; -import android.media.AudioManager; -import android.os.Handler; -import android.os.Looper; -import android.os.Process; -import android.util.Log; -import java.util.List; -import java.util.Set; -import com.cloudwebrtc.webrtc.utils.RTCUtils; -import org.webrtc.ThreadUtils; - -/** - * RTCProximitySensor manages functions related to Bluetoth devices in the - * RTC demo. - */ -public class RTCBluetoothManager { - private static final String TAG = "RTCBluetoothManager"; - - // Timeout interval for starting or stopping audio to a Bluetooth SCO device. - private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000; - // Maximum number of SCO connection attempts. - private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2; - - // Bluetooth connection state. - public enum State { - // Bluetooth is not available; no adapter or Bluetooth is off. - UNINITIALIZED, - // Bluetooth error happened when trying to start Bluetooth. - ERROR, - // Bluetooth proxy object for the Headset profile exists, but no connected headset devices, - // SCO is not started or disconnected. - HEADSET_UNAVAILABLE, - // Bluetooth proxy object for the Headset profile connected, connected Bluetooth headset - // present, but SCO is not started or disconnected. - HEADSET_AVAILABLE, - // Bluetooth audio SCO connection with remote device is closing. - SCO_DISCONNECTING, - // Bluetooth audio SCO connection with remote device is initiated. - SCO_CONNECTING, - // Bluetooth audio SCO connection with remote device is established. 
- SCO_CONNECTED - } - - private final Context apprtcContext; - private final RTCAudioManager apprtcAudioManager; - - private final AudioManager audioManager; - private final Handler handler; - - int scoConnectionAttempts; - private State bluetoothState; - private final BluetoothProfile.ServiceListener bluetoothServiceListener; - - private BluetoothAdapter bluetoothAdapter; - - private BluetoothHeadset bluetoothHeadset; - - private BluetoothDevice bluetoothDevice; - private final BroadcastReceiver bluetoothHeadsetReceiver; - - // Runs when the Bluetooth timeout expires. We use that timeout after calling - // startScoAudio() or stopScoAudio() because we're not guaranteed to get a - // callback after those calls. - private final Runnable bluetoothTimeoutRunnable = new Runnable() { - @Override - public void run() { - bluetoothTimeout(); - } - }; - - /** - * Implementation of an interface that notifies BluetoothProfile IPC clients when they have been - * connected to or disconnected from the service. - */ - private class BluetoothServiceListener implements BluetoothProfile.ServiceListener { - @Override - // Called to notify the client when the proxy object has been connected to the service. - // Once we have the profile proxy object, we can use it to monitor the state of the - // connection and perform other operations that are relevant to the headset profile. - public void onServiceConnected(int profile, BluetoothProfile proxy) { - if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) { - return; - } - Log.d(TAG, "BluetoothServiceListener.onServiceConnected: BT state=" + bluetoothState); - // Android only supports one connected Bluetooth Headset at a time. - bluetoothHeadset = (BluetoothHeadset) proxy; - updateAudioDeviceState(); - Log.d(TAG, "onServiceConnected done: BT state=" + bluetoothState); - } - - @Override - /** Notifies the client when the proxy object has been disconnected from the service. 
*/ - public void onServiceDisconnected(int profile) { - if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) { - return; - } - Log.d(TAG, "BluetoothServiceListener.onServiceDisconnected: BT state=" + bluetoothState); - stopScoAudio(); - bluetoothHeadset = null; - bluetoothDevice = null; - bluetoothState = State.HEADSET_UNAVAILABLE; - updateAudioDeviceState(); - Log.d(TAG, "onServiceDisconnected done: BT state=" + bluetoothState); - } - } - - // Intent broadcast receiver which handles changes in Bluetooth device availability. - // Detects headset changes and Bluetooth SCO state changes. - private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver { - @Override - public void onReceive(Context context, Intent intent) { - if (bluetoothState == State.UNINITIALIZED) { - return; - } - final String action = intent.getAction(); - // Change in connection state of the Headset profile. Note that the - // change does not tell us anything about whether we're streaming - // audio to BT over SCO. Typically received when user turns on a BT - // headset while audio is active using another audio device. - if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) { - final int state = - intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED); - Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: " - + "a=ACTION_CONNECTION_STATE_CHANGED, " - + "s=" + stateToString(state) + ", " - + "sb=" + isInitialStickyBroadcast() + ", " - + "BT state: " + bluetoothState); - if (state == BluetoothHeadset.STATE_CONNECTED) { - scoConnectionAttempts = 0; - updateAudioDeviceState(); - } else if (state == BluetoothHeadset.STATE_CONNECTING) { - // No action needed. - } else if (state == BluetoothHeadset.STATE_DISCONNECTING) { - // No action needed. - } else if (state == BluetoothHeadset.STATE_DISCONNECTED) { - // Bluetooth is probably powered off during the call. 
- stopScoAudio(); - updateAudioDeviceState(); - } - // Change in the audio (SCO) connection state of the Headset profile. - // Typically received after call to startScoAudio() has finalized. - } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) { - final int state = intent.getIntExtra( - BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED); - Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: " - + "a=ACTION_AUDIO_STATE_CHANGED, " - + "s=" + stateToString(state) + ", " - + "sb=" + isInitialStickyBroadcast() + ", " - + "BT state: " + bluetoothState); - if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) { - cancelTimer(); - if (bluetoothState == State.SCO_CONNECTING) { - Log.d(TAG, "+++ Bluetooth audio SCO is now connected"); - bluetoothState = State.SCO_CONNECTED; - scoConnectionAttempts = 0; - updateAudioDeviceState(); - } else { - Log.w(TAG, "Unexpected state BluetoothHeadset.STATE_AUDIO_CONNECTED"); - } - } else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) { - Log.d(TAG, "+++ Bluetooth audio SCO is now connecting..."); - } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) { - Log.d(TAG, "+++ Bluetooth audio SCO is now disconnected"); - if (isInitialStickyBroadcast()) { - Log.d(TAG, "Ignore STATE_AUDIO_DISCONNECTED initial sticky broadcast."); - return; - } - updateAudioDeviceState(); - } - } - Log.d(TAG, "onReceive done: BT state=" + bluetoothState); - } - } - - /** Construction. 
*/ - static RTCBluetoothManager create(Context context, RTCAudioManager audioManager) { - Log.d(TAG, "create" + RTCUtils.getThreadInfo()); - return new RTCBluetoothManager(context, audioManager); - } - - protected RTCBluetoothManager(Context context, RTCAudioManager audioManager) { - Log.d(TAG, "ctor"); - ThreadUtils.checkIsOnMainThread(); - apprtcContext = context; - apprtcAudioManager = audioManager; - this.audioManager = getAudioManager(context); - bluetoothState = State.UNINITIALIZED; - bluetoothServiceListener = new BluetoothServiceListener(); - bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver(); - handler = new Handler(Looper.getMainLooper()); - } - - /** Returns the internal state. */ - public State getState() { - ThreadUtils.checkIsOnMainThread(); - return bluetoothState; - } - - /** - * Activates components required to detect Bluetooth devices and to enable - * BT SCO (audio is routed via BT SCO) for the headset profile. The end - * state will be HEADSET_UNAVAILABLE but a state machine has started which - * will start a state change sequence where the final outcome depends on - * if/when the BT headset is enabled. - * Example of state change sequence when start() is called while BT device - * is connected and enabled: - * UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE --> - * SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO. - * Note that the RTCAudioManager is also involved in driving this state - * change. - */ - public void start() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "start"); - if (!hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) { - Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission"); - return; - } - if (bluetoothState != State.UNINITIALIZED) { - Log.w(TAG, "Invalid BT state"); - return; - } - bluetoothHeadset = null; - bluetoothDevice = null; - scoConnectionAttempts = 0; - // Get a handle to the default local Bluetooth adapter. 
- bluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); - if (bluetoothAdapter == null) { - Log.w(TAG, "Device does not support Bluetooth"); - return; - } - // Ensure that the device supports use of BT SCO audio for off call use cases. - if (!audioManager.isBluetoothScoAvailableOffCall()) { - Log.e(TAG, "Bluetooth SCO audio is not available off call"); - return; - } - logBluetoothAdapterInfo(bluetoothAdapter); - // Establish a connection to the HEADSET profile (includes both Bluetooth Headset and - // Hands-Free) proxy object and install a listener. - if (!getBluetoothProfileProxy( - apprtcContext, bluetoothServiceListener, BluetoothProfile.HEADSET)) { - Log.e(TAG, "BluetoothAdapter.getProfileProxy(HEADSET) failed"); - return; - } - // Register receivers for BluetoothHeadset change notifications. - IntentFilter bluetoothHeadsetFilter = new IntentFilter(); - // Register receiver for change in connection state of the Headset profile. - bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED); - // Register receiver for change in audio connection state of the Headset profile. - bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED); - registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter); - Log.d(TAG, "HEADSET profile state: " - + stateToString(bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET))); - Log.d(TAG, "Bluetooth proxy for headset profile has started"); - bluetoothState = State.HEADSET_UNAVAILABLE; - Log.d(TAG, "start done: BT state=" + bluetoothState); - } - - /** Stops and closes all components related to Bluetooth audio. */ - public void stop() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "stop: BT state=" + bluetoothState); - if (bluetoothAdapter == null) { - return; - } - // Stop BT SCO connection with remote device if needed. - stopScoAudio(); - // Close down remaining BT resources. 
- if (bluetoothState == State.UNINITIALIZED) { - return; - } - unregisterReceiver(bluetoothHeadsetReceiver); - cancelTimer(); - if (bluetoothHeadset != null) { - bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset); - bluetoothHeadset = null; - } - bluetoothAdapter = null; - bluetoothDevice = null; - bluetoothState = State.UNINITIALIZED; - Log.d(TAG, "stop done: BT state=" + bluetoothState); - } - - /** - * Starts Bluetooth SCO connection with remote device. - * Note that the phone application always has the priority on the usage of the SCO connection - * for telephony. If this method is called while the phone is in call it will be ignored. - * Similarly, if a call is received or sent while an application is using the SCO connection, - * the connection will be lost for the application and NOT returned automatically when the call - * ends. Also note that: up to and including API version JELLY_BEAN_MR1, this method initiates a - * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO - * audio connection is established. - * TODO(henrika): should we add support for virtual voice call to BT headset also for JBMR2 and - * higher. It might be required to initiates a virtual voice call since many devices do not - * accept SCO audio without a "call". - */ - public boolean startScoAudio() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "startSco: BT state=" + bluetoothState + ", " - + "attempts: " + scoConnectionAttempts + ", " - + "SCO is on: " + isScoOn()); - if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) { - Log.e(TAG, "BT SCO connection fails - no more attempts"); - return false; - } - if (bluetoothState != State.HEADSET_AVAILABLE) { - Log.e(TAG, "BT SCO connection fails - no headset available"); - return false; - } - // Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED. 
- Log.d(TAG, "Starting Bluetooth SCO and waits for ACTION_AUDIO_STATE_CHANGED..."); - // The SCO connection establishment can take several seconds, hence we cannot rely on the - // connection to be available when the method returns but instead register to receive the - // intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED. - bluetoothState = State.SCO_CONNECTING; - audioManager.startBluetoothSco(); - audioManager.setBluetoothScoOn(true); - scoConnectionAttempts++; - startTimer(); - Log.d(TAG, "startScoAudio done: BT state=" + bluetoothState + ", " - + "SCO is on: " + isScoOn()); - return true; - } - - /** Stops Bluetooth SCO connection with remote device. */ - public void stopScoAudio() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", " - + "SCO is on: " + isScoOn()); - if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) { - return; - } - cancelTimer(); - audioManager.stopBluetoothSco(); - audioManager.setBluetoothScoOn(false); - bluetoothState = State.SCO_DISCONNECTING; - Log.d(TAG, "stopScoAudio done: BT state=" + bluetoothState + ", " - + "SCO is on: " + isScoOn()); - } - - /** - * Use the BluetoothHeadset proxy object (controls the Bluetooth Headset - * Service via IPC) to update the list of connected devices for the HEADSET - * profile. The internal state will change to HEADSET_UNAVAILABLE or to - * HEADSET_AVAILABLE and |bluetoothDevice| will be mapped to the connected - * device if available. - */ - public void updateDevice() { - if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) { - return; - } - Log.d(TAG, "updateDevice"); - // Get connected devices for the headset profile. Returns the set of - // devices which are in state STATE_CONNECTED. The BluetoothDevice class - // is just a thin wrapper for a Bluetooth hardware address. 
- List devices = bluetoothHeadset.getConnectedDevices(); - if (devices.isEmpty()) { - bluetoothDevice = null; - bluetoothState = State.HEADSET_UNAVAILABLE; - Log.d(TAG, "No connected bluetooth headset"); - } else { - // Always use first device in list. Android only supports one device. - bluetoothDevice = devices.get(0); - bluetoothState = State.HEADSET_AVAILABLE; - Log.d(TAG, "Connected bluetooth headset: " - + "name=" + bluetoothDevice.getName() + ", " - + "state=" + stateToString(bluetoothHeadset.getConnectionState(bluetoothDevice)) - + ", SCO audio=" + bluetoothHeadset.isAudioConnected(bluetoothDevice)); - } - Log.d(TAG, "updateDevice done: BT state=" + bluetoothState); - } - - /** - * Stubs for test mocks. - */ - - protected AudioManager getAudioManager(Context context) { - return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); - } - - protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) { - apprtcContext.registerReceiver(receiver, filter); - } - - protected void unregisterReceiver(BroadcastReceiver receiver) { - apprtcContext.unregisterReceiver(receiver); - } - - protected boolean getBluetoothProfileProxy( - Context context, BluetoothProfile.ServiceListener listener, int profile) { - return bluetoothAdapter.getProfileProxy(context, listener, profile); - } - - protected boolean hasPermission(Context context, String permission) { - return apprtcContext.checkPermission(permission, Process.myPid(), Process.myUid()) - == PackageManager.PERMISSION_GRANTED; - } - - /** Logs the state of the local Bluetooth adapter. 
*/ - @SuppressLint("HardwareIds") - protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) { - Log.d(TAG, "BluetoothAdapter: " - + "enabled=" + localAdapter.isEnabled() + ", " - + "state=" + stateToString(localAdapter.getState()) + ", " - + "name=" + localAdapter.getName() + ", " - + "address=" + localAdapter.getAddress()); - // Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter. - Set pairedDevices = localAdapter.getBondedDevices(); - if (!pairedDevices.isEmpty()) { - Log.d(TAG, "paired devices:"); - for (BluetoothDevice device : pairedDevices) { - Log.d(TAG, " name=" + device.getName() + ", address=" + device.getAddress()); - } - } - } - - /** Ensures that the audio manager updates its list of available audio devices. */ - private void updateAudioDeviceState() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "updateAudioDeviceState"); - apprtcAudioManager.updateAudioDeviceState(); - } - - /** Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds. */ - private void startTimer() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "startTimer"); - handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS); - } - - /** Cancels any outstanding timer tasks. */ - private void cancelTimer() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "cancelTimer"); - handler.removeCallbacks(bluetoothTimeoutRunnable); - } - - /** - * Called when start of the BT SCO channel takes too long time. Usually - * happens when the BT device has been turned on during an ongoing call. 
- */ - private void bluetoothTimeout() { - ThreadUtils.checkIsOnMainThread(); - if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) { - return; - } - Log.d(TAG, "bluetoothTimeout: BT state=" + bluetoothState + ", " - + "attempts: " + scoConnectionAttempts + ", " - + "SCO is on: " + isScoOn()); - if (bluetoothState != State.SCO_CONNECTING) { - return; - } - // Bluetooth SCO should be connecting; check the latest result. - boolean scoConnected = false; - List devices = bluetoothHeadset.getConnectedDevices(); - if (devices.size() > 0) { - bluetoothDevice = devices.get(0); - if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) { - Log.d(TAG, "SCO connected with " + bluetoothDevice.getName()); - scoConnected = true; - } else { - Log.d(TAG, "SCO is not connected with " + bluetoothDevice.getName()); - } - } - if (scoConnected) { - // We thought BT had timed out, but it's actually on; updating state. - bluetoothState = State.SCO_CONNECTED; - scoConnectionAttempts = 0; - } else { - // Give up and "cancel" our request by calling stopBluetoothSco(). - Log.w(TAG, "BT failed to connect after timeout"); - stopScoAudio(); - } - updateAudioDeviceState(); - Log.d(TAG, "bluetoothTimeout done: BT state=" + bluetoothState); - } - - /** Checks whether audio uses Bluetooth SCO. */ - private boolean isScoOn() { - return audioManager.isBluetoothScoOn(); - } - - /** Converts BluetoothAdapter states into local string representations. */ - private String stateToString(int state) { - switch (state) { - case BluetoothAdapter.STATE_DISCONNECTED: - return "DISCONNECTED"; - case BluetoothAdapter.STATE_CONNECTED: - return "CONNECTED"; - case BluetoothAdapter.STATE_CONNECTING: - return "CONNECTING"; - case BluetoothAdapter.STATE_DISCONNECTING: - return "DISCONNECTING"; - case BluetoothAdapter.STATE_OFF: - return "OFF"; - case BluetoothAdapter.STATE_ON: - return "ON"; - case BluetoothAdapter.STATE_TURNING_OFF: - // Indicates the local Bluetooth adapter is turning off. 
Local clients should immediately - // attempt graceful disconnection of any remote links. - return "TURNING_OFF"; - case BluetoothAdapter.STATE_TURNING_ON: - // Indicates the local Bluetooth adapter is turning on. However local clients should wait - // for STATE_ON before attempting to use the adapter. - return "TURNING_ON"; - default: - return "INVALID"; - } - } -} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCProximitySensor.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCProximitySensor.java deleted file mode 100644 index 6cfa3bb4a4..0000000000 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCProximitySensor.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package com.cloudwebrtc.webrtc.utils; - -import android.content.Context; -import android.hardware.Sensor; -import android.hardware.SensorEvent; -import android.hardware.SensorEventListener; -import android.hardware.SensorManager; -import android.os.Build; -import android.util.Log; -import com.cloudwebrtc.webrtc.utils.RTCUtils; -import org.webrtc.ThreadUtils; - -/** - * RTCProximitySensor manages functions related to the proximity sensor in - * the RTC demo. - * On most device, the proximity sensor is implemented as a boolean-sensor. - * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX - * value i.e. the LUX value of the light sensor is compared with a threshold. - * A LUX-value more than the threshold means the proximity sensor returns "FAR". - * Anything less than the threshold value and the sensor returns "NEAR". 
- */ -public class RTCProximitySensor implements SensorEventListener { - private static final String TAG = "RTCProximitySensor"; - - // This class should be created, started and stopped on one thread - // (e.g. the main thread). We use |nonThreadSafe| to ensure that this is - // the case. Only active when |DEBUG| is set to true. - private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker(); - - private final Runnable onSensorStateListener; - private final SensorManager sensorManager; - private Sensor proximitySensor; - private boolean lastStateReportIsNear; - - /** Construction */ - static RTCProximitySensor create(Context context, Runnable sensorStateListener) { - return new RTCProximitySensor(context, sensorStateListener); - } - - private RTCProximitySensor(Context context, Runnable sensorStateListener) { - Log.d(TAG, "RTCProximitySensor" + RTCUtils.getThreadInfo()); - onSensorStateListener = sensorStateListener; - sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE)); - } - - /** - * Activate the proximity sensor. Also do initialization if called for the - * first time. - */ - public boolean start() { - threadChecker.checkIsOnValidThread(); - Log.d(TAG, "start" + RTCUtils.getThreadInfo()); - if (!initDefaultSensor()) { - // Proximity sensor is not supported on this device. - return false; - } - sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL); - return true; - } - - /** Deactivate the proximity sensor. */ - public void stop() { - threadChecker.checkIsOnValidThread(); - Log.d(TAG, "stop" + RTCUtils.getThreadInfo()); - if (proximitySensor == null) { - return; - } - sensorManager.unregisterListener(this, proximitySensor); - } - - /** Getter for last reported state. Set to true if "near" is reported. 
*/ - public boolean sensorReportsNearState() { - threadChecker.checkIsOnValidThread(); - return lastStateReportIsNear; - } - - @Override - public final void onAccuracyChanged(Sensor sensor, int accuracy) { - threadChecker.checkIsOnValidThread(); - RTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY); - if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) { - Log.e(TAG, "The values returned by this sensor cannot be trusted"); - } - } - - @Override - public final void onSensorChanged(SensorEvent event) { - threadChecker.checkIsOnValidThread(); - RTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY); - // As a best practice; do as little as possible within this method and - // avoid blocking. - float distanceInCentimeters = event.values[0]; - if (distanceInCentimeters < proximitySensor.getMaximumRange()) { - Log.d(TAG, "Proximity sensor => NEAR state"); - lastStateReportIsNear = true; - } else { - Log.d(TAG, "Proximity sensor => FAR state"); - lastStateReportIsNear = false; - } - - // Report about new state to listening client. Client can then call - // sensorReportsNearState() to query the current state (NEAR or FAR). - if (onSensorStateListener != null) { - onSensorStateListener.run(); - } - - Log.d(TAG, "onSensorChanged" + RTCUtils.getThreadInfo() + ": " - + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance=" - + event.values[0]); - } - - /** - * Get default proximity sensor if it exists. Tablet devices (e.g. Nexus 7) - * does not support this type of sensor and false will be returned in such - * cases. - */ - private boolean initDefaultSensor() { - if (proximitySensor != null) { - return true; - } - proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY); - if (proximitySensor == null) { - return false; - } - logProximitySensorInfo(); - return true; - } - - /** Helper method for logging information about the proximity sensor. 
*/ - private void logProximitySensorInfo() { - if (proximitySensor == null) { - return; - } - StringBuilder info = new StringBuilder("Proximity sensor: "); - info.append("name=").append(proximitySensor.getName()); - info.append(", vendor: ").append(proximitySensor.getVendor()); - info.append(", power: ").append(proximitySensor.getPower()); - info.append(", resolution: ").append(proximitySensor.getResolution()); - info.append(", max range: ").append(proximitySensor.getMaximumRange()); - info.append(", min delay: ").append(proximitySensor.getMinDelay()); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) { - // Added in API level 20. - info.append(", type: ").append(proximitySensor.getStringType()); - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - // Added in API level 21. - info.append(", max delay: ").append(proximitySensor.getMaxDelay()); - info.append(", reporting mode: ").append(proximitySensor.getReportingMode()); - info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor()); - } - Log.d(TAG, info.toString()); - } -} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCUtils.java deleted file mode 100644 index 1b55ebdf80..0000000000 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCUtils.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package com.cloudwebrtc.webrtc.utils; - -import android.os.Build; -import android.util.Log; - -/** - * RTCUtils provides helper functions for managing thread safety. 
- */ -public final class RTCUtils { - private RTCUtils() {} - - /** Helper method which throws an exception when an assertion has failed. */ - public static void assertIsTrue(boolean condition) { - if (!condition) { - throw new AssertionError("Expected condition to be true"); - } - } - - /** Helper method for building a string of thread information.*/ - public static String getThreadInfo() { - return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId() - + "]"; - } - - /** Information about the current build, taken from system properties. */ - public static void logDeviceInfo(String tag) { - Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", " - + "Release: " + Build.VERSION.RELEASE + ", " - + "Brand: " + Build.BRAND + ", " - + "Device: " + Build.DEVICE + ", " - + "Id: " + Build.ID + ", " - + "Hardware: " + Build.HARDWARE + ", " - + "Manufacturer: " + Build.MANUFACTURER + ", " - + "Model: " + Build.MODEL + ", " - + "Product: " + Build.PRODUCT); - } -} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java new file mode 100644 index 0000000000..b990ca4e24 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java @@ -0,0 +1,80 @@ +package com.cloudwebrtc.webrtc.utils; + +import androidx.annotation.Nullable; + +import org.webrtc.PeerConnection; + +public class Utils { + + @Nullable + static public String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { + switch (iceConnectionState) { + case NEW: + return "new"; + case CHECKING: + return "checking"; + case CONNECTED: + return "connected"; + case COMPLETED: + return "completed"; + case FAILED: + return "failed"; + case DISCONNECTED: + return "disconnected"; + case CLOSED: + return "closed"; + } + return null; + } + + @Nullable + static public String iceGatheringStateString(PeerConnection.IceGatheringState iceGatheringState) { + switch 
(iceGatheringState) { + case NEW: + return "new"; + case GATHERING: + return "gathering"; + case COMPLETE: + return "complete"; + } + return null; + } + + @Nullable + static public String signalingStateString(PeerConnection.SignalingState signalingState) { + switch (signalingState) { + case STABLE: + return "stable"; + case HAVE_LOCAL_OFFER: + return "have-local-offer"; + case HAVE_LOCAL_PRANSWER: + return "have-local-pranswer"; + case HAVE_REMOTE_OFFER: + return "have-remote-offer"; + case HAVE_REMOTE_PRANSWER: + return "have-remote-pranswer"; + case CLOSED: + return "closed"; + } + return null; + } + + @Nullable + static public String connectionStateString(PeerConnection.PeerConnectionState connectionState) { + switch (connectionState) { + case NEW: + return "new"; + case CONNECTING: + return "connecting"; + case CONNECTED: + return "connected"; + case DISCONNECTED: + return "disconnected"; + case FAILED: + return "failed"; + case CLOSED: + return "closed"; + } + return null; + } +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/LocalVideoTrack.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/LocalVideoTrack.java new file mode 100644 index 0000000000..fde5a75f04 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/LocalVideoTrack.java @@ -0,0 +1,67 @@ +package com.cloudwebrtc.webrtc.video; + +import androidx.annotation.Nullable; + +import com.cloudwebrtc.webrtc.LocalTrack; + +import org.webrtc.VideoFrame; +import org.webrtc.VideoProcessor; +import org.webrtc.VideoSink; +import org.webrtc.VideoTrack; + +import java.util.ArrayList; +import java.util.List; + +public class LocalVideoTrack extends LocalTrack implements VideoProcessor { + public interface ExternalVideoFrameProcessing { + /** + * Process a video frame. + * @param frame + * @return The processed video frame. 
+ */ + public abstract VideoFrame onFrame(VideoFrame frame); + } + + public LocalVideoTrack(VideoTrack videoTrack) { + super(videoTrack); + } + + List processors = new ArrayList<>(); + + public void addProcessor(ExternalVideoFrameProcessing processor) { + synchronized (processors) { + processors.add(processor); + } + } + + public void removeProcessor(ExternalVideoFrameProcessing processor) { + synchronized (processors) { + processors.remove(processor); + } + } + + private VideoSink sink = null; + + @Override + public void setSink(@Nullable VideoSink videoSink) { + sink = videoSink; + } + + @Override + public void onCapturerStarted(boolean b) {} + + @Override + public void onCapturerStopped() {} + + @Override + public void onFrameCaptured(VideoFrame videoFrame) { + if (sink != null) { + synchronized (processors) { + for (ExternalVideoFrameProcessing processor : processors) { + videoFrame = processor.onFrame(videoFrame); + } + } + sink.onFrame(videoFrame); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/VideoCapturerInfo.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/VideoCapturerInfo.java new file mode 100644 index 0000000000..8d93e61578 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/VideoCapturerInfo.java @@ -0,0 +1,12 @@ +package com.cloudwebrtc.webrtc.video; + +import org.webrtc.VideoCapturer; + +public class VideoCapturerInfo { + public VideoCapturer capturer; + public int width; + public int height; + public int fps; + public boolean isScreenCapture = false; + public String cameraName; +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraRegionUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraRegionUtils.java new file mode 100644 index 0000000000..62581de564 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraRegionUtils.java @@ -0,0 +1,205 @@ +// Copyright 2013 The Flutter Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package com.cloudwebrtc.webrtc.video.camera; + +import android.annotation.TargetApi; +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.util.Size; +import androidx.annotation.NonNull; +import androidx.annotation.VisibleForTesting; +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import java.util.Arrays; + +/** + * Utility class offering functions to calculate values regarding the camera boundaries. + * + *

The functions are used to calculate focus and exposure settings. + */ +public final class CameraRegionUtils { + + @NonNull + public static Size getCameraBoundaries( + @NonNull CameraCharacteristics cameraCharacteristics, @NonNull CaptureRequest.Builder requestBuilder) { + if (SdkCapabilityChecker.supportsDistortionCorrection() + && supportsDistortionCorrection(cameraCharacteristics)) { + // Get the current distortion correction mode. + Integer distortionCorrectionMode = + requestBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE); + + // Return the correct boundaries depending on the mode. + android.graphics.Rect rect; + if (distortionCorrectionMode == null + || distortionCorrectionMode == CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) { + rect = getSensorInfoPreCorrectionActiveArraySize(cameraCharacteristics); + } else { + rect = getSensorInfoActiveArraySize(cameraCharacteristics); + } + + return SizeFactory.create(rect.width(), rect.height()); + } else { + // No distortion correction support. 
+ return getSensorInfoPixelArraySize(cameraCharacteristics); + } + } + + @TargetApi(Build.VERSION_CODES.P) + private static boolean supportsDistortionCorrection(CameraCharacteristics cameraCharacteristics) { + int[] availableDistortionCorrectionModes = getDistortionCorrectionAvailableModes(cameraCharacteristics); + if (availableDistortionCorrectionModes == null) { + availableDistortionCorrectionModes = new int[0]; + } + long nonOffModesSupported = + Arrays.stream(availableDistortionCorrectionModes) + .filter((value) -> value != CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) + .count(); + return nonOffModesSupported > 0; + } + + static public int[] getDistortionCorrectionAvailableModes(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + return cameraCharacteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES); + } + return null; + } + + public static Rect getSensorInfoActiveArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + } + + public static Size getSensorInfoPixelArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE); + } + + @NonNull + public static Rect getSensorInfoPreCorrectionActiveArraySize(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return cameraCharacteristics.get( + CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); + } + return getSensorInfoActiveArraySize(cameraCharacteristics); + } + + public static Integer getControlMaxRegionsAutoExposure(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE); + } + + /** + * Converts a point into a {@link MeteringRectangle} with the supplied coordinates as the center + * point. + * + *

Since the Camera API (due to cross-platform constraints) only accepts a point when + * configuring a specific focus or exposure area and Android requires a rectangle to configure + * these settings there is a need to convert the point into a rectangle. This method will create + * the required rectangle with an arbitrarily size that is a 10th of the current viewport and the + * coordinates as the center point. + * + * @param boundaries - The camera boundaries to calculate the metering rectangle for. + * @param x x - 1 >= coordinate >= 0. + * @param y y - 1 >= coordinate >= 0. + * @return The dimensions of the metering rectangle based on the supplied coordinates and + * boundaries. + */ + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. + int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1))); + int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1))); + // Determine the dimensions of the metering rectangle (10th of the viewport). + int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d); + int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d); + // Adjust target coordinate to represent top-left corner of metering rectangle. 
+ targetX -= targetWidth / 2; + targetY -= targetHeight / 2; + // Adjust target coordinate as to not fall out of bounds. + if (targetX < 0) { + targetX = 0; + } + if (targetY < 0) { + targetY = 0; + } + int maxTargetX = boundaries.getWidth() - 1 - targetWidth; + int maxTargetY = boundaries.getHeight() - 1 - targetHeight; + if (targetX > maxTargetX) { + targetX = maxTargetX; + } + if (targetY > maxTargetY) { + targetY = maxTargetY; + } + // Build the metering rectangle. + return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1); + } + + /** Factory class that assists in creating a {@link MeteringRectangle} instance. */ + static class MeteringRectangleFactory { + /** + * Creates a new instance of the {@link MeteringRectangle} class. + * + *

This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param x coordinate >= 0. + * @param y coordinate >= 0. + * @param width width >= 0. + * @param height height >= 0. + * @param meteringWeight weight between {@value MeteringRectangle#METERING_WEIGHT_MIN} and + * {@value MeteringRectangle#METERING_WEIGHT_MAX} inclusively. + * @return new instance of the {@link MeteringRectangle} class. + * @throws IllegalArgumentException if any of the parameters were negative. + */ + @VisibleForTesting + public static MeteringRectangle create( + int x, int y, int width, int height, int meteringWeight) { + return new MeteringRectangle(x, y, width, height, meteringWeight); + } + } + + /** Factory class that assists in creating a {@link Size} instance. */ + static class SizeFactory { + /** + * Creates a new instance of the {@link Size} class. + * + *

This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param width width >= 0. + * @param height height >= 0. + * @return new instance of the {@link Size} class. + */ + @VisibleForTesting + public static Size create(int width, int height) { + return new Size(width, height); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraUtils.java new file mode 100644 index 0000000000..12802ce1d0 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraUtils.java @@ -0,0 +1,730 @@ +package com.cloudwebrtc.webrtc.video.camera; + +import android.app.Activity; +import android.graphics.Rect; +import android.hardware.Camera; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.os.Handler; +import android.util.Log; +import android.util.Range; +import android.util.Size; +import android.view.Surface; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.cloudwebrtc.webrtc.GetUserMediaImpl; +import com.cloudwebrtc.webrtc.utils.AnyThreadResult; +import com.cloudwebrtc.webrtc.video.VideoCapturerInfo; + +import org.webrtc.Camera1Capturer; +import org.webrtc.Camera2Capturer; +import org.webrtc.CameraEnumerationAndroid; + +import java.lang.reflect.Field; +import java.util.List; + +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel; + +public class CameraUtils { + private static final String TAG = 
"CameraUtils"; + Activity activity; + private GetUserMediaImpl getUserMediaImpl; + private boolean isTorchOn = false; + private DeviceOrientationManager deviceOrientationManager; + public CameraUtils(GetUserMediaImpl getUserMediaImpl, Activity activity) { + this.getUserMediaImpl = getUserMediaImpl; + this.activity = activity; + this.deviceOrientationManager = new DeviceOrientationManager(activity, 0); + // commented out because you cannot register a reciever when the app is terminated + // because the activity is null? + // this causes the call to break if the app is terminated + // the manager seems to end up at handleOrientationChange which does not do + // anything at the moment so this should be ok + + // TODO: get a proper fix at some point + // this.deviceOrientationManager.start(); + } + + public void setFocusMode(MethodCall call, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setFocusMode", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + 
fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + switch (mode) { + case "locked": + // When locking the auto-focus the camera device should do a one-time focus and afterwards + // set the auto-focus to idle. This is accomplished by setting the CONTROL_AF_MODE to + // CONTROL_AF_MODE_AUTO. + captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO); + break; + case "auto": + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO); + break; + default: + break; + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + + //captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + if(!params.getSupportedFocusModes().isEmpty()) { + switch (mode) { + case "locked": + params.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED); + break; + case "auto": + params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); + break; + default: + break; + } + result.success(null); + return; + } + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void setFocusPoint(MethodCall call, Point focusPoint, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setFocusMode", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + 
// Most likely the upstream Camera2Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + MeteringRectangle focusRectangle = null; + Size cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder); + PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation(); + focusRectangle = + convertPointToMeteringRectangle(cameraBoundaries, focusPoint.x, focusPoint.y, orientation); + + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_REGIONS, + captureRequestBuilder == null ? null : new MeteringRectangle[] {focusRectangle}); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + params.setFocusAreas(null); + + result.success(null); + return; + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void setExposureMode(MethodCall call, AnyThreadResult result) {} + + public void setExposurePoint(MethodCall call,Point exposurePoint, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setExposurePoint", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setExposurePoint", "[setExposurePoint] Failed to get `" + 
e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + if(CameraRegionUtils.getControlMaxRegionsAutoExposure(cameraCharacteristics) <= 0) { + resultError("setExposurePoint", "[setExposurePoint] Camera does not support auto exposure", result); + return; + } + + MeteringRectangle exposureRectangle = null; + Size cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder); + PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation(); + exposureRectangle = + convertPointToMeteringRectangle(cameraBoundaries, exposurePoint.x, exposurePoint.y, orientation); + if (exposureRectangle != null) { + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {exposureRectangle}); + } else { + MeteringRectangle[] defaultRegions = captureRequestBuilder.get(CaptureRequest.CONTROL_AE_REGIONS); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, defaultRegions); + } + + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] 
Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + params.setFocusAreas(null); + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void hasTorch(String trackId, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("hasTorch", "Video capturer not found for id: " + trackId, result); + return; + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && info.capturer instanceof Camera2Capturer) { + CameraManager manager; + CameraDevice cameraDevice; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + boolean flashIsAvailable; + try { + CameraCharacteristics characteristics = + manager.getCameraCharacteristics(cameraDevice.getId()); + flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(flashIsAvailable); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) 
getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + List supportedModes = params.getSupportedFlashModes(); + + result.success( + supportedModes != null && supportedModes.contains(Camera.Parameters.FLASH_MODE_TORCH)); + return; + } + + resultError("hasTorch", "[TORCH] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setZoom(String trackId, double zoomLevel, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setZoom", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) 
getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + final double maxZoomLevel = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); + + final double desiredZoomLevel = Math.max(1.0, Math.min(zoomLevel, maxZoomLevel)); + + float ratio = 1.0f / (float)desiredZoomLevel; + + if (rect != null) { + int croppedWidth = rect.width() - Math.round((float) rect.width() * ratio); + int croppedHeight = rect.height() - Math.round((float) rect.height() * ratio); + final Rect desiredRegion = new Rect(croppedWidth / 2, croppedHeight / 2, rect.width() - croppedWidth / 2, rect.height() - croppedHeight / 2); + captureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, desiredRegion); + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + if(params.isZoomSupported()) { + int maxZoom = params.getMaxZoom(); + double desiredZoom = Math.max(0, Math.min(zoomLevel, maxZoom)); + params.setZoom((int)desiredZoom); + result.success(null); + return; + } + } + resultError("setZoom", "[ZOOM] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setTorch(String trackId, boolean torch, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setTorch", "Video capturer not found for id: " + trackId, result); + return; + } + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + CameraManager manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + 
e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(null); + isTorchOn = torch; + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + torch ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + camera.setParameters(params); + + result.success(null); + isTorchOn = torch; + return; + } + resultError("setTorch", "[TORCH] Video capturer not compatible", result); + } + + + private class NoSuchFieldWithNameException extends NoSuchFieldException { + + String className; + String fieldName; + + NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { + super(e.getMessage()); + this.className = className; + this.fieldName = fieldName; + } + } + static private void resultError(String method, String error, MethodChannel.Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + private Object getPrivateProperty(Class klass, Object object, String fieldName) + throws NoSuchFieldWithNameException { + try { + Field field = klass.getDeclaredField(fieldName); + field.setAccessible(true); + return field.get(object); + } catch (NoSuchFieldException e) { + throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); + } catch (IllegalAccessException e) { + // Should never happen since we are calling `setAccessible(true)` + throw new RuntimeException(e); + } + } + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. 
+ int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1))); + int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1))); + // Determine the dimensions of the metering rectangle (10th of the viewport). + int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d); + int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d); + // Adjust target coordinate to represent top-left corner of metering rectangle. + targetX -= targetWidth / 2; + targetY -= targetHeight / 2; + // Adjust target coordinate as to not fall out of bounds. + if (targetX < 0) { + targetX = 0; + } + if (targetY < 0) { + targetY = 0; + } + int maxTargetX = boundaries.getWidth() - 1 - targetWidth; + int maxTargetY = boundaries.getHeight() - 1 - targetHeight; + if (targetX > maxTargetX) { + targetX = maxTargetX; + } + if (targetY > maxTargetY) { + targetY = maxTargetY; + } + // Build the metering rectangle. + return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1); + } + + static class MeteringRectangleFactory { + public static MeteringRectangle create( + int x, int y, int width, int height, int meteringWeight) { + return new MeteringRectangle(x, y, width, height, meteringWeight); + } + } +} + diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/DeviceOrientationManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/DeviceOrientationManager.java new file mode 100644 index 0000000000..c533291893 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/DeviceOrientationManager.java @@ -0,0 +1,188 @@ +package com.cloudwebrtc.webrtc.video.camera; + +import android.app.Activity; +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.content.res.Configuration; +import android.view.Display; +import android.view.Surface; +import 
android.view.WindowManager; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation; + +/** + * Support class to help to determine the media orientation based on the orientation of the device. + */ +public class DeviceOrientationManager { + + private static final IntentFilter orientationIntentFilter = + new IntentFilter(Intent.ACTION_CONFIGURATION_CHANGED); + + private final Activity activity; + private final int sensorOrientation; + private PlatformChannel.DeviceOrientation lastOrientation; + private BroadcastReceiver broadcastReceiver; + + /** Factory method to create a device orientation manager. */ + @NonNull + public static DeviceOrientationManager create( + @NonNull Activity activity, + int sensorOrientation) { + return new DeviceOrientationManager(activity, sensorOrientation); + } + + DeviceOrientationManager( + @NonNull Activity activity, + int sensorOrientation) { + this.activity = activity; + this.sensorOrientation = sensorOrientation; + } + + public void start() { + if (broadcastReceiver != null) { + return; + } + broadcastReceiver = + new BroadcastReceiver() { + @Override + public void onReceive(Context context, Intent intent) { + handleUIOrientationChange(); + } + }; + activity.registerReceiver(broadcastReceiver, orientationIntentFilter); + broadcastReceiver.onReceive(activity, null); + } + + /** Stops listening for orientation updates. */ + public void stop() { + if (broadcastReceiver == null) { + return; + } + activity.unregisterReceiver(broadcastReceiver); + broadcastReceiver = null; + } + + + /** @return the last received UI orientation. 
*/ + @Nullable + public PlatformChannel.DeviceOrientation getLastUIOrientation() { + return this.lastOrientation; + } + + /** + * Handles orientation changes based on change events triggered by the OrientationIntentFilter. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + */ + @VisibleForTesting + void handleUIOrientationChange() { + PlatformChannel.DeviceOrientation orientation = getUIOrientation(); + handleOrientationChange(orientation, lastOrientation); + lastOrientation = orientation; + } + @VisibleForTesting + static void handleOrientationChange( + DeviceOrientation newOrientation, + DeviceOrientation previousOrientation) { + } + + @SuppressWarnings("deprecation") + @VisibleForTesting + PlatformChannel.DeviceOrientation getUIOrientation() { + final int rotation = getDisplay().getRotation(); + final int orientation = activity.getResources().getConfiguration().orientation; + + switch (orientation) { + case Configuration.ORIENTATION_PORTRAIT: + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return PlatformChannel.DeviceOrientation.PORTRAIT_UP; + } else { + return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN; + } + case Configuration.ORIENTATION_LANDSCAPE: + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT; + } else { + return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT; + } + case Configuration.ORIENTATION_SQUARE: + case Configuration.ORIENTATION_UNDEFINED: + default: + return PlatformChannel.DeviceOrientation.PORTRAIT_UP; + } + } + + /** + * Calculates the sensor orientation based on the supplied angle. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + * + * @param angle Orientation angle. + * @return The sensor orientation based on the supplied angle. + */ + @VisibleForTesting + PlatformChannel.DeviceOrientation calculateSensorOrientation(int angle) { + final int tolerance = 45; + angle += tolerance; + + // Orientation is 0 in the default orientation mode. This is portrait-mode for phones + // and landscape for tablets. We have to compensate for this by calculating the default + // orientation, and apply an offset accordingly. + int defaultDeviceOrientation = getDeviceDefaultOrientation(); + if (defaultDeviceOrientation == Configuration.ORIENTATION_LANDSCAPE) { + angle += 90; + } + // Determine the orientation + angle = angle % 360; + return new PlatformChannel.DeviceOrientation[] { + PlatformChannel.DeviceOrientation.PORTRAIT_UP, + PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT, + PlatformChannel.DeviceOrientation.PORTRAIT_DOWN, + PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT, + } + [angle / 90]; + } + + /** + * Gets the default orientation of the device. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return The default orientation of the device. + */ + @VisibleForTesting + int getDeviceDefaultOrientation() { + Configuration config = activity.getResources().getConfiguration(); + int rotation = getDisplay().getRotation(); + if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) + && config.orientation == Configuration.ORIENTATION_LANDSCAPE) + || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) + && config.orientation == Configuration.ORIENTATION_PORTRAIT)) { + return Configuration.ORIENTATION_LANDSCAPE; + } else { + return Configuration.ORIENTATION_PORTRAIT; + } + } + + /** + * Gets an instance of the Android {@link android.view.Display}. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return An instance of the Android {@link android.view.Display}. + */ + @SuppressWarnings("deprecation") + @VisibleForTesting + Display getDisplay() { + return ((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/Point.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/Point.java new file mode 100644 index 0000000000..83ab8e653d --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/Point.java @@ -0,0 +1,14 @@ +package com.cloudwebrtc.webrtc.video.camera; + +import androidx.annotation.Nullable; + +/** Represents a point on an x/y axis. */ +public class Point { + public final Double x; + public final Double y; + + public Point(@Nullable Double x, @Nullable Double y) { + this.x = x; + this.y = y; + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/SdkCapabilityChecker.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/SdkCapabilityChecker.java new file mode 100644 index 0000000000..cd7d21ef3f --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/SdkCapabilityChecker.java @@ -0,0 +1,60 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package com.cloudwebrtc.webrtc.video.camera; + +import android.annotation.SuppressLint; +import android.os.Build; +import androidx.annotation.ChecksSdkIntAtLeast; +import androidx.annotation.VisibleForTesting; + +/** Abstracts SDK version checks, and allows overriding them in unit tests. */ +public class SdkCapabilityChecker { + /** The current SDK version, overridable for testing. 
*/ + @SuppressLint("AnnotateVersionCheck") + @VisibleForTesting + public static int SDK_VERSION = Build.VERSION.SDK_INT; + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P) + public static boolean supportsDistortionCorrection() { + // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#DISTORTION_CORRECTION_AVAILABLE_MODES + return SDK_VERSION >= Build.VERSION_CODES.P; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.O) + public static boolean supportsEglRecordableAndroid() { + // See https://developer.android.com/reference/android/opengl/EGLExt#EGL_RECORDABLE_ANDROID + return SDK_VERSION >= Build.VERSION_CODES.O; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.S) + public static boolean supportsEncoderProfiles() { + // See https://developer.android.com/reference/android/media/EncoderProfiles + return SDK_VERSION >= Build.VERSION_CODES.S; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.M) + public static boolean supportsMarshmallowNoiseReductionModes() { + // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES + return SDK_VERSION >= Build.VERSION_CODES.M; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P) + public static boolean supportsSessionConfiguration() { + // See https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration + return SDK_VERSION >= Build.VERSION_CODES.P; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.N) + public static boolean supportsVideoPause() { + // See https://developer.android.com/reference/androidx/camera/video/VideoRecordEvent.Pause + return SDK_VERSION >= Build.VERSION_CODES.N; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.R) + public static boolean supportsZoomRatio() { + // See https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#CONTROL_ZOOM_RATIO + return SDK_VERSION >= Build.VERSION_CODES.R; + } +} 
diff --git a/android/src/main/java/org/webrtc/Camera1Helper.java b/android/src/main/java/org/webrtc/Camera1Helper.java new file mode 100644 index 0000000000..f0dec0d8de --- /dev/null +++ b/android/src/main/java/org/webrtc/Camera1Helper.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023-2024 LiveKit, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +/** + * A helper to access package-protected methods used in [Camera2Session] + *

+ * Note: cameraId as used in the Camera1XXX classes refers to the index within the list of cameras. + * + * @suppress + */ + +public class Camera1Helper { + + public static int getCameraId(String deviceName) { + return Camera1Enumerator.getCameraIndex(deviceName); + } + + @Nullable + public static List getSupportedFormats(int cameraId) { + return Camera1Enumerator.getSupportedFormats(cameraId); + } + + public static Size findClosestCaptureFormat(int cameraId, int width, int height) { + List formats = getSupportedFormats(cameraId); + + List sizes = new ArrayList<>(); + if (formats != null) { + for (CameraEnumerationAndroid.CaptureFormat format : formats) { + sizes.add(new Size(format.width, format.height)); + } + } + + return CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height); + } +} diff --git a/android/src/main/java/org/webrtc/Camera2Helper.java b/android/src/main/java/org/webrtc/Camera2Helper.java new file mode 100644 index 0000000000..eab20edb2e --- /dev/null +++ b/android/src/main/java/org/webrtc/Camera2Helper.java @@ -0,0 +1,51 @@ +/* + * Copyright 2023-2024 LiveKit, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import android.hardware.camera2.CameraManager; + +import androidx.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +/** + * A helper to access package-protected methods used in [Camera2Session] + *

+ * Note: cameraId as used in the Camera2XXX classes refers to the id returned + * by [CameraManager.getCameraIdList]. + */ +public class Camera2Helper { + + @Nullable + public static List getSupportedFormats(CameraManager cameraManager, @Nullable String cameraId) { + return Camera2Enumerator.getSupportedFormats(cameraManager, cameraId); + } + + public static Size findClosestCaptureFormat(CameraManager cameraManager, @Nullable String cameraId, int width, int height) { + List formats = getSupportedFormats(cameraManager, cameraId); + + List sizes = new ArrayList<>(); + if (formats != null) { + for (CameraEnumerationAndroid.CaptureFormat format : formats) { + sizes.add(new Size(format.width, format.height)); + } + } + + return CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height); + } +} diff --git a/android/src/main/java/org/webrtc/video/CustomVideoDecoderFactory.java b/android/src/main/java/org/webrtc/video/CustomVideoDecoderFactory.java new file mode 100644 index 0000000000..531314ac3e --- /dev/null +++ b/android/src/main/java/org/webrtc/video/CustomVideoDecoderFactory.java @@ -0,0 +1,55 @@ +package org.webrtc.video; + +import androidx.annotation.Nullable; + +import org.webrtc.EglBase; +import org.webrtc.SoftwareVideoDecoderFactory; +import org.webrtc.VideoCodecInfo; +import org.webrtc.VideoDecoder; +import org.webrtc.VideoDecoderFactory; +import org.webrtc.WrappedVideoDecoderFactory; + +import java.util.ArrayList; +import java.util.List; + +public class CustomVideoDecoderFactory implements VideoDecoderFactory { + private SoftwareVideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory(); + private WrappedVideoDecoderFactory wrappedVideoDecoderFactory; + private boolean forceSWCodec = false; + + private List forceSWCodecs = new ArrayList<>(); + + public CustomVideoDecoderFactory(EglBase.Context sharedContext) { + this.wrappedVideoDecoderFactory = new WrappedVideoDecoderFactory(sharedContext); + } + + public void 
setForceSWCodec(boolean forceSWCodec) { + this.forceSWCodec = forceSWCodec; + } + + public void setForceSWCodecList(List forceSWCodecs) { + this.forceSWCodecs = forceSWCodecs; + } + + @Nullable + @Override + public VideoDecoder createDecoder(VideoCodecInfo videoCodecInfo) { + if(forceSWCodec) { + return softwareVideoDecoderFactory.createDecoder(videoCodecInfo); + } + if(!forceSWCodecs.isEmpty()) { + if(forceSWCodecs.contains(videoCodecInfo.name)) { + return softwareVideoDecoderFactory.createDecoder(videoCodecInfo); + } + } + return wrappedVideoDecoderFactory.createDecoder(videoCodecInfo); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + if(forceSWCodec && forceSWCodecs.isEmpty()) { + return softwareVideoDecoderFactory.getSupportedCodecs(); + } + return wrappedVideoDecoderFactory.getSupportedCodecs(); + } +} diff --git a/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java b/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java new file mode 100644 index 0000000000..772b3f936c --- /dev/null +++ b/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java @@ -0,0 +1,61 @@ +package org.webrtc.video; + +import androidx.annotation.Nullable; + +import com.cloudwebrtc.webrtc.SimulcastVideoEncoderFactoryWrapper; + +import org.webrtc.EglBase; +import org.webrtc.SoftwareVideoEncoderFactory; +import org.webrtc.VideoCodecInfo; +import org.webrtc.VideoEncoder; +import org.webrtc.VideoEncoderFactory; + +import java.util.ArrayList; +import java.util.List; + +public class CustomVideoEncoderFactory implements VideoEncoderFactory { + private SoftwareVideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory(); + private SimulcastVideoEncoderFactoryWrapper simulcastVideoEncoderFactoryWrapper; + + private boolean forceSWCodec = false; + + private List forceSWCodecs = new ArrayList<>(); + + public CustomVideoEncoderFactory(EglBase.Context sharedContext, + boolean enableIntelVp8Encoder, + boolean 
enableH264HighProfile) { + this.simulcastVideoEncoderFactoryWrapper = new SimulcastVideoEncoderFactoryWrapper(sharedContext, enableIntelVp8Encoder, enableH264HighProfile); + } + + public void setForceSWCodec(boolean forceSWCodec) { + this.forceSWCodec = forceSWCodec; + } + + public void setForceSWCodecList(List forceSWCodecs) { + this.forceSWCodecs = forceSWCodecs; + } + + @Nullable + @Override + public VideoEncoder createEncoder(VideoCodecInfo videoCodecInfo) { + if(forceSWCodec) { + return softwareVideoEncoderFactory.createEncoder(videoCodecInfo); + } + + if(!forceSWCodecs.isEmpty()) { + if(forceSWCodecs.contains(videoCodecInfo.name)) { + return softwareVideoEncoderFactory.createEncoder(videoCodecInfo); + } + } + + return simulcastVideoEncoderFactoryWrapper.createEncoder(videoCodecInfo); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + if(forceSWCodec && forceSWCodecs.isEmpty()) { + return softwareVideoEncoderFactory.getSupportedCodecs(); + } + return simulcastVideoEncoderFactoryWrapper.getSupportedCodecs(); + } +} diff --git a/assets/sponsors/stream-logo.png b/assets/sponsors/stream-logo.png new file mode 100644 index 0000000000..671eea96df Binary files /dev/null and b/assets/sponsors/stream-logo.png differ diff --git a/common/cpp/include/flutter_common.h b/common/cpp/include/flutter_common.h new file mode 100644 index 0000000000..50e6097bf9 --- /dev/null +++ b/common/cpp/include/flutter_common.h @@ -0,0 +1,188 @@ +#ifndef FLUTTER_WEBRTC_COMMON_HXX +#define FLUTTER_WEBRTC_COMMON_HXX + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +typedef flutter::EncodableValue EncodableValue; +typedef flutter::EncodableMap EncodableMap; +typedef flutter::EncodableList EncodableList; +typedef flutter::BinaryMessenger BinaryMessenger; +typedef flutter::TextureRegistrar TextureRegistrar; +typedef flutter::PluginRegistrar PluginRegistrar; +typedef 
flutter::MethodChannel MethodChannel; +typedef flutter::EventChannel EventChannel; +typedef flutter::EventSink EventSink; +typedef flutter::MethodCall MethodCall; +typedef flutter::MethodResult MethodResult; + +class TaskRunner; + +// foo.StringValue() becomes std::get(foo) +// foo.IsString() becomes std::holds_alternative(foo) + +template +inline bool TypeIs(const EncodableValue val) { + return std::holds_alternative(val); +} + +template +inline const T GetValue(EncodableValue val) { + return std::get(val); +} + +inline EncodableValue findEncodableValue(const EncodableMap& map, + const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end()) + return it->second; + return EncodableValue(); +} + +inline EncodableMap findMap(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return EncodableMap(); +} + +inline EncodableList findList(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return EncodableList(); +} + +inline std::string findString(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return std::string(); +} + +inline int findInt(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return -1; +} + +inline bool findBoolean(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return false; +} + +inline double findDouble(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && 
TypeIs(it->second)) + return GetValue(it->second); + return 0.0; +} + +inline std::optional maybeFindDouble(const EncodableMap& map, + const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return std::nullopt; +} + +inline std::vector findVector(const EncodableMap& map, + const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs>(it->second)) + return GetValue>(it->second); + return std::vector(); +} + +inline int64_t findLongInt(const EncodableMap& map, const std::string& key) { + for (auto it : map) { + if (key == GetValue(it.first)) { + if (TypeIs(it.second)) { + return GetValue(it.second); + } else if (TypeIs(it.second)) { + return GetValue(it.second); + } + } + } + + return -1; +} + +inline int toInt(flutter::EncodableValue inputVal, int defaultVal) { + int intValue = defaultVal; + if (TypeIs(inputVal)) { + intValue = GetValue(inputVal); + } else if (TypeIs(inputVal)) { + intValue = GetValue(inputVal); + } else if (TypeIs(inputVal)) { + intValue = atoi(GetValue(inputVal).c_str()); + } + return intValue; +} + +class MethodCallProxy { + public: + static std::unique_ptr Create(const MethodCall& call); + virtual ~MethodCallProxy() = default; + // The name of the method being called. + virtual const std::string& method_name() const = 0; + + // The arguments to the method call, or NULL if there are none. + virtual const EncodableValue* arguments() const = 0; +}; + +class MethodResultProxy { + public: + static std::unique_ptr Create( + std::unique_ptr method_result); + + virtual ~MethodResultProxy() = default; + + // Reports success with no result. + virtual void Success() = 0; + + // Reports success with a result. + virtual void Success(const EncodableValue& result) = 0; + + // Reports an error. 
+ virtual void Error(const std::string& error_code, + const std::string& error_message, + const EncodableValue& error_details) = 0; + + // Reports an error with a default error code and no details. + virtual void Error(const std::string& error_code, + const std::string& error_message = "") = 0; + + virtual void NotImplemented() = 0; +}; + +class EventChannelProxy { + public: + static std::unique_ptr Create( + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName); + + virtual ~EventChannelProxy() = default; + + virtual void Success(const EncodableValue& event, + bool cache_event = true) = 0; +}; + +#endif // FLUTTER_WEBRTC_COMMON_HXX diff --git a/common/cpp/include/flutter_data_channel.h b/common/cpp/include/flutter_data_channel.h new file mode 100644 index 0000000000..1e5bfd1584 --- /dev/null +++ b/common/cpp/include/flutter_data_channel.h @@ -0,0 +1,58 @@ +#ifndef FLUTTER_WEBRTC_RTC_DATA_CHANNEL_HXX +#define FLUTTER_WEBRTC_RTC_DATA_CHANNEL_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +namespace flutter_webrtc_plugin { + +class FlutterRTCDataChannelObserver : public RTCDataChannelObserver { + public: + FlutterRTCDataChannelObserver(scoped_refptr data_channel, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channel_name); + virtual ~FlutterRTCDataChannelObserver(); + + virtual void OnStateChange(RTCDataChannelState state) override; + + virtual void OnMessage(const char* buffer, int length, bool binary) override; + + scoped_refptr data_channel() { return data_channel_; } + + private: + std::unique_ptr event_channel_; + scoped_refptr data_channel_; +}; + +class FlutterDataChannel { + public: + FlutterDataChannel(FlutterWebRTCBase* base) : base_(base) {} + + void CreateDataChannel(const std::string& peerConnectionId, + const std::string& label, + const EncodableMap& dataChannelDict, + RTCPeerConnection* pc, + std::unique_ptr); + + void DataChannelSend(RTCDataChannel* 
data_channel, + const std::string& type, + const EncodableValue& data, + std::unique_ptr); + + void DataChannelGetBufferedAmount(RTCDataChannel* data_channel, + std::unique_ptr result); + + void DataChannelClose(RTCDataChannel* data_channel, + const std::string& data_channel_uuid, + std::unique_ptr); + + RTCDataChannel* DataChannelForId(const std::string& id); + + private: + FlutterWebRTCBase* base_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_DATA_CHANNEL_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_frame_capturer.h b/common/cpp/include/flutter_frame_capturer.h new file mode 100644 index 0000000000..41e9a6556e --- /dev/null +++ b/common/cpp/include/flutter_frame_capturer.h @@ -0,0 +1,37 @@ +#ifndef FLUTTER_WEBRTC_RTC_FRAME_CAPTURER_HXX +#define FLUTTER_WEBRTC_RTC_FRAME_CAPTURER_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include "rtc_video_frame.h" +#include "rtc_video_renderer.h" + +#include + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterFrameCapturer + : public RTCVideoRenderer> { + public: + FlutterFrameCapturer(RTCVideoTrack* track, std::string path); + + virtual void OnFrame(scoped_refptr frame) override; + + void CaptureFrame(std::unique_ptr result); + + private: + RTCVideoTrack* track_; + std::string path_; + std::mutex mutex_; + scoped_refptr frame_; + volatile bool catch_frame_; + + bool SaveFrame(); +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_FRAME_CAPTURER_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_frame_cryptor.h b/common/cpp/include/flutter_frame_cryptor.h new file mode 100644 index 0000000000..36756272f9 --- /dev/null +++ b/common/cpp/include/flutter_frame_cryptor.h @@ -0,0 +1,103 @@ +#ifndef FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX +#define FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include 
"rtc_frame_cryptor.h" + +namespace flutter_webrtc_plugin { + +class FlutterFrameCryptorObserver : public libwebrtc::RTCFrameCryptorObserver { + public: + FlutterFrameCryptorObserver(BinaryMessenger* messenger, TaskRunner* task_runner, const std::string& channelName) + : event_channel_(EventChannelProxy::Create(messenger, task_runner, channelName)) {} + void OnFrameCryptionStateChanged( + const string participant_id, + libwebrtc::RTCFrameCryptionState state); + private: + std::unique_ptr event_channel_; +}; + +class FlutterFrameCryptor { + public: + FlutterFrameCryptor(FlutterWebRTCBase* base) : base_(base) {} + + // Since this takes ownership of result, ownership will be passed back to 'outResult' if this function fails + bool HandleFrameCryptorMethodCall( + const MethodCallProxy& method_call, + std::unique_ptr result, + std::unique_ptr *outResult); + + void FrameCryptorFactoryCreateFrameCryptor( + const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorSetKeyIndex(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorGetKeyIndex(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorSetEnabled(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorGetEnabled(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorDispose(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorFactoryCreateKeyProvider( + const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderSetSharedKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderRatchetSharedKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderExportSharedKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderSetKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderRatchetKey(const EncodableMap& constraints, + std::unique_ptr 
result); + + void KeyProviderExportKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderSetSifTrailer(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderDispose(const EncodableMap& constraints, + std::unique_ptr result); + + // std::unique_ptr result); + // 'keyProviderSetKey', + // 'keyProviderSetKeys', + // 'keyProviderGetKeys', + // 'keyProviderDispose', + // 'frameCryptorFactoryCreateFrameCryptor', + // 'frameCryptorFactoryCreateKeyProvider', + // 'frameCryptorSetKeyIndex', + // 'frameCryptorGetKeyIndex', + // 'frameCryptorSetEnabled', + // 'frameCryptorGetEnabled', + // 'frameCryptorDispose', + + private: + FlutterWebRTCBase* base_; + std::map> + frame_cryptors_; + std::map> + frame_cryptor_observers_; + std::map> key_providers_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX diff --git a/common/cpp/include/flutter_media_stream.h b/common/cpp/include/flutter_media_stream.h new file mode 100644 index 0000000000..8139a56174 --- /dev/null +++ b/common/cpp/include/flutter_media_stream.h @@ -0,0 +1,57 @@ +#ifndef FLUTTER_WEBRTC_RTC_GET_USERMEDIA_HXX +#define FLUTTER_WEBRTC_RTC_GET_USERMEDIA_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +namespace flutter_webrtc_plugin { + +class FlutterMediaStream { + public: + FlutterMediaStream(FlutterWebRTCBase* base); + + void GetUserMedia(const EncodableMap& constraints, + std::unique_ptr result); + + void GetUserAudio(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params); + + void GetUserVideo(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params); + + void GetSources(std::unique_ptr result); + + void SelectAudioOutput(const std::string& device_id, + std::unique_ptr result); + + void SelectAudioInput(const std::string& device_id, + std::unique_ptr result); + + void MediaStreamGetTracks(const std::string& stream_id, + std::unique_ptr result); + 
+ void MediaStreamDispose(const std::string& stream_id, + std::unique_ptr result); + + void MediaStreamTrackSetEnable(const std::string& track_id, + std::unique_ptr result); + + void MediaStreamTrackSwitchCamera(const std::string& track_id, + std::unique_ptr result); + + void MediaStreamTrackDispose(const std::string& track_id, + std::unique_ptr result); + + void CreateLocalMediaStream(std::unique_ptr result); + + void OnDeviceChange(); + + private: + FlutterWebRTCBase* base_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_GET_USERMEDIA_HXX diff --git a/common/cpp/include/flutter_peerconnection.h b/common/cpp/include/flutter_peerconnection.h new file mode 100644 index 0000000000..699823dfdc --- /dev/null +++ b/common/cpp/include/flutter_peerconnection.h @@ -0,0 +1,209 @@ +#ifndef FLUTTER_WEBRTC_RTC_PEER_CONNECTION_HXX +#define FLUTTER_WEBRTC_RTC_PEER_CONNECTION_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +namespace flutter_webrtc_plugin { + +class FlutterPeerConnectionObserver : public RTCPeerConnectionObserver { + public: + FlutterPeerConnectionObserver(FlutterWebRTCBase* base, + scoped_refptr peerconnection, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channel_name, + std::string& peerConnectionId); + + virtual void OnSignalingState(RTCSignalingState state) override; + virtual void OnPeerConnectionState(RTCPeerConnectionState state) override; + virtual void OnIceGatheringState(RTCIceGatheringState state) override; + virtual void OnIceConnectionState(RTCIceConnectionState state) override; + virtual void OnIceCandidate( + scoped_refptr candidate) override; + virtual void OnAddStream(scoped_refptr stream) override; + virtual void OnRemoveStream(scoped_refptr stream) override; + + virtual void OnTrack(scoped_refptr transceiver) override; + virtual void OnAddTrack(vector> streams, + scoped_refptr receiver) override; + virtual void OnRemoveTrack(scoped_refptr receiver) override; + 
virtual void OnDataChannel( + scoped_refptr data_channel) override; + virtual void OnRenegotiationNeeded() override; + + scoped_refptr MediaStreamForId(const std::string& id); + + scoped_refptr MediaTrackForId(const std::string& id); + + void RemoveStreamForId(const std::string& id); + + private: + std::unique_ptr event_channel_; + scoped_refptr peerconnection_; + std::map> remote_streams_; + FlutterWebRTCBase* base_; + std::string id_; +}; + +class FlutterPeerConnection { + public: + FlutterPeerConnection(FlutterWebRTCBase* base) : base_(base) {} + + void CreateRTCPeerConnection(const EncodableMap& configuration, + const EncodableMap& constraints, + std::unique_ptr result); + + void RTCPeerConnectionClose(RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result); + + void RTCPeerConnectionDispose(RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result); + + void CreateOffer(const EncodableMap& constraints, + RTCPeerConnection* pc, + std::unique_ptr result); + + void CreateAnswer(const EncodableMap& constraints, + RTCPeerConnection* pc, + std::unique_ptr result); + + void SetLocalDescription(RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result); + + void SetRemoteDescription(RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result); + + void GetLocalDescription(RTCPeerConnection* pc, + std::unique_ptr result); + + void GetRemoteDescription(RTCPeerConnection* pc, + std::unique_ptr result); + + scoped_refptr mapToRtpTransceiverInit( + const EncodableMap& transceiverInit); + + RTCRtpTransceiverDirection stringToTransceiverDirection( + std::string direction); + + libwebrtc::scoped_refptr mapToEncoding( + const EncodableMap& parameters); + + void AddTransceiver(RTCPeerConnection* pc, + const std::string& trackId, + const std::string& mediaType, + const EncodableMap& transceiverInit, + std::unique_ptr result); + + void GetTransceivers(RTCPeerConnection* pc, + std::unique_ptr result); + + 
void GetReceivers(RTCPeerConnection* pc, + std::unique_ptr result); + + void RtpSenderSetTrack(RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result); + + void RtpSenderSetStream(RTCPeerConnection* pc, + std::vector streamIds, + std::string rtpSenderId, + std::unique_ptr result); + + void RtpSenderReplaceTrack(RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result); + + scoped_refptr updateRtpParameters( + EncodableMap newParameters, + scoped_refptr parameters); + + void RtpSenderSetParameters(RTCPeerConnection* pc, + std::string rtpSenderId, + const EncodableMap& parameters, + std::unique_ptr result); + + void RtpTransceiverStop(RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result); + + void RtpTransceiverGetCurrentDirection( + RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result); + + void SetConfiguration(RTCPeerConnection* pc, + const EncodableMap& configuration, + std::unique_ptr result); + + void CaptureFrame(RTCVideoTrack* track, + std::string path, + std::unique_ptr result); + + scoped_refptr getRtpTransceiverById(RTCPeerConnection* pc, + std::string id); + + void RtpTransceiverSetDirection(RTCPeerConnection* pc, + std::string transceiverId, + std::string direction, + std::unique_ptr result); + + void RtpTransceiverSetCodecPreferences( + RTCPeerConnection* pc, + std::string transceiverId, + const EncodableList codecs, + std::unique_ptr result); + + void GetSenders(RTCPeerConnection* pc, + std::unique_ptr result); + + void AddIceCandidate(RTCIceCandidate* candidate, + RTCPeerConnection* pc, + std::unique_ptr result); + + void GetStats(const std::string& track_id, + RTCPeerConnection* pc, + std::unique_ptr result); + + void MediaStreamAddTrack(scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result); + + void MediaStreamRemoveTrack(scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result); + + void 
AddTrack(RTCPeerConnection* pc, + scoped_refptr track, + std::vector streamIds, + std::unique_ptr result); + + void RemoveTrack(RTCPeerConnection* pc, + std::string senderId, + std::unique_ptr result); + + private: + FlutterWebRTCBase* base_; +}; + +std::string RTCMediaTypeToString(RTCMediaType type); + +std::string transceiverDirectionString(RTCRtpTransceiverDirection direction); + +const char* iceConnectionStateString(RTCIceConnectionState state); + +const char* signalingStateString(RTCSignalingState state); + +const char* peerConnectionStateString(RTCPeerConnectionState state); + +const char* iceGatheringStateString(RTCIceGatheringState state); + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_PEER_CONNECTION_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_screen_capture.h b/common/cpp/include/flutter_screen_capture.h new file mode 100644 index 0000000000..07b4501e5e --- /dev/null +++ b/common/cpp/include/flutter_screen_capture.h @@ -0,0 +1,60 @@ +#ifndef FLUTTER_SCRREN_CAPTURE_HXX +#define FLUTTER_SCRREN_CAPTURE_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include "rtc_desktop_capturer.h" +#include "rtc_desktop_media_list.h" + +namespace flutter_webrtc_plugin { + +class FlutterScreenCapture : public MediaListObserver, + public DesktopCapturerObserver { + public: + FlutterScreenCapture(FlutterWebRTCBase* base); + + void GetDisplayMedia(const EncodableMap& constraints, + std::unique_ptr result); + + void GetDesktopSources(const EncodableList& types, + std::unique_ptr result); + + void UpdateDesktopSources(const EncodableList& types, + std::unique_ptr result); + + void GetDesktopSourceThumbnail(std::string source_id, + int width, + int height, + std::unique_ptr result); + + protected: + void OnMediaSourceAdded(scoped_refptr source) override; + + void OnMediaSourceRemoved(scoped_refptr source) override; + + void OnMediaSourceNameChanged(scoped_refptr source) override; + + void 
OnMediaSourceThumbnailChanged( + scoped_refptr source) override; + + void OnStart(scoped_refptr capturer) override; + + void OnPaused(scoped_refptr capturer) override; + + void OnStop(scoped_refptr capturer) override; + + void OnError(scoped_refptr capturer) override; + + private: + bool BuildDesktopSourcesList(const EncodableList& types, bool force_reload); + + private: + FlutterWebRTCBase* base_; + std::map> medialist_; + std::vector> sources_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // FLUTTER_SCRREN_CAPTURE_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_video_renderer.h b/common/cpp/include/flutter_video_renderer.h new file mode 100644 index 0000000000..b2454f8458 --- /dev/null +++ b/common/cpp/include/flutter_video_renderer.h @@ -0,0 +1,84 @@ +#ifndef FLUTTER_WEBRTC_RTC_VIDEO_RENDERER_HXX +#define FLUTTER_WEBRTC_RTC_VIDEO_RENDERER_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include "rtc_video_frame.h" +#include "rtc_video_renderer.h" + +#include + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterVideoRenderer + : public RTCVideoRenderer>, + public RefCountInterface { + public: + FlutterVideoRenderer() = default; + ~FlutterVideoRenderer(); + + void initialize(TextureRegistrar* registrar, + BinaryMessenger* messenger, + TaskRunner* task_runner, + std::unique_ptr texture, + int64_t texture_id); + + virtual const FlutterDesktopPixelBuffer* CopyPixelBuffer(size_t width, + size_t height) const; + + virtual void OnFrame(scoped_refptr frame) override; + + void SetVideoTrack(scoped_refptr track); + + int64_t texture_id() { return texture_id_; } + + bool CheckMediaStream(std::string mediaId); + + bool CheckVideoTrack(std::string mediaId); + + std::string media_stream_id; + + private: + struct FrameSize { + size_t width; + size_t height; + }; + FrameSize last_frame_size_ = {0, 0}; + bool first_frame_rendered = false; + TextureRegistrar* registrar_ = nullptr; + 
std::unique_ptr event_channel_; + int64_t texture_id_ = -1; + scoped_refptr track_ = nullptr; + scoped_refptr frame_; + std::unique_ptr texture_; + std::shared_ptr pixel_buffer_; + mutable std::shared_ptr rgb_buffer_; + mutable std::mutex mutex_; + RTCVideoFrame::VideoRotation rotation_ = RTCVideoFrame::kVideoRotation_0; +}; + +class FlutterVideoRendererManager { + public: + FlutterVideoRendererManager(FlutterWebRTCBase* base); + + void CreateVideoRendererTexture(std::unique_ptr result); + + void VideoRendererSetSrcObject(int64_t texture_id, + const std::string& stream_id, + const std::string& owner_tag, + const std::string& track_id); + + void VideoRendererDispose(int64_t texture_id, + std::unique_ptr result); + + private: + FlutterWebRTCBase* base_; + std::map> renderers_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_VIDEO_RENDERER_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_webrtc.h b/common/cpp/include/flutter_webrtc.h new file mode 100644 index 0000000000..573956b9aa --- /dev/null +++ b/common/cpp/include/flutter_webrtc.h @@ -0,0 +1,45 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_HXX +#define PLUGINS_FLUTTER_WEBRTC_HXX + +#include "flutter_common.h" + +#include "flutter_data_channel.h" +#include "flutter_frame_cryptor.h" +#include "flutter_media_stream.h" +#include "flutter_peerconnection.h" +#include "flutter_screen_capture.h" +#include "flutter_video_renderer.h" + +#include "libwebrtc.h" + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterWebRTCPlugin : public flutter::Plugin { + public: + virtual BinaryMessenger* messenger() = 0; + + virtual TextureRegistrar* textures() = 0; + + virtual TaskRunner* task_runner() = 0; +}; + +class FlutterWebRTC : public FlutterWebRTCBase, + public FlutterVideoRendererManager, + public FlutterMediaStream, + public FlutterPeerConnection, + public FlutterScreenCapture, + public FlutterDataChannel, + public FlutterFrameCryptor { + public: + 
FlutterWebRTC(FlutterWebRTCPlugin* plugin); + virtual ~FlutterWebRTC(); + + void HandleMethodCall(const MethodCallProxy& method_call, + std::unique_ptr result); +}; + +} // namespace flutter_webrtc_plugin + +#endif // PLUGINS_FLUTTER_WEBRTC_HXX diff --git a/common/cpp/include/flutter_webrtc_base.h b/common/cpp/include/flutter_webrtc_base.h new file mode 100644 index 0000000000..9edabc7680 --- /dev/null +++ b/common/cpp/include/flutter_webrtc_base.h @@ -0,0 +1,132 @@ +#ifndef FLUTTER_WEBRTC_BASE_HXX +#define FLUTTER_WEBRTC_BASE_HXX + +#include "flutter_common.h" + +#include +#include +#include +#include +#include + +#include "libwebrtc.h" + +#include "rtc_audio_device.h" +#include "rtc_desktop_device.h" +#include "rtc_dtmf_sender.h" +#include "rtc_media_stream.h" +#include "rtc_media_track.h" +#include "rtc_mediaconstraints.h" +#include "rtc_peerconnection.h" +#include "rtc_peerconnection_factory.h" +#include "rtc_video_device.h" + +#include "uuidxx.h" + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterVideoRenderer; +class FlutterRTCDataChannelObserver; +class FlutterPeerConnectionObserver; + +class FlutterWebRTCBase { + public: + friend class FlutterMediaStream; + friend class FlutterPeerConnection; + friend class FlutterVideoRendererManager; + friend class FlutterDataChannel; + friend class FlutterPeerConnectionObserver; + friend class FlutterScreenCapture; + friend class FlutterFrameCryptor; + enum ParseConstraintType { kMandatory, kOptional }; + + public: + FlutterWebRTCBase(BinaryMessenger* messenger, TextureRegistrar* textures, TaskRunner* task_runner); + ~FlutterWebRTCBase(); + + std::string GenerateUUID(); + + RTCPeerConnection* PeerConnectionForId(const std::string& id); + + void RemovePeerConnectionForId(const std::string& id); + + RTCMediaTrack* MediaTrackForId(const std::string& id); + + void RemoveMediaTrackForId(const std::string& id); + + FlutterPeerConnectionObserver* PeerConnectionObserversForId( + const std::string& 
id); + + void RemovePeerConnectionObserversForId(const std::string& id); + + scoped_refptr MediaStreamForId( + const std::string& id, + std::string ownerTag = std::string()); + + void RemoveStreamForId(const std::string& id); + + bool ParseConstraints(const EncodableMap& constraints, + RTCConfiguration* configuration); + + scoped_refptr ParseMediaConstraints( + const EncodableMap& constraints); + + bool ParseRTCConfiguration(const EncodableMap& map, + RTCConfiguration& configuration); + + scoped_refptr MediaTracksForId(const std::string& id); + + void RemoveTracksForId(const std::string& id); + + EventChannelProxy* event_channel(); + + + libwebrtc::scoped_refptr GetRtpSenderById( + RTCPeerConnection* pc, + std::string id); + + libwebrtc::scoped_refptr GetRtpReceiverById( + RTCPeerConnection* pc, + std::string id); + + private: + void ParseConstraints(const EncodableMap& src, + scoped_refptr mediaConstraints, + ParseConstraintType type = kMandatory); + + bool CreateIceServers(const EncodableList& iceServersArray, + IceServer* ice_servers); + + protected: + scoped_refptr factory_; + scoped_refptr audio_device_; + scoped_refptr video_device_; + scoped_refptr desktop_device_; + RTCConfiguration configuration_; + + std::map> peerconnections_; + std::map> local_streams_; + std::map> local_tracks_; + std::map> video_capturers_; + std::map> renders_; + std::map> + data_channel_observers_; + std::map> + peerconnection_observers_; + mutable std::mutex mutex_; + + void lock() { mutex_.lock(); } + void unlock() { mutex_.unlock(); } + + protected: + BinaryMessenger* messenger_; + TaskRunner *task_runner_; + TextureRegistrar* textures_; + std::unique_ptr event_channel_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_BASE_HXX diff --git a/common/cpp/include/task_runner.h b/common/cpp/include/task_runner.h new file mode 100644 index 0000000000..74c510c581 --- /dev/null +++ b/common/cpp/include/task_runner.h @@ -0,0 +1,17 @@ +// Copyright 2024 The Flutter 
Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + #ifndef PACKAGES_FLUTTER_WEBRTC_TASK_RUNNER_H_ + #define PACKAGES_FLUTTER_WEBRTC_TASK_RUNNER_H_ + + #include + + using TaskClosure = std::function; + + class TaskRunner { + public: + virtual void EnqueueTask(TaskClosure task) = 0; + virtual ~TaskRunner() = default; + }; + + #endif // PACKAGES_FLUTTER_WEBRTC_TASK_RUNNER_H_ \ No newline at end of file diff --git a/common/cpp/src/flutter_common.cc b/common/cpp/src/flutter_common.cc new file mode 100644 index 0000000000..1daa606a17 --- /dev/null +++ b/common/cpp/src/flutter_common.cc @@ -0,0 +1,143 @@ +#include "flutter_common.h" +#include "task_runner.h" + +#include + +class MethodCallProxyImpl : public MethodCallProxy { + public: + explicit MethodCallProxyImpl(const MethodCall& method_call) + : method_call_(method_call) {} + + ~MethodCallProxyImpl() {} + + // The name of the method being called. + + const std::string& method_name() const override { + return method_call_.method_name(); + } + + // The arguments to the method call, or NULL if there are none. + const EncodableValue* arguments() const override { + return method_call_.arguments(); + } + + private: + const MethodCall& method_call_; +}; + +std::unique_ptr MethodCallProxy::Create( + const MethodCall& call) { + return std::make_unique(call); +} + +class MethodResultProxyImpl : public MethodResultProxy { + public: + explicit MethodResultProxyImpl(std::unique_ptr method_result) + : method_result_(std::move(method_result)) {} + ~MethodResultProxyImpl() {} + + // Reports success with no result. + void Success() override { method_result_->Success(); } + + // Reports success with a result. + void Success(const EncodableValue& result) override { + method_result_->Success(result); + } + + // Reports an error. 
+ void Error(const std::string& error_code, + const std::string& error_message, + const EncodableValue& error_details) override { + method_result_->Error(error_code, error_message, error_details); + } + + // Reports an error with a default error code and no details. + void Error(const std::string& error_code, + const std::string& error_message = "") override { + method_result_->Error(error_code, error_message); + } + + void NotImplemented() override { method_result_->NotImplemented(); } + + private: + std::unique_ptr method_result_; +}; + +std::unique_ptr MethodResultProxy::Create( + std::unique_ptr method_result) { + return std::make_unique(std::move(method_result)); +} + +class EventChannelProxyImpl : public EventChannelProxy { + public: + EventChannelProxyImpl(BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName) + : channel_(std::make_unique( + messenger, + channelName, + &flutter::StandardMethodCodec::GetInstance())), + task_runner_(task_runner) { + auto handler = std::make_unique< + flutter::StreamHandlerFunctions>( + [&](const EncodableValue* arguments, + std::unique_ptr>&& events) + -> std::unique_ptr> { + sink_ = std::move(events); + std::weak_ptr weak_sink = sink_; + for (auto& event : event_queue_) { + PostEvent(event); + } + event_queue_.clear(); + on_listen_called_ = true; + return nullptr; + }, + [&](const EncodableValue* arguments) + -> std::unique_ptr> { + on_listen_called_ = false; + return nullptr; + }); + + channel_->SetStreamHandler(std::move(handler)); + } + + virtual ~EventChannelProxyImpl() {} + + void Success(const EncodableValue& event, bool cache_event = true) override { + if (on_listen_called_) { + PostEvent(event); + } else { + if (cache_event) { + event_queue_.push_back(event); + } + } + } + + void PostEvent(const EncodableValue& event) { + if(task_runner_) { + std::weak_ptr weak_sink = sink_; + task_runner_->EnqueueTask([weak_sink, event]() { + auto sink = weak_sink.lock(); + if (sink) { + 
sink->Success(event); + } + }); + } else { + sink_->Success(event); + } + } + + private: + std::unique_ptr channel_; + std::shared_ptr> sink_; + std::list event_queue_; + bool on_listen_called_ = false; + TaskRunner* task_runner_; + }; + +std::unique_ptr EventChannelProxy::Create( + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName) { + return std::make_unique(messenger, task_runner, channelName); +} \ No newline at end of file diff --git a/common/cpp/src/flutter_data_channel.cc b/common/cpp/src/flutter_data_channel.cc new file mode 100644 index 0000000000..37afd12b54 --- /dev/null +++ b/common/cpp/src/flutter_data_channel.cc @@ -0,0 +1,158 @@ +#include "flutter_data_channel.h" + +#include + +namespace flutter_webrtc_plugin { + +FlutterRTCDataChannelObserver::FlutterRTCDataChannelObserver( + scoped_refptr data_channel, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName) + : event_channel_(EventChannelProxy::Create(messenger, task_runner, channelName)), + data_channel_(data_channel) { + data_channel_->RegisterObserver(this); +} + +FlutterRTCDataChannelObserver::~FlutterRTCDataChannelObserver() {} + +void FlutterDataChannel::CreateDataChannel( + const std::string& peerConnectionId, + const std::string& label, + const EncodableMap& dataChannelDict, + RTCPeerConnection* pc, + std::unique_ptr result) { + RTCDataChannelInit init; + init.id = GetValue(dataChannelDict.find(EncodableValue("id"))->second); + init.ordered = + GetValue(dataChannelDict.find(EncodableValue("ordered"))->second); + + if (dataChannelDict.find(EncodableValue("maxRetransmits")) != + dataChannelDict.end()) { + init.maxRetransmits = GetValue( + dataChannelDict.find(EncodableValue("maxRetransmits"))->second); + } + + std::string protocol = "sctp"; + + if (dataChannelDict.find(EncodableValue("protocol")) == + dataChannelDict.end()) { + protocol = GetValue( + dataChannelDict.find(EncodableValue("protocol"))->second); + } + + 
init.protocol = protocol; + + init.negotiated = GetValue( + dataChannelDict.find(EncodableValue("negotiated"))->second); + + scoped_refptr data_channel = + pc->CreateDataChannel(label.c_str(), &init); + + std::string uuid = base_->GenerateUUID(); + std::string event_channel = + "FlutterWebRTC/dataChannelEvent" + peerConnectionId + uuid; + + std::unique_ptr observer( + new FlutterRTCDataChannelObserver(data_channel, base_->messenger_, base_->task_runner_, + event_channel)); + + base_->lock(); + base_->data_channel_observers_[uuid] = std::move(observer); + base_->unlock(); + + EncodableMap params; + params[EncodableValue("id")] = EncodableValue(init.id); + params[EncodableValue("label")] = + EncodableValue(data_channel->label().std_string()); + params[EncodableValue("flutterId")] = EncodableValue(uuid); + result->Success(EncodableValue(params)); +} + +void FlutterDataChannel::DataChannelSend( + RTCDataChannel* data_channel, + const std::string& type, + const EncodableValue& data, + std::unique_ptr result) { + bool is_binary = type == "binary"; + if (is_binary && TypeIs>(data)) { + std::vector buffer = GetValue>(data); + data_channel->Send(buffer.data(), static_cast(buffer.size()), + true); + } else { + std::string str = GetValue(data); + data_channel->Send(reinterpret_cast(str.c_str()), + static_cast(str.length()), false); + } + result->Success(); +} + +void FlutterDataChannel::DataChannelGetBufferedAmount(RTCDataChannel* data_channel, + std::unique_ptr result) { + EncodableMap params; + params[EncodableValue("bufferedAmount")] = EncodableValue((int64_t)data_channel->buffered_amount()); + result->Success(EncodableValue(params)); +} + +void FlutterDataChannel::DataChannelClose( + RTCDataChannel* data_channel, + const std::string& data_channel_uuid, + std::unique_ptr result) { + data_channel->Close(); + auto it = base_->data_channel_observers_.find(data_channel_uuid); + if (it != base_->data_channel_observers_.end()) + base_->data_channel_observers_.erase(it); + 
result->Success(); +} + +RTCDataChannel* FlutterDataChannel::DataChannelForId(const std::string& uuid) { + auto it = base_->data_channel_observers_.find(uuid); + + if (it != base_->data_channel_observers_.end()) { + FlutterRTCDataChannelObserver* observer = it->second.get(); + scoped_refptr data_channel = observer->data_channel(); + return data_channel.get(); + } + return nullptr; +} + +static const char* DataStateString(RTCDataChannelState state) { + switch (state) { + case RTCDataChannelConnecting: + return "connecting"; + case RTCDataChannelOpen: + return "open"; + case RTCDataChannelClosing: + return "closing"; + case RTCDataChannelClosed: + return "closed"; + } + return ""; +} + +void FlutterRTCDataChannelObserver::OnStateChange(RTCDataChannelState state) { + EncodableMap params; + params[EncodableValue("event")] = EncodableValue("dataChannelStateChanged"); + params[EncodableValue("id")] = EncodableValue(data_channel_->id()); + params[EncodableValue("state")] = EncodableValue(DataStateString(state)); + auto data = EncodableValue(params); + event_channel_->Success(data); +} + +void FlutterRTCDataChannelObserver::OnMessage(const char* buffer, + int length, + bool binary) { + EncodableMap params; + params[EncodableValue("event")] = EncodableValue("dataChannelReceiveMessage"); + + params[EncodableValue("id")] = EncodableValue(data_channel_->id()); + params[EncodableValue("type")] = EncodableValue(binary ? "binary" : "text"); + std::string str(buffer, length); + params[EncodableValue("data")] = + binary ? 
EncodableValue(std::vector(str.begin(), str.end())) + : EncodableValue(str); + + auto data = EncodableValue(params); + event_channel_->Success(data); +} +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_frame_capturer.cc b/common/cpp/src/flutter_frame_capturer.cc new file mode 100644 index 0000000000..4d0026d74f --- /dev/null +++ b/common/cpp/src/flutter_frame_capturer.cc @@ -0,0 +1,76 @@ +#ifdef _MSC_VER +#define _CRT_SECURE_NO_WARNINGS +#endif + +#include "flutter_frame_capturer.h" +#include +#include +#include "svpng.hpp" + +namespace flutter_webrtc_plugin { + +FlutterFrameCapturer::FlutterFrameCapturer(RTCVideoTrack* track, + std::string path) { + track_ = track; + path_ = path; +} + +void FlutterFrameCapturer::OnFrame(scoped_refptr frame) { + if (frame_ != nullptr) { + return; + } + + frame_ = frame.get()->Copy(); + catch_frame_ = true; +} + +void FlutterFrameCapturer::CaptureFrame( + std::unique_ptr result) { + mutex_.lock(); + // Here init catch_frame_ flag + catch_frame_ = false; + + track_->AddRenderer(this); + // Here waiting for catch_frame_ is set to true + while(!catch_frame_){} + // Here unlock the mutex + mutex_.unlock(); + + mutex_.lock(); + track_->RemoveRenderer(this); + + bool success = SaveFrame(); + mutex_.unlock(); + + std::shared_ptr result_ptr(result.release()); + if (success) { + result_ptr->Success(); + } else { + result_ptr->Error("1", "Cannot save the frame as .png file"); + } +} + +bool FlutterFrameCapturer::SaveFrame() { + if (frame_ == nullptr) { + return false; + } + + int width = frame_.get()->width(); + int height = frame_.get()->height(); + int bytes_per_pixel = 4; + uint8_t* pixels = new uint8_t[width * height * bytes_per_pixel]; + + frame_.get()->ConvertToARGB(RTCVideoFrame::Type::kABGR, pixels, + /* unused */ -1, width, height); + + FILE* file = fopen(path_.c_str(), "wb"); + if (!file) { + return false; + } + + svpng(file, width, height, pixels, 1); + fclose(file); + return true; +} + +} // namespace 
flutter_webrtc_plugin \ No newline at end of file diff --git a/common/cpp/src/flutter_frame_cryptor.cc b/common/cpp/src/flutter_frame_cryptor.cc new file mode 100644 index 0000000000..a9e44e9bd6 --- /dev/null +++ b/common/cpp/src/flutter_frame_cryptor.cc @@ -0,0 +1,608 @@ +#include "flutter_frame_cryptor.h" + +#include "base/scoped_ref_ptr.h" + +namespace flutter_webrtc_plugin { + +libwebrtc::Algorithm AlgorithmFromInt(int algorithm) { + switch (algorithm) { + case 0: + return libwebrtc::Algorithm::kAesGcm; + case 1: + return libwebrtc::Algorithm::kAesCbc; + default: + return libwebrtc::Algorithm::kAesGcm; + } +} + +std::string frameCryptionStateToString(libwebrtc::RTCFrameCryptionState state) { + switch (state) { + case RTCFrameCryptionState::kNew: + return "new"; + case RTCFrameCryptionState::kOk: + return "ok"; + case RTCFrameCryptionState::kDecryptionFailed: + return "decryptionFailed"; + case RTCFrameCryptionState::kEncryptionFailed: + return "encryptionFailed"; + case RTCFrameCryptionState::kInternalError: + return "internalError"; + case RTCFrameCryptionState::kKeyRatcheted: + return "keyRatcheted"; + case RTCFrameCryptionState::kMissingKey: + return "missingKey"; + } + return ""; +} + +void FlutterFrameCryptorObserver::OnFrameCryptionStateChanged( + const string participant_id, + libwebrtc::RTCFrameCryptionState state) { + EncodableMap params; + params[EncodableValue("event")] = EncodableValue("frameCryptionStateChanged"); + params[EncodableValue("participantId")] = EncodableValue(participant_id.std_string()); + params[EncodableValue("state")] = + EncodableValue(frameCryptionStateToString(state)); + event_channel_->Success(EncodableValue(params)); +} + +bool FlutterFrameCryptor::HandleFrameCryptorMethodCall( + const MethodCallProxy& method_call, + std::unique_ptr result, + std::unique_ptr *outResult) { + const std::string& method_name = method_call.method_name(); + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); 
+ return true; + } + const EncodableMap params = GetValue(*method_call.arguments()); + + if (method_name == "frameCryptorFactoryCreateFrameCryptor") { + FrameCryptorFactoryCreateFrameCryptor(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorSetKeyIndex") { + FrameCryptorSetKeyIndex(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorGetKeyIndex") { + FrameCryptorGetKeyIndex(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorSetEnabled") { + FrameCryptorSetEnabled(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorGetEnabled") { + FrameCryptorGetEnabled(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorDispose") { + FrameCryptorDispose(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorFactoryCreateKeyProvider") { + FrameCryptorFactoryCreateKeyProvider(params, std::move(result)); + return true; + } else if (method_name == "keyProviderSetSharedKey") { + KeyProviderSetSharedKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderRatchetSharedKey") { + KeyProviderRatchetSharedKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderExportSharedKey") { + KeyProviderExportSharedKey(params, std::move(result)); + return true; + }else if (method_name == "keyProviderSetKey") { + KeyProviderSetKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderRatchetKey") { + KeyProviderRatchetKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderExportKey") { + KeyProviderExportKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderSetSifTrailer") { + KeyProviderSetSifTrailer(params, std::move(result)); + return true; + } else if (method_name == "keyProviderDispose") { + KeyProviderDispose(params, std::move(result)); + return true; + } 
+ + *outResult = std::move(result); + return false; +} + +void FlutterFrameCryptor::FrameCryptorFactoryCreateFrameCryptor( + const EncodableMap& constraints, + std::unique_ptr result) { + auto type = findString(constraints, "type"); + if (type == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "type is null"); + return; + } + + auto peerConnectionId = findString(constraints, "peerConnectionId"); + if (peerConnectionId == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "peerConnectionId is null"); + return; + } + + RTCPeerConnection* pc = base_->PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error( + "FrameCryptorFactoryCreateFrameCryptorFailed", + "FrameCryptorFactoryCreateFrameCryptor() peerConnection is null"); + return; + } + + auto rtpSenderId = findString(constraints, "rtpSenderId"); + auto rtpReceiverId = findString(constraints, "rtpReceiverId"); + + if (rtpReceiverId == std::string() && rtpSenderId == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "rtpSenderId or rtpReceiverId is null"); + return; + } + + auto algorithm = findInt(constraints, "algorithm"); + auto participantId = findString(constraints, "participantId"); + auto keyProviderId = findString(constraints, "keyProviderId"); + + if (type == "sender") { + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "sender is null"); + return; + } + std::string uuid = base_->GenerateUUID(); + auto keyProvider = key_providers_[keyProviderId]; + if (keyProvider == nullptr) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "keyProvider is null"); + return; + } + auto frameCryptor = + libwebrtc::FrameCryptorFactory::frameCryptorFromRtpSender(base_->factory_, + string(participantId), sender, AlgorithmFromInt(algorithm), + keyProvider); + std::string event_channel = 
"FlutterWebRTC/frameCryptorEvent" + uuid; + + scoped_refptr observer(new RefCountedObject(base_->messenger_, base_->task_runner_, event_channel)); + + frameCryptor->RegisterRTCFrameCryptorObserver(observer); + + frame_cryptors_[uuid] = frameCryptor; + frame_cryptor_observers_[uuid] = observer; + EncodableMap params; + params[EncodableValue("frameCryptorId")] = uuid; + + result->Success(EncodableValue(params)); + } else if (type == "receiver") { + auto receiver = base_->GetRtpReceiverById(pc, rtpReceiverId); + if (nullptr == receiver.get()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "receiver is null"); + return; + } + std::string uuid = base_->GenerateUUID(); + auto keyProvider = key_providers_[keyProviderId]; + auto frameCryptor = + libwebrtc::FrameCryptorFactory::frameCryptorFromRtpReceiver(base_->factory_, + string(participantId), receiver, AlgorithmFromInt(algorithm), + keyProvider); + + std::string event_channel = "FlutterWebRTC/frameCryptorEvent" + uuid; + + scoped_refptr observer(new RefCountedObject(base_->messenger_, base_->task_runner_, event_channel)); + + frameCryptor->RegisterRTCFrameCryptorObserver(observer.get()); + + frame_cryptors_[uuid] = frameCryptor; + frame_cryptor_observers_[uuid] = observer; + EncodableMap params; + params[EncodableValue("frameCryptorId")] = uuid; + + result->Success(EncodableValue(params)); + } else { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "type is not sender or receiver"); + } +} + +void FlutterFrameCryptor::FrameCryptorSetKeyIndex( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptor is null"); + return; + } + auto 
key_index = findInt(constraints, "keyIndex"); + auto res = frameCryptor->SetKeyIndex(key_index); + EncodableMap params; + params[EncodableValue("result")] = res; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorGetKeyIndex( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptor is null"); + return; + } + EncodableMap params; + params[EncodableValue("keyIndex")] = frameCryptor->key_index(); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorSetEnabled( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorSetEnabledFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorSetEnabledFailed", "frameCryptor is null"); + return; + } + auto enabled = findBoolean(constraints, "enabled"); + frameCryptor->SetEnabled(enabled); + EncodableMap params; + params[EncodableValue("result")] = enabled; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorGetEnabled( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetEnabledFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetEnabledFailed", 
"frameCryptor is null"); + return; + } + EncodableMap params; + params[EncodableValue("enabled")] = frameCryptor->enabled(); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorDispose( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorDisposeFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorDisposeFailed", "frameCryptor is null"); + return; + } + frameCryptor->DeRegisterRTCFrameCryptorObserver(); + frame_cryptors_.erase(frameCryptorId); + frame_cryptor_observers_.erase(frameCryptorId); + EncodableMap params; + params[EncodableValue("result")] = "success"; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorFactoryCreateKeyProvider( + const EncodableMap& constraints, + std::unique_ptr result) { + libwebrtc::KeyProviderOptions options; + + + auto keyProviderOptions = findMap(constraints, "keyProviderOptions"); + if (keyProviderOptions == EncodableMap()) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", "keyProviderOptions is null"); + return; + } + + auto sharedKey = findBoolean(keyProviderOptions, "sharedKey"); + options.shared_key = sharedKey; + + + auto uncryptedMagicBytes = findVector(keyProviderOptions, "uncryptedMagicBytes"); + if (uncryptedMagicBytes.size() != 0) { + options.uncrypted_magic_bytes = uncryptedMagicBytes; + } + + auto ratchetSalt = findVector(keyProviderOptions, "ratchetSalt"); + if (ratchetSalt.size() == 0) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "ratchetSalt is null"); + return; + } + + options.ratchet_salt = ratchetSalt; + + auto ratchetWindowSize = findInt(keyProviderOptions, "ratchetWindowSize"); + if (ratchetWindowSize == -1) { + 
result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "ratchetSalt is null"); + return; + } + + options.ratchet_window_size = ratchetWindowSize; + + auto failureTolerance = findInt(keyProviderOptions, "failureTolerance"); + options.failure_tolerance = failureTolerance; + + auto keyRingSize = findInt(keyProviderOptions, "keyRingSize"); + options.key_ring_size = keyRingSize; + + auto discardFrameWhenCryptorNotReady = findBoolean(keyProviderOptions, "discardFrameWhenCryptorNotReady"); + options.discard_frame_when_cryptor_not_ready = discardFrameWhenCryptorNotReady; + + auto keyProvider = libwebrtc::KeyProvider::Create(&options); + if (nullptr == keyProvider.get()) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "createKeyProvider failed"); + return; + } + auto uuid = base_->GenerateUUID(); + key_providers_[uuid] = keyProvider; + EncodableMap params; + params[EncodableValue("keyProviderId")] = uuid; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderSetSharedKey(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetSharedKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetSharedKeyFailed", "keyProvider is null"); + return; + } + + auto key = findVector(constraints, "key"); + if (key.size() == 0) { + result->Error("KeyProviderSetSharedKeyFailed", "key is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderSetSharedKeyFailed", "keyIndex is null"); + return; + } + + + keyProvider->SetSharedKey(key_index, vector(key)); + EncodableMap params; + params[EncodableValue("result")] = true; + result->Success(EncodableValue(params)); +} + +void 
FlutterFrameCryptor::KeyProviderRatchetSharedKey(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderRatchetSharedKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderRatchetSharedKeyFailed", "keyProvider is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderRatchetSharedKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->RatchetSharedKey(key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderExportSharedKey(const EncodableMap& constraints, + std::unique_ptr result) { +auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderExportSharedKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderExportSharedKeyFailed", "keyProvider is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderExportSharedKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->ExportSharedKey(key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderExportKey(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + 
result->Error("KeyProviderExportKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderExportKeyFailed", "keyProvider is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderExportKeyFailed", "participantId is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderExportKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->ExportKey(participant_id, key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderSetSifTrailer(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetSifTrailerFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetSifTrailerFailed", "keyProvider is null"); + return; + } + + auto sifTrailer = findVector(constraints, "sifTrailer"); + if (sifTrailer.size() == 0) { + result->Error("KeyProviderSetSifTrailerFailed", "sifTrailer is null"); + return; + } + + keyProvider->SetSifTrailer(vector(sifTrailer)); + EncodableMap params; + params[EncodableValue("result")] = true; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderSetKey( + const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = 
key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetKeyFailed", "keyProvider is null"); + return; + } + + auto key = findVector(constraints, "key"); + if (key.size() == 0) { + result->Error("KeyProviderSetKeyFailed", "key is null"); + return; + } + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderSetKeyFailed", "keyIndex is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderSetKeyFailed", "participantId is null"); + return; + } + + keyProvider->SetKey(participant_id, key_index, vector(key)); + EncodableMap params; + params[EncodableValue("result")] = true; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderRatchetKey( + const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetKeysFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetKeysFailed", "keyProvider is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderSetKeyFailed", "participantId is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderSetKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->RatchetKey(participant_id, key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + + +void FlutterFrameCryptor::KeyProviderDispose( + const EncodableMap& constraints, + std::unique_ptr result) { + auto 
keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderDisposeFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderDisposeFailed", "keyProvider is null"); + return; + } + key_providers_.erase(keyProviderId); + EncodableMap params; + params[EncodableValue("result")] = "success"; + result->Success(EncodableValue(params)); +} + +} // namespace flutter_webrtc_plugin \ No newline at end of file diff --git a/common/cpp/src/flutter_media_stream.cc b/common/cpp/src/flutter_media_stream.cc new file mode 100644 index 0000000000..324dbdc4ce --- /dev/null +++ b/common/cpp/src/flutter_media_stream.cc @@ -0,0 +1,555 @@ +#include "flutter_media_stream.h" + +#define DEFAULT_WIDTH 1280 +#define DEFAULT_HEIGHT 720 +#define DEFAULT_FPS 30 + +namespace flutter_webrtc_plugin { + +FlutterMediaStream::FlutterMediaStream(FlutterWebRTCBase* base) : base_(base) { + base_->audio_device_->OnDeviceChange([&] { + EncodableMap info; + info[EncodableValue("event")] = "onDeviceChange"; + base_->event_channel()->Success(EncodableValue(info), false); + }); +} + +void FlutterMediaStream::GetUserMedia( + const EncodableMap& constraints, + std::unique_ptr result) { + std::string uuid = base_->GenerateUUID(); + scoped_refptr stream = + base_->factory_->CreateStream(uuid.c_str()); + + EncodableMap params; + params[EncodableValue("streamId")] = EncodableValue(uuid); + + auto it = constraints.find(EncodableValue("audio")); + if (it != constraints.end()) { + EncodableValue audio = it->second; + if (TypeIs(audio)) { + if (true == GetValue(audio)) { + GetUserAudio(constraints, stream, params); + } + } else if (TypeIs(audio)) { + GetUserAudio(constraints, stream, params); + } else { + params[EncodableValue("audioTracks")] = EncodableValue(EncodableList()); + } + } else { + params[EncodableValue("audioTracks")] = 
EncodableValue(EncodableList()); + } + + it = constraints.find(EncodableValue("video")); + params[EncodableValue("videoTracks")] = EncodableValue(EncodableList()); + if (it != constraints.end()) { + EncodableValue video = it->second; + if (TypeIs(video)) { + if (true == GetValue(video)) { + GetUserVideo(constraints, stream, params); + } + } else if (TypeIs(video)) { + GetUserVideo(constraints, stream, params); + } + } + + base_->local_streams_[uuid] = stream; + result->Success(EncodableValue(params)); +} + +void addDefaultAudioConstraints( + scoped_refptr audioConstraints) { + audioConstraints->AddOptionalConstraint("googNoiseSuppression", "true"); + audioConstraints->AddOptionalConstraint("googEchoCancellation", "true"); + audioConstraints->AddOptionalConstraint("echoCancellation", "true"); + audioConstraints->AddOptionalConstraint("googEchoCancellation2", "true"); + audioConstraints->AddOptionalConstraint("googDAEchoCancellation", "true"); +} + +std::string getSourceIdConstraint(const EncodableMap& mediaConstraints) { + auto it = mediaConstraints.find(EncodableValue("optional")); + if (it != mediaConstraints.end() && TypeIs(it->second)) { + EncodableList optional = GetValue(it->second); + for (size_t i = 0, size = optional.size(); i < size; i++) { + if (TypeIs(optional[i])) { + EncodableMap option = GetValue(optional[i]); + auto it2 = option.find(EncodableValue("sourceId")); + if (it2 != option.end() && TypeIs(it2->second)) { + return GetValue(it2->second); + } + } + } + } + return ""; +} + +std::string getDeviceIdConstraint(const EncodableMap& mediaConstraints) { + auto it = mediaConstraints.find(EncodableValue("deviceId")); + if (it != mediaConstraints.end() && TypeIs(it->second)) { + return GetValue(it->second); + } + return ""; +} + +void FlutterMediaStream::GetUserAudio(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params) { + bool enable_audio = false; + scoped_refptr audioConstraints; + std::string sourceId; + std::string 
deviceId; + auto it = constraints.find(EncodableValue("audio")); + if (it != constraints.end()) { + EncodableValue audio = it->second; + if (TypeIs(audio)) { + audioConstraints = RTCMediaConstraints::Create(); + addDefaultAudioConstraints(audioConstraints); + enable_audio = GetValue(audio); + sourceId = ""; + deviceId = ""; + } + if (TypeIs(audio)) { + EncodableMap localMap = GetValue(audio); + sourceId = getSourceIdConstraint(localMap); + deviceId = getDeviceIdConstraint(localMap); + audioConstraints = base_->ParseMediaConstraints(localMap); + enable_audio = true; + } + } + + // Selecting audio input device by sourceId and audio output device by + // deviceId + + if (enable_audio) { + char strRecordingName[256]; + char strRecordingGuid[256]; + int playout_devices = base_->audio_device_->PlayoutDevices(); + int recording_devices = base_->audio_device_->RecordingDevices(); + + for (uint16_t i = 0; i < recording_devices; i++) { + base_->audio_device_->RecordingDeviceName(i, strRecordingName, + strRecordingGuid); + if (sourceId != "" && sourceId == strRecordingGuid) { + base_->audio_device_->SetRecordingDevice(i); + } + } + + if (sourceId == "") { + base_->audio_device_->RecordingDeviceName(0, strRecordingName, + strRecordingGuid); + sourceId = strRecordingGuid; + } + + char strPlayoutName[256]; + char strPlayoutGuid[256]; + for (uint16_t i = 0; i < playout_devices; i++) { + base_->audio_device_->PlayoutDeviceName(i, strPlayoutName, + strPlayoutGuid); + if (deviceId != "" && deviceId == strPlayoutGuid) { + base_->audio_device_->SetPlayoutDevice(i); + } + } + + scoped_refptr source = + base_->factory_->CreateAudioSource("audio_input"); + std::string uuid = base_->GenerateUUID(); + scoped_refptr track = + base_->factory_->CreateAudioTrack(source, uuid.c_str()); + + std::string track_id = track->id().std_string(); + + EncodableMap track_info; + track_info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + track_info[EncodableValue("label")] = + 
EncodableValue(track->id().std_string()); + track_info[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + track_info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + + EncodableMap settings; + settings[EncodableValue("deviceId")] = EncodableValue(sourceId); + settings[EncodableValue("kind")] = EncodableValue("audioinput"); + settings[EncodableValue("autoGainControl")] = EncodableValue(true); + settings[EncodableValue("echoCancellation")] = EncodableValue(true); + settings[EncodableValue("noiseSuppression")] = EncodableValue(true); + settings[EncodableValue("channelCount")] = EncodableValue(1); + settings[EncodableValue("latency")] = EncodableValue(0); + track_info[EncodableValue("settings")] = EncodableValue(settings); + + EncodableList audioTracks; + audioTracks.push_back(EncodableValue(track_info)); + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + stream->AddTrack(track); + + base_->local_tracks_[track->id().std_string()] = track; + } +} + +std::string getFacingMode(const EncodableMap& mediaConstraints) { + return mediaConstraints.find(EncodableValue("facingMode")) != + mediaConstraints.end() + ? 
GetValue( + mediaConstraints.find(EncodableValue("facingMode"))->second) + : ""; +} + +EncodableValue getConstrainInt(const EncodableMap& constraints, + const std::string& key) { + EncodableValue value; + auto it = constraints.find(EncodableValue(key)); + if (it != constraints.end()) { + if (TypeIs(it->second)) { + return it->second; + } + + if (TypeIs(it->second)) { + EncodableMap innerMap = GetValue(it->second); + auto it2 = innerMap.find(EncodableValue("ideal")); + if (it2 != innerMap.end() && TypeIs(it2->second)) { + return it2->second; + } + } + } + + return EncodableValue(); +} + +void FlutterMediaStream::GetUserVideo(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params) { + EncodableMap video_constraints; + EncodableMap video_mandatory; + auto it = constraints.find(EncodableValue("video")); + if (it != constraints.end() && TypeIs(it->second)) { + video_constraints = GetValue(it->second); + if (video_constraints.find(EncodableValue("mandatory")) != + video_constraints.end()) { + video_mandatory = GetValue( + video_constraints.find(EncodableValue("mandatory"))->second); + } + } + + std::string facing_mode = getFacingMode(video_constraints); + // bool isFacing = facing_mode == "" || facing_mode != "environment"; + std::string sourceId = getSourceIdConstraint(video_constraints); + + EncodableValue widthValue = getConstrainInt(video_constraints, "width"); + + if (widthValue == EncodableValue()) + widthValue = findEncodableValue(video_mandatory, "minWidth"); + + if (widthValue == EncodableValue()) + widthValue = findEncodableValue(video_mandatory, "width"); + + EncodableValue heightValue = getConstrainInt(video_constraints, "height"); + + if (heightValue == EncodableValue()) + heightValue = findEncodableValue(video_mandatory, "minHeight"); + + if (heightValue == EncodableValue()) + heightValue = findEncodableValue(video_mandatory, "height"); + + EncodableValue fpsValue = getConstrainInt(video_constraints, "frameRate"); + + if (fpsValue 
== EncodableValue()) + fpsValue = findEncodableValue(video_mandatory, "minFrameRate"); + + if (fpsValue == EncodableValue()) + fpsValue = findEncodableValue(video_mandatory, "frameRate"); + + scoped_refptr video_capturer; + char strNameUTF8[256]; + char strGuidUTF8[256]; + int nb_video_devices = base_->video_device_->NumberOfDevices(); + + int32_t width = toInt(widthValue, DEFAULT_WIDTH); + int32_t height = toInt(heightValue, DEFAULT_HEIGHT); + int32_t fps = toInt(fpsValue, DEFAULT_FPS); + + for (int i = 0; i < nb_video_devices; i++) { + base_->video_device_->GetDeviceName(i, strNameUTF8, 256, strGuidUTF8, 256); + if (sourceId != "" && sourceId == strGuidUTF8) { + video_capturer = + base_->video_device_->Create(strNameUTF8, i, width, height, fps); + break; + } + } + + if (nb_video_devices == 0) + return; + + if (!video_capturer.get()) { + base_->video_device_->GetDeviceName(0, strNameUTF8, 128, strGuidUTF8, 128); + sourceId = strGuidUTF8; + video_capturer = + base_->video_device_->Create(strNameUTF8, 0, width, height, fps); + } + + if (!video_capturer.get()) + return; + + + video_capturer->StartCapture(); + + const char* video_source_label = "video_input"; + scoped_refptr source = base_->factory_->CreateVideoSource( + video_capturer, video_source_label, + base_->ParseMediaConstraints(video_constraints)); + + std::string uuid = base_->GenerateUUID(); + scoped_refptr track = + base_->factory_->CreateVideoTrack(source, uuid.c_str()); + + EncodableList videoTracks; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + + EncodableMap settings; + settings[EncodableValue("deviceId")] = EncodableValue(sourceId); + settings[EncodableValue("kind")] = EncodableValue("videoinput"); + settings[EncodableValue("width")] 
= EncodableValue(width); + settings[EncodableValue("height")] = EncodableValue(height); + settings[EncodableValue("frameRate")] = EncodableValue(fps); + info[EncodableValue("settings")] = EncodableValue(settings); + + videoTracks.push_back(EncodableValue(info)); + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + stream->AddTrack(track); + + base_->local_tracks_[track->id().std_string()] = track; + base_->video_capturers_[track->id().std_string()] = video_capturer; +} + +void FlutterMediaStream::GetSources(std::unique_ptr result) { + EncodableList sources; + + int nb_audio_devices = base_->audio_device_->RecordingDevices(); + char strNameUTF8[RTCAudioDevice::kAdmMaxDeviceNameSize + 1] = {0}; + char strGuidUTF8[RTCAudioDevice::kAdmMaxGuidSize + 1] = {0}; + + for (uint16_t i = 0; i < nb_audio_devices; i++) { + base_->audio_device_->RecordingDeviceName(i, strNameUTF8, strGuidUTF8); + EncodableMap audio; + audio[EncodableValue("label")] = EncodableValue(std::string(strNameUTF8)); + audio[EncodableValue("deviceId")] = + EncodableValue(std::string(strGuidUTF8)); + audio[EncodableValue("facing")] = ""; + audio[EncodableValue("kind")] = "audioinput"; + sources.push_back(EncodableValue(audio)); + } + + nb_audio_devices = base_->audio_device_->PlayoutDevices(); + for (uint16_t i = 0; i < nb_audio_devices; i++) { + base_->audio_device_->PlayoutDeviceName(i, strNameUTF8, strGuidUTF8); + EncodableMap audio; + audio[EncodableValue("label")] = EncodableValue(std::string(strNameUTF8)); + audio[EncodableValue("deviceId")] = + EncodableValue(std::string(strGuidUTF8)); + audio[EncodableValue("facing")] = ""; + audio[EncodableValue("kind")] = "audiooutput"; + sources.push_back(EncodableValue(audio)); + } + + int nb_video_devices = base_->video_device_->NumberOfDevices(); + for (int i = 0; i < nb_video_devices; i++) { + base_->video_device_->GetDeviceName(i, strNameUTF8, 128, strGuidUTF8, 128); + EncodableMap video; + video[EncodableValue("label")] = 
EncodableValue(std::string(strNameUTF8)); + video[EncodableValue("deviceId")] = + EncodableValue(std::string(strGuidUTF8)); + video[EncodableValue("facing")] = i == 1 ? "front" : "back"; + video[EncodableValue("kind")] = "videoinput"; + sources.push_back(EncodableValue(video)); + } + EncodableMap params; + params[EncodableValue("sources")] = EncodableValue(sources); + result->Success(EncodableValue(params)); +} + +void FlutterMediaStream::SelectAudioOutput( + const std::string& device_id, + std::unique_ptr result) { + char strPlayoutName[256]; + char strPlayoutGuid[256]; + int playout_devices = base_->audio_device_->PlayoutDevices(); + bool found = false; + for (uint16_t i = 0; i < playout_devices; i++) { + base_->audio_device_->PlayoutDeviceName(i, strPlayoutName, strPlayoutGuid); + if (device_id != "" && device_id == strPlayoutGuid) { + base_->audio_device_->SetPlayoutDevice(i); + found = true; + break; + } + } + if (!found) { + result->Error("Bad Arguments", "Not found device id: " + device_id); + return; + } + result->Success(); +} + +void FlutterMediaStream::SelectAudioInput( + const std::string& device_id, + std::unique_ptr result) { + char strPlayoutName[256]; + char strPlayoutGuid[256]; + int playout_devices = base_->audio_device_->RecordingDevices(); + bool found = false; + for (uint16_t i = 0; i < playout_devices; i++) { + base_->audio_device_->RecordingDeviceName(i, strPlayoutName, + strPlayoutGuid); + if (device_id != "" && device_id == strPlayoutGuid) { + base_->audio_device_->SetRecordingDevice(i); + found = true; + break; + } + } + if (!found) { + result->Error("Bad Arguments", "Not found device id: " + device_id); + return; + } + result->Success(); +} + +void FlutterMediaStream::MediaStreamGetTracks( + const std::string& stream_id, + std::unique_ptr result) { + scoped_refptr stream = base_->MediaStreamForId(stream_id); + + if (stream) { + EncodableMap params; + EncodableList audioTracks; + + auto audio_tracks = stream->audio_tracks(); + for (auto 
track : audio_tracks.std_vector()) { + base_->local_tracks_[track->id().std_string()] = track; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + info[EncodableValue("remote")] = EncodableValue(true); + info[EncodableValue("readyState")] = "live"; + audioTracks.push_back(EncodableValue(info)); + } + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + + EncodableList videoTracks; + auto video_tracks = stream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + base_->local_tracks_[track->id().std_string()] = track; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + info[EncodableValue("remote")] = EncodableValue(true); + info[EncodableValue("readyState")] = "live"; + videoTracks.push_back(EncodableValue(info)); + } + + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + result->Success(EncodableValue(params)); + } else { + result->Error("MediaStreamGetTracksFailed", + "MediaStreamGetTracks() media stream is null !"); + } +} + +void FlutterMediaStream::MediaStreamDispose( + const std::string& stream_id, + std::unique_ptr result) { + scoped_refptr stream = base_->MediaStreamForId(stream_id); + + if (!stream) { + result->Error("MediaStreamDisposeFailed", + "stream [" + stream_id + "] not found!"); + return; + } + + vector> audio_tracks = stream->audio_tracks(); + + for (auto track : audio_tracks.std_vector()) { + stream->RemoveTrack(track); + 
base_->local_tracks_.erase(track->id().std_string()); + } + + vector> video_tracks = stream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + stream->RemoveTrack(track); + base_->local_tracks_.erase(track->id().std_string()); + if (base_->video_capturers_.find(track->id().std_string()) != + base_->video_capturers_.end()) { + auto video_capture = base_->video_capturers_[track->id().std_string()]; + if (video_capture->CaptureStarted()) { + video_capture->StopCapture(); + } + base_->video_capturers_.erase(track->id().std_string()); + } + } + + base_->RemoveStreamForId(stream_id); + result->Success(); +} + +void FlutterMediaStream::CreateLocalMediaStream( + std::unique_ptr result) { + std::string uuid = base_->GenerateUUID(); + scoped_refptr stream = + base_->factory_->CreateStream(uuid.c_str()); + + EncodableMap params; + params[EncodableValue("streamId")] = EncodableValue(uuid); + + base_->local_streams_[uuid] = stream; + result->Success(EncodableValue(params)); +} + +void FlutterMediaStream::MediaStreamTrackSetEnable( + const std::string& track_id, + std::unique_ptr result) { + result->NotImplemented(); +} + +void FlutterMediaStream::MediaStreamTrackSwitchCamera( + const std::string& track_id, + std::unique_ptr result) { + result->NotImplemented(); +} + +void FlutterMediaStream::MediaStreamTrackDispose( + const std::string& track_id, + std::unique_ptr result) { + for (auto it : base_->local_streams_) { + auto stream = it.second; + auto audio_tracks = stream->audio_tracks(); + for (auto track : audio_tracks.std_vector()) { + if (track->id().std_string() == track_id) { + stream->RemoveTrack(track); + } + } + auto video_tracks = stream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + if (track->id().std_string() == track_id) { + stream->RemoveTrack(track); + + if (base_->video_capturers_.find(track_id) != + base_->video_capturers_.end()) { + auto video_capture = base_->video_capturers_[track_id]; + if 
(video_capture->CaptureStarted()) { + video_capture->StopCapture(); + } + base_->video_capturers_.erase(track_id); + } + } + } + } + base_->RemoveMediaTrackForId(track_id); + result->Success(); +} +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_peerconnection.cc b/common/cpp/src/flutter_peerconnection.cc new file mode 100644 index 0000000000..691ec29f19 --- /dev/null +++ b/common/cpp/src/flutter_peerconnection.cc @@ -0,0 +1,1387 @@ +#include "flutter_peerconnection.h" + +#include "base/scoped_ref_ptr.h" +#include "flutter_data_channel.h" +#include "flutter_frame_capturer.h" +#include "rtc_dtmf_sender.h" +#include "rtc_rtp_parameters.h" + +namespace flutter_webrtc_plugin { + +std::string RTCMediaTypeToString(RTCMediaType type) { + switch (type) { + case libwebrtc::RTCMediaType::AUDIO: + return "audio"; + case libwebrtc::RTCMediaType::VIDEO: + return "video"; + case libwebrtc::RTCMediaType::DATA: + return "data"; + case libwebrtc::RTCMediaType::UNSUPPORTED: + return "unsupported"; + } + return ""; +} + +std::string transceiverDirectionString(RTCRtpTransceiverDirection direction) { + switch (direction) { + case RTCRtpTransceiverDirection::kSendRecv: + return "sendrecv"; + case RTCRtpTransceiverDirection::kSendOnly: + return "sendonly"; + case RTCRtpTransceiverDirection::kRecvOnly: + return "recvonly"; + case RTCRtpTransceiverDirection::kInactive: + return "inactive"; + case RTCRtpTransceiverDirection::kStopped: + return "stoped"; + } + return ""; +} + +const char* iceConnectionStateString(RTCIceConnectionState state) { + switch (state) { + case RTCIceConnectionStateNew: + return "new"; + case RTCIceConnectionStateChecking: + return "checking"; + case RTCIceConnectionStateConnected: + return "connected"; + case RTCIceConnectionStateCompleted: + return "completed"; + case RTCIceConnectionStateFailed: + return "failed"; + case RTCIceConnectionStateDisconnected: + return "disconnected"; + case RTCIceConnectionStateClosed: + return "closed"; + 
case RTCIceConnectionStateMax: + return "statemax"; + } + return ""; +} + +const char* signalingStateString(RTCSignalingState state) { + switch (state) { + case RTCSignalingStateStable: + return "stable"; + case RTCSignalingStateHaveLocalOffer: + return "have-local-offer"; + case RTCSignalingStateHaveLocalPrAnswer: + return "have-local-pranswer"; + case RTCSignalingStateHaveRemoteOffer: + return "have-remote-offer"; + case RTCSignalingStateHaveRemotePrAnswer: + return "have-remote-pranswer"; + case RTCSignalingStateClosed: + return "closed"; + } + return ""; +} + +const char* peerConnectionStateString(RTCPeerConnectionState state) { + switch (state) { + case RTCPeerConnectionStateNew: + return "new"; + case RTCPeerConnectionStateConnecting: + return "connecting"; + case RTCPeerConnectionStateConnected: + return "connected"; + case RTCPeerConnectionStateDisconnected: + return "disconnected"; + case RTCPeerConnectionStateFailed: + return "failed"; + case RTCPeerConnectionStateClosed: + return "closed"; + } + return ""; +} + +const char* iceGatheringStateString(RTCIceGatheringState state) { + switch (state) { + case RTCIceGatheringStateNew: + return "new"; + case RTCIceGatheringStateGathering: + return "gathering"; + case RTCIceGatheringStateComplete: + return "complete"; + } + return ""; +} + +EncodableMap rtpParametersToMap( + libwebrtc::scoped_refptr rtpParameters) { + EncodableMap info; + info[EncodableValue("transactionId")] = + EncodableValue(rtpParameters->transaction_id().std_string()); + + EncodableMap rtcp; + rtcp[EncodableValue("cname")] = + EncodableValue(rtpParameters->rtcp_parameters()->cname().std_string()); + rtcp[EncodableValue("reducedSize")] = + EncodableValue(rtpParameters->rtcp_parameters()->reduced_size()); + + info[EncodableValue("rtcp")] = EncodableValue((rtcp)); + + EncodableList headerExtensions; + auto header_extensions = rtpParameters->header_extensions(); + for (scoped_refptr extension : + header_extensions.std_vector()) { + EncodableMap 
map; + map[EncodableValue("uri")] = EncodableValue(extension->uri().std_string()); + map[EncodableValue("id")] = EncodableValue(extension->id()); + map[EncodableValue("encrypted")] = EncodableValue(extension->encrypt()); + headerExtensions.push_back(EncodableValue(map)); + } + info[EncodableValue("headerExtensions")] = EncodableValue(headerExtensions); + + EncodableList encodings_info; + auto encodings = rtpParameters->encodings(); + for (scoped_refptr encoding : + encodings.std_vector()) { + EncodableMap map; + map[EncodableValue("active")] = EncodableValue(encoding->active()); + map[EncodableValue("maxBitrate")] = + EncodableValue(encoding->max_bitrate_bps()); + map[EncodableValue("minBitrate")] = + EncodableValue(encoding->min_bitrate_bps()); + map[EncodableValue("maxFramerate")] = + EncodableValue(static_cast(encoding->max_framerate())); + map[EncodableValue("scaleResolutionDownBy")] = + EncodableValue(encoding->scale_resolution_down_by()); + map[EncodableValue("scalabilityMode")] = + EncodableValue(encoding->scalability_mode().std_string()); + map[EncodableValue("ssrc")] = + EncodableValue(static_cast(encoding->ssrc())); + encodings_info.push_back(EncodableValue(map)); + } + info[EncodableValue("encodings")] = EncodableValue(encodings_info); + + EncodableList codecs_info; + auto codecs = rtpParameters->codecs(); + for (scoped_refptr codec : codecs.std_vector()) { + EncodableMap map; + map[EncodableValue("name")] = EncodableValue(codec->name().std_string()); + map[EncodableValue("payloadType")] = EncodableValue(codec->payload_type()); + map[EncodableValue("clockRate")] = EncodableValue(codec->clock_rate()); + map[EncodableValue("numChannels")] = EncodableValue(codec->num_channels()); + + EncodableMap param; + auto parameters = codec->parameters(); + for (auto item : parameters.std_vector()) { + param[EncodableValue(item.first.std_string())] = + EncodableValue(item.second.std_string()); + } + map[EncodableValue("parameters")] = EncodableValue(param); + + 
map[EncodableValue("kind")] = + EncodableValue(RTCMediaTypeToString(codec->kind())); + + codecs_info.push_back(EncodableValue(map)); + } + info[EncodableValue("codecs")] = EncodableValue(codecs_info); + + switch (rtpParameters->GetDegradationPreference()) { + case libwebrtc::RTCDegradationPreference::MAINTAIN_FRAMERATE: + info[EncodableValue("degradationPreference")] = + EncodableValue("maintain-framerate"); + break; + case libwebrtc::RTCDegradationPreference::MAINTAIN_RESOLUTION: + info[EncodableValue("degradationPreference")] = + EncodableValue("maintain-resolution"); + break; + case libwebrtc::RTCDegradationPreference::BALANCED: + info[EncodableValue("degradationPreference")] = + EncodableValue("balanced"); + break; + case libwebrtc::RTCDegradationPreference::DISABLED: + info[EncodableValue("degradationPreference")] = + EncodableValue("disabled"); + break; + default: + info[EncodableValue("degradationPreference")] = + EncodableValue("balanced"); + break; + } + + return info; +} + +EncodableMap dtmfSenderToMap(scoped_refptr dtmfSender, + std::string id) { + EncodableMap info; + if (nullptr != dtmfSender.get()) { + info[EncodableValue("dtmfSenderId")] = EncodableValue(id); + if (dtmfSender.get()) { + info[EncodableValue("interToneGap")] = + EncodableValue(dtmfSender->inter_tone_gap()); + info[EncodableValue("duration")] = EncodableValue(dtmfSender->duration()); + } + } + return info; +} + +EncodableMap mediaTrackToMap( + libwebrtc::scoped_refptr track) { + EncodableMap info; + if (nullptr == track.get()) { + return info; + } + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("video")) { + info[EncodableValue("readyState")] = + EncodableValue(static_cast(track.get())->state()); + info[EncodableValue("label")] = EncodableValue("video"); + } else if (0 == kind.compare("audio")) { + 
info[EncodableValue("readyState")] = + EncodableValue(static_cast(track.get())->state()); + info[EncodableValue("label")] = EncodableValue("audio"); + } + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + + return info; +} + +EncodableMap rtpSenderToMap( + libwebrtc::scoped_refptr sender) { + EncodableMap info; + std::string id = sender->id().std_string(); + info[EncodableValue("senderId")] = EncodableValue(id); + info[EncodableValue("ownsTrack")] = EncodableValue(true); + info[EncodableValue("dtmfSender")] = + EncodableValue(dtmfSenderToMap(sender->dtmf_sender(), id)); + info[EncodableValue("rtpParameters")] = + EncodableValue(rtpParametersToMap(sender->parameters())); + info[EncodableValue("track")] = + EncodableValue(mediaTrackToMap(sender->track())); + return info; +} + +std::string trackStateToString(libwebrtc::RTCMediaTrack::RTCTrackState state) { + switch (state) { + case libwebrtc::RTCMediaTrack::kLive: + return "live"; + case libwebrtc::RTCMediaTrack::kEnded: + return "ended"; + default: + return ""; + } +} + +EncodableMap rtpReceiverToMap( + libwebrtc::scoped_refptr receiver) { + EncodableMap info; + info[EncodableValue("receiverId")] = + EncodableValue(receiver->id().std_string()); + info[EncodableValue("rtpParameters")] = + EncodableValue(rtpParametersToMap(receiver->parameters())); + info[EncodableValue("track")] = + EncodableValue(mediaTrackToMap(receiver->track())); + return info; +} + +EncodableMap transceiverToMap(scoped_refptr transceiver) { + EncodableMap info; + info[EncodableValue("transceiverId")] = + EncodableValue(transceiver->transceiver_id().std_string()); + info[EncodableValue("mid")] = EncodableValue(transceiver->mid().std_string()); + info[EncodableValue("direction")] = + EncodableValue(transceiverDirectionString(transceiver->direction())); + info[EncodableValue("sender")] = + EncodableValue(rtpSenderToMap(transceiver->sender())); + info[EncodableValue("receiver")] = + 
EncodableValue(rtpReceiverToMap(transceiver->receiver())); + return info; +} + +EncodableMap mediaStreamToMap(scoped_refptr stream, + std::string id) { + EncodableMap params; + params[EncodableValue("streamId")] = + EncodableValue(stream->id().std_string()); + params[EncodableValue("ownerTag")] = EncodableValue(id); + EncodableList audioTracks; + auto audio_tracks = stream->audio_tracks(); + for (scoped_refptr val : audio_tracks.std_vector()) { + audioTracks.push_back(EncodableValue(mediaTrackToMap(val))); + } + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + + EncodableList videoTracks; + auto video_tracks = stream->video_tracks(); + for (scoped_refptr val : video_tracks.std_vector()) { + videoTracks.push_back(EncodableValue(mediaTrackToMap(val))); + } + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + return params; +} + +void FlutterPeerConnection::CreateRTCPeerConnection( + const EncodableMap& configurationMap, + const EncodableMap& constraintsMap, + std::unique_ptr result) { + // std::cout << " configuration = " << configurationMap.StringValue() << + // std::endl; + base_->ParseRTCConfiguration(configurationMap, base_->configuration_); + // std::cout << " constraints = " << constraintsMap.StringValue() << + // std::endl; + scoped_refptr constraints = + base_->ParseMediaConstraints(constraintsMap); + + std::string uuid = base_->GenerateUUID(); + scoped_refptr pc = + base_->factory_->Create(base_->configuration_, constraints); + base_->peerconnections_[uuid] = pc; + + std::string event_channel = "FlutterWebRTC/peerConnectionEvent" + uuid; + + std::unique_ptr observer( + new FlutterPeerConnectionObserver(base_, pc, base_->messenger_, + base_->task_runner_, + event_channel, uuid)); + + base_->peerconnection_observers_[uuid] = std::move(observer); + + EncodableMap params; + params[EncodableValue("peerConnectionId")] = EncodableValue(uuid); + result->Success(EncodableValue(params)); +} + +void 
FlutterPeerConnection::RTCPeerConnectionClose( + RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result) { + auto it2 = base_->peerconnections_.find(uuid); + if (it2 != base_->peerconnections_.end()) { + it2->second->Close(); + base_->peerconnections_.erase(it2); + } + + auto it = base_->peerconnection_observers_.find(uuid); + if (it != base_->peerconnection_observers_.end()) + base_->peerconnection_observers_.erase(it); + + result->Success(); +} + +void FlutterPeerConnection::RTCPeerConnectionDispose( + RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result) { + result->Success(); +} + +void FlutterPeerConnection::CreateOffer( + const EncodableMap& constraintsMap, + RTCPeerConnection* pc, + std::unique_ptr result) { + scoped_refptr constraints = + base_->ParseMediaConstraints(constraintsMap); + std::shared_ptr result_ptr(result.release()); + pc->CreateOffer( + [result_ptr](const libwebrtc::string sdp, const libwebrtc::string type) { + EncodableMap params; + params[EncodableValue("sdp")] = EncodableValue(sdp.std_string()); + params[EncodableValue("type")] = EncodableValue(type.std_string()); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("createOfferFailed", error); + }, + constraints); +} + +void FlutterPeerConnection::CreateAnswer( + const EncodableMap& constraintsMap, + RTCPeerConnection* pc, + std::unique_ptr result) { + scoped_refptr constraints = + base_->ParseMediaConstraints(constraintsMap); + std::shared_ptr result_ptr(result.release()); + pc->CreateAnswer( + [result_ptr](const libwebrtc::string sdp, const libwebrtc::string type) { + EncodableMap params; + params[EncodableValue("sdp")] = EncodableValue(sdp.std_string()); + params[EncodableValue("type")] = EncodableValue(type.std_string()); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("createAnswerFailed", error); + }, + constraints); +} + +void 
FlutterPeerConnection::SetLocalDescription( + RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->SetLocalDescription( + sdp->sdp(), sdp->type(), [result_ptr]() { result_ptr->Success(); }, + [result_ptr](const char* error) { + result_ptr->Error("setLocalDescriptionFailed", error); + }); +} + +void FlutterPeerConnection::SetRemoteDescription( + RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->SetRemoteDescription( + sdp->sdp(), sdp->type(), [result_ptr]() { result_ptr->Success(); }, + [result_ptr](const char* error) { + result_ptr->Error("setRemoteDescriptionFailed", error); + }); +} + +void FlutterPeerConnection::GetLocalDescription( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->GetLocalDescription( + [result_ptr](const char* sdp, const char* type) { + EncodableMap params; + params[EncodableValue("sdp")] = sdp; + params[EncodableValue("type")] = type; + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("getLocalDescriptionFailed", error); + }); +} + +void FlutterPeerConnection::GetRemoteDescription( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->GetRemoteDescription( + [result_ptr](const char* sdp, const char* type) { + EncodableMap params; + params[EncodableValue("sdp")] = sdp; + params[EncodableValue("type")] = type; + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("getRemoteDescriptionFailed", error); + }); +} + +scoped_refptr +FlutterPeerConnection::mapToRtpTransceiverInit(const EncodableMap& params) { + EncodableList streamIds = findList(params, "streamIds"); + + std::vector stream_ids; + for (auto item : streamIds) { + std::string id = GetValue(item); + 
stream_ids.push_back(id.c_str()); + } + RTCRtpTransceiverDirection dir = RTCRtpTransceiverDirection::kInactive; + EncodableValue direction = findEncodableValue(params, "direction"); + if (!direction.IsNull()) { + dir = stringToTransceiverDirection(GetValue(direction)); + } + EncodableList sendEncodings = findList(params, "sendEncodings"); + std::vector> encodings; + for (EncodableValue value : sendEncodings) { + encodings.push_back(mapToEncoding(GetValue(value))); + } + scoped_refptr init = + RTCRtpTransceiverInit::Create(dir, stream_ids, encodings); + return init; +} + +RTCRtpTransceiverDirection FlutterPeerConnection::stringToTransceiverDirection( + std::string direction) { + if (0 == direction.compare("sendrecv")) { + return RTCRtpTransceiverDirection::kSendRecv; + } else if (0 == direction.compare("sendonly")) { + return RTCRtpTransceiverDirection::kSendOnly; + } else if (0 == direction.compare("recvonly")) { + return RTCRtpTransceiverDirection::kRecvOnly; + } else if (0 == direction.compare("stoped")) { + return RTCRtpTransceiverDirection::kStopped; + } else if (0 == direction.compare("inactive")) { + return RTCRtpTransceiverDirection::kInactive; + } + return RTCRtpTransceiverDirection::kInactive; +} + +libwebrtc::scoped_refptr +FlutterPeerConnection::mapToEncoding(const EncodableMap& params) { + libwebrtc::scoped_refptr encoding = + RTCRtpEncodingParameters::Create(); + + encoding->set_active(true); + encoding->set_scale_resolution_down_by(1.0); + + EncodableValue value = findEncodableValue(params, "active"); + if (!value.IsNull()) { + encoding->set_active(GetValue(value)); + } + + value = findEncodableValue(params, "rid"); + if (!value.IsNull()) { + const std::string rid = GetValue(value); + encoding->set_rid(rid.c_str()); + } + + value = findEncodableValue(params, "ssrc"); + if (!value.IsNull()) { + encoding->set_ssrc((uint32_t)GetValue(value)); + } + + value = findEncodableValue(params, "minBitrate"); + if (!value.IsNull()) { + 
encoding->set_min_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(params, "maxBitrate"); + if (!value.IsNull()) { + encoding->set_max_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(params, "maxFramerate"); + if (!value.IsNull()) { + encoding->set_max_framerate(GetValue(value)); + } + + value = findEncodableValue(params, "numTemporalLayers"); + if (!value.IsNull()) { + encoding->set_num_temporal_layers(GetValue(value)); + } + + value = findEncodableValue(params, "scaleResolutionDownBy"); + if (!value.IsNull()) { + encoding->set_scale_resolution_down_by(GetValue(value)); + } + + value = findEncodableValue(params, "scalabilityMode"); + if (!value.IsNull()) { + encoding->set_scalability_mode(GetValue(value)); + } + + return encoding; +} + +RTCMediaType stringToMediaType(const std::string& mediaType) { + RTCMediaType type = RTCMediaType::UNSUPPORTED; + if (mediaType == "audio") + type = RTCMediaType::AUDIO; + else if (mediaType == "video") + type = RTCMediaType::VIDEO; + else if (mediaType == "data") + type = RTCMediaType::DATA; + return type; +} + +void FlutterPeerConnection::AddTransceiver( + RTCPeerConnection* pc, + const std::string& trackId, + const std::string& mediaType, + const EncodableMap& transceiverInit, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + RTCMediaTrack* track = base_->MediaTrackForId(trackId); + RTCMediaType type = stringToMediaType(mediaType); + + if (0 < transceiverInit.size()) { + auto transceiver = + track != nullptr ? pc->AddTransceiver( + track, mapToRtpTransceiverInit(transceiverInit)) + : pc->AddTransceiver( + type, mapToRtpTransceiverInit(transceiverInit)); + if (nullptr != transceiver.get()) { + result_ptr->Success(EncodableValue(transceiverToMap(transceiver))); + return; + } + result_ptr->Error("AddTransceiver(track | mediaType, init)", + "AddTransceiver error"); + } else { + auto transceiver = + track != nullptr ? 
pc->AddTransceiver(track) : pc->AddTransceiver(type); + if (nullptr != transceiver.get()) { + result_ptr->Success(EncodableValue(transceiverToMap(transceiver))); + return; + } + result_ptr->Error("AddTransceiver(track, mediaType)", + "AddTransceiver error"); + } +} + +void FlutterPeerConnection::GetTransceivers( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + EncodableMap map; + EncodableList info; + auto transceivers = pc->transceivers(); + for (scoped_refptr transceiver : + transceivers.std_vector()) { + info.push_back(EncodableValue(transceiverToMap(transceiver))); + } + map[EncodableValue("transceivers")] = EncodableValue(info); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::GetReceivers( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + EncodableMap map; + EncodableList info; + auto receivers = pc->receivers(); + for (scoped_refptr receiver : receivers.std_vector()) { + info.push_back(EncodableValue(rtpReceiverToMap(receiver))); + } + map[EncodableValue("receivers")] = EncodableValue(info); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::RtpSenderSetTrack( + RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderSetTrack", "sender is null"); + return; + } + sender->set_track(track); + result_ptr->Success(); +} + +void FlutterPeerConnection::RtpSenderSetStream( + RTCPeerConnection* pc, + std::vector streamIds, + std::string rtpSenderId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderSetTrack", "sender is null"); + return; + } + 
sender->set_stream_ids(streamIds); + result_ptr->Success(); +} + +void FlutterPeerConnection::RtpSenderReplaceTrack( + RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderReplaceTrack", "sender is null"); + return; + } + + sender->set_track(track); + + result_ptr->Success(); +} + +scoped_refptr FlutterPeerConnection::updateRtpParameters( + EncodableMap newParameters, + scoped_refptr parameters) { + EncodableList encodings = findList(newParameters, "encodings"); + auto encoding = encodings.begin(); + auto params = parameters->encodings(); + for (auto param : params.std_vector()) { + if (encoding != encodings.end()) { + EncodableMap map = GetValue(*encoding); + EncodableValue value = findEncodableValue(map, "active"); + if (!value.IsNull()) { + param->set_active(GetValue(value)); + } + value = findEncodableValue(map, "rid"); + if (!value.IsNull()) { + param->set_rid(GetValue(value)); + } + value = findEncodableValue(map, "ssrc"); + if (!value.IsNull()) { + param->set_ssrc(GetValue(value)); + } + value = findEncodableValue(map, "maxBitrate"); + if (!value.IsNull()) { + param->set_max_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(map, "minBitrate"); + if (!value.IsNull()) { + param->set_min_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(map, "maxFramerate"); + if (!value.IsNull()) { + param->set_max_framerate(GetValue(value)); + } + value = findEncodableValue(map, "numTemporalLayers"); + if (!value.IsNull()) { + param->set_num_temporal_layers(GetValue(value)); + } + value = findEncodableValue(map, "scaleResolutionDownBy"); + if (!value.IsNull()) { + param->set_scale_resolution_down_by(GetValue(value)); + } + value = findEncodableValue(map, "scalabilityMode"); + if (!value.IsNull()) { + 
param->set_scalability_mode(GetValue(value)); + } + encoding++; + } + } + + EncodableValue value = + findEncodableValue(newParameters, "degradationPreference"); + if (!value.IsNull()) { + const std::string degradationPreference = GetValue(value); + if (degradationPreference == "maintain-framerate") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::MAINTAIN_FRAMERATE); + } else if (degradationPreference == "maintain-resolution") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::MAINTAIN_RESOLUTION); + } else if (degradationPreference == "balanced") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::BALANCED); + } else if (degradationPreference == "disabled") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::DISABLED); + } + } + + return parameters; +} + +void FlutterPeerConnection::RtpSenderSetParameters( + RTCPeerConnection* pc, + std::string rtpSenderId, + const EncodableMap& parameters, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderSetParameters", "sender is null"); + return; + } + + auto param = sender->parameters(); + param = updateRtpParameters(parameters, param); + bool success = sender->set_parameters(param); + + EncodableMap map; + map[EncodableValue("result")] = EncodableValue(success); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::RtpTransceiverStop( + RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("rtpTransceiverStop", "transceiver is null"); + return; + } + transceiver->StopInternal(); + result_ptr->Success(); +} + +void 
FlutterPeerConnection::RtpTransceiverGetCurrentDirection( + RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("rtpTransceiverGetCurrentDirection", + "transceiver is null"); + return; + } + EncodableMap map; + map[EncodableValue("result")] = EncodableValue( + transceiverDirectionString(transceiver->current_direction())); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::SetConfiguration( + RTCPeerConnection* pc, + const EncodableMap& configuration, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + // TODO pc->SetConfiguration(); + + result_ptr->Success(); +} + +void FlutterPeerConnection::CaptureFrame( + RTCVideoTrack* track, + std::string path, + std::unique_ptr result) { + FlutterFrameCapturer capturer(track, path); + capturer.CaptureFrame(std::move(result)); +} + +scoped_refptr FlutterPeerConnection::getRtpTransceiverById( + RTCPeerConnection* pc, + std::string id) { + scoped_refptr result; + auto transceivers = pc->transceivers(); + for (scoped_refptr transceiver : + transceivers.std_vector()) { + if (nullptr == result.get() && + 0 == id.compare(transceiver->transceiver_id().std_string())) { + result = transceiver; + } + } + return result; +} + +void FlutterPeerConnection::RtpTransceiverSetDirection( + RTCPeerConnection* pc, + std::string transceiverId, + std::string direction, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("RtpTransceiverSetDirection", " transceiver is null "); + return; + } + auto res = transceiver->SetDirectionWithError( + stringToTransceiverDirection(direction)); + if (res.std_string() == "") { + result_ptr->Success(); + } else { + 
result_ptr->Error("RtpTransceiverSetDirection", res.std_string()); + } +} + +void FlutterPeerConnection::RtpTransceiverSetCodecPreferences( + RTCPeerConnection* pc, + std::string transceiverId, + const EncodableList codecs, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("RtpTransceiverSetCodecPreferences", + " transceiver is null "); + return; + } + std::vector> codecList; + for (auto codec : codecs) { + auto codecMap = GetValue(codec); + auto codecMimeType = findString(codecMap, "mimeType"); + auto codecClockRate = findInt(codecMap, "clockRate"); + auto codecNumChannels = findInt(codecMap, "channels"); + auto codecSdpFmtpLine = findString(codecMap, "sdpFmtpLine"); + auto codecCapability = RTCRtpCodecCapability::Create(); + if (codecSdpFmtpLine != std::string() && codecSdpFmtpLine.length() != 0) + codecCapability->set_sdp_fmtp_line(codecSdpFmtpLine); + codecCapability->set_clock_rate(codecClockRate); + if (codecNumChannels != -1) + codecCapability->set_channels(codecNumChannels); + codecCapability->set_mime_type(codecMimeType); + codecList.push_back(codecCapability); + } + transceiver->SetCodecPreferences(codecList); + result_ptr->Success(); +} + +void FlutterPeerConnection::GetSenders( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + EncodableMap map; + EncodableList info; + auto senders = pc->senders(); + for (scoped_refptr sender : senders.std_vector()) { + info.push_back(EncodableValue(rtpSenderToMap(sender))); + } + map[EncodableValue("senders")] = EncodableValue(info); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::AddIceCandidate( + RTCIceCandidate* candidate, + RTCPeerConnection* pc, + std::unique_ptr result) { + pc->AddCandidate(candidate->sdp_mid(), candidate->sdp_mline_index(), + candidate->candidate()); + + 
result->Success(); +} + +EncodableMap statsToMap(const scoped_refptr& stats) { + EncodableMap report_map; + report_map[EncodableValue("id")] = EncodableValue(stats->id().std_string()); + report_map[EncodableValue("type")] = + EncodableValue(stats->type().std_string()); + report_map[EncodableValue("timestamp")] = + EncodableValue(static_cast(stats->timestamp_us())); + EncodableMap values; + auto members = stats->Members(); + for (int i = 0; i < members.size(); i++) { + auto member = members[i]; + switch (member->GetType()) { + case RTCStatsMember::Type::kBool: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueBool()); + break; + case RTCStatsMember::Type::kInt32: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueInt32()); + break; + case RTCStatsMember::Type::kUint32: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue((int64_t)member->ValueUint32()); + break; + case RTCStatsMember::Type::kInt64: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueInt64()); + break; + case RTCStatsMember::Type::kUint64: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue((int64_t)member->ValueUint64()); + break; + case RTCStatsMember::Type::kDouble: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueDouble()); + break; + case RTCStatsMember::Type::kString: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueString().std_string()); + break; + default: + break; + } + } + report_map[EncodableValue("values")] = EncodableValue(values); + return report_map; +} + +void FlutterPeerConnection::GetStats( + const std::string& track_id, + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + scoped_refptr track = base_->MediaTracksForId(track_id); + if (track != nullptr && track_id != "") { + bool found = false; + 
auto receivers = pc->receivers(); + for (auto receiver : receivers.std_vector()) { + if (receiver->track() && receiver->track()->id().c_string() == track_id) { + found = true; + pc->GetStats( + receiver, + [result_ptr](const vector> reports) { + EncodableList list; + for (int i = 0; i < reports.size(); i++) { + list.push_back(EncodableValue(statsToMap(reports[i]))); + } + EncodableMap params; + params[EncodableValue("stats")] = EncodableValue(list); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("GetStats", error); + }); + return; + } + } + auto senders = pc->senders(); + for (auto sender : senders.std_vector()) { + if (sender->track() && sender->track()->id().c_string() == track_id) { + found = true; + pc->GetStats( + sender, + [result_ptr](const vector> reports) { + EncodableList list; + for (int i = 0; i < reports.size(); i++) { + list.push_back(EncodableValue(statsToMap(reports[i]))); + } + EncodableMap params; + params[EncodableValue("stats")] = EncodableValue(list); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("GetStats", error); + }); + return; + } + } + if (!found) { + result_ptr->Error("GetStats", "Track not found"); + } + } else { + pc->GetStats( + [result_ptr](const vector> reports) { + EncodableList list; + for (int i = 0; i < reports.size(); i++) { + list.push_back(EncodableValue(statsToMap(reports[i]))); + } + EncodableMap params; + params[EncodableValue("stats")] = EncodableValue(list); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("GetStats", error); + }); + } +} + +void FlutterPeerConnection::MediaStreamAddTrack( + scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("audio")) { + stream->AddTrack(static_cast(track.get())); + } 
else if (0 == kind.compare("video")) { + stream->AddTrack(static_cast(track.get())); + } + + result_ptr->Success(); +} + +void FlutterPeerConnection::MediaStreamRemoveTrack( + scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("audio")) { + stream->RemoveTrack(static_cast(track.get())); + } else if (0 == kind.compare("video")) { + stream->RemoveTrack(static_cast(track.get())); + } + + result_ptr->Success(); +} + +void FlutterPeerConnection::AddTrack( + RTCPeerConnection* pc, + scoped_refptr track, + std::vector streamIds, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("audio")) { + auto sender = + pc->AddTrack(reinterpret_cast(track.get()), streamIds); + if (sender.get() != nullptr) { + result_ptr->Success(EncodableValue(rtpSenderToMap(sender))); + return; + } + } else if (0 == kind.compare("video")) { + auto sender = + pc->AddTrack(reinterpret_cast(track.get()), streamIds); + if (sender.get() != nullptr) { + result_ptr->Success(EncodableValue(rtpSenderToMap(sender))); + return; + } + } + result->Success(); +} + +void FlutterPeerConnection::RemoveTrack( + RTCPeerConnection* pc, + std::string senderId, + std::unique_ptr result) { + auto sender = base_->GetRtpSenderById(pc, senderId); + if (nullptr == sender.get()) { + result->Error("RemoveTrack", "not find RtpSender "); + return; + } + + EncodableMap map; + map[EncodableValue("result")] = EncodableValue(pc->RemoveTrack(sender)); + + result->Success(EncodableValue(map)); +} + +FlutterPeerConnectionObserver::FlutterPeerConnectionObserver( + FlutterWebRTCBase* base, + scoped_refptr peerconnection, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channel_name, + std::string& peerConnectionId) + : event_channel_(EventChannelProxy::Create(messenger, 
task_runner, channel_name)), + peerconnection_(peerconnection), + base_(base), + id_(peerConnectionId) { + peerconnection->RegisterRTCPeerConnectionObserver(this); +} + + +void FlutterPeerConnectionObserver::OnSignalingState(RTCSignalingState state) { + EncodableMap params; + params[EncodableValue("event")] = "signalingState"; + params[EncodableValue("state")] = signalingStateString(state); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnPeerConnectionState( + RTCPeerConnectionState state) { + EncodableMap params; + params[EncodableValue("event")] = "peerConnectionState"; + params[EncodableValue("state")] = peerConnectionStateString(state); + event_channel_->Success(EncodableValue(params)); +} + + +void FlutterPeerConnectionObserver::OnIceGatheringState( + RTCIceGatheringState state) { + EncodableMap params; + params[EncodableValue("event")] = "iceGatheringState"; + params[EncodableValue("state")] = iceGatheringStateString(state); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnIceConnectionState( + RTCIceConnectionState state) { + EncodableMap params; + params[EncodableValue("event")] = "iceConnectionState"; + params[EncodableValue("state")] = iceConnectionStateString(state); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnIceCandidate( + scoped_refptr candidate) { + EncodableMap params; + params[EncodableValue("event")] = "onCandidate"; + EncodableMap cand; + cand[EncodableValue("candidate")] = + EncodableValue(candidate->candidate().std_string()); + cand[EncodableValue("sdpMLineIndex")] = + EncodableValue(candidate->sdp_mline_index()); + cand[EncodableValue("sdpMid")] = + EncodableValue(candidate->sdp_mid().std_string()); + params[EncodableValue("candidate")] = EncodableValue(cand); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnAddStream( + scoped_refptr stream) { + 
std::string streamId = stream->id().std_string(); + + EncodableMap params; + params[EncodableValue("event")] = "onAddStream"; + params[EncodableValue("streamId")] = EncodableValue(streamId); + EncodableList audioTracks; + auto audio_tracks = stream->audio_tracks(); + for (scoped_refptr track : audio_tracks.std_vector()) { + EncodableMap audioTrack; + audioTrack[EncodableValue("id")] = EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("label")] = + EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + audioTrack[EncodableValue("enabled")] = EncodableValue(track->enabled()); + audioTrack[EncodableValue("remote")] = EncodableValue(true); + audioTrack[EncodableValue("readyState")] = "live"; + + audioTracks.push_back(EncodableValue(audioTrack)); + } + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + + EncodableList videoTracks; + auto video_tracks = stream->video_tracks(); + for (scoped_refptr track : video_tracks.std_vector()) { + EncodableMap videoTrack; + + videoTrack[EncodableValue("id")] = EncodableValue(track->id().std_string()); + videoTrack[EncodableValue("label")] = + EncodableValue(track->id().std_string()); + videoTrack[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + videoTrack[EncodableValue("enabled")] = EncodableValue(track->enabled()); + videoTrack[EncodableValue("remote")] = EncodableValue(true); + videoTrack[EncodableValue("readyState")] = "live"; + + videoTracks.push_back(EncodableValue(videoTrack)); + } + remote_streams_[streamId] = scoped_refptr(stream); + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnRemoveStream( + scoped_refptr stream) { + EncodableMap params; + params[EncodableValue("event")] = "onRemoveStream"; + params[EncodableValue("streamId")] = + 
EncodableValue(stream->label().std_string()); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnAddTrack( + vector> streams, + scoped_refptr receiver) { + auto track = receiver->track(); + + std::vector> mediaStreams; + for (scoped_refptr stream : streams.std_vector()) { + mediaStreams.push_back(stream); + EncodableMap params; + params[EncodableValue("event")] = "onAddTrack"; + params[EncodableValue("streamId")] = + EncodableValue(stream->label().std_string()); + params[EncodableValue("trackId")] = + EncodableValue(track->id().std_string()); + + EncodableMap audioTrack; + audioTrack[EncodableValue("id")] = EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("label")] = + EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + audioTrack[EncodableValue("enabled")] = EncodableValue(track->enabled()); + audioTrack[EncodableValue("remote")] = EncodableValue(true); + audioTrack[EncodableValue("readyState")] = "live"; + params[EncodableValue("track")] = EncodableValue(audioTrack); + + event_channel_->Success(EncodableValue(params)); + } +} + +void FlutterPeerConnectionObserver::OnTrack( + scoped_refptr transceiver) { + auto receiver = transceiver->receiver(); + EncodableMap params; + EncodableList streams_info; + auto streams = receiver->streams(); + for (scoped_refptr item : streams.std_vector()) { + streams_info.push_back(EncodableValue(mediaStreamToMap(item, id_))); + } + params[EncodableValue("event")] = "onTrack"; + params[EncodableValue("streams")] = EncodableValue(streams_info); + params[EncodableValue("track")] = + EncodableValue(mediaTrackToMap(receiver->track())); + params[EncodableValue("receiver")] = + EncodableValue(rtpReceiverToMap(receiver)); + params[EncodableValue("transceiver")] = + EncodableValue(transceiverToMap(transceiver)); + + event_channel_->Success(EncodableValue(params)); +} + +void 
FlutterPeerConnectionObserver::OnRemoveTrack( + scoped_refptr receiver) { + auto track = receiver->track(); + + EncodableMap params; + params[EncodableValue("event")] = "onRemoveTrack"; + params[EncodableValue("trackId")] = EncodableValue(track->id().std_string()); + params[EncodableValue("track")] = EncodableValue(mediaTrackToMap(track)); + params[EncodableValue("receiver")] = + EncodableValue(rtpReceiverToMap(receiver)); + event_channel_->Success(EncodableValue(params)); +} + +// void FlutterPeerConnectionObserver::OnRemoveTrack( +// scoped_refptr stream, +// scoped_refptr track) { + +// EncodableMap params; +// params[EncodableValue("event")] = "onRemoveTrack"; +// params[EncodableValue("streamId")] = stream->label(); +// params[EncodableValue("trackId")] = track->id(); +// +// EncodableMap videoTrack; +// videoTrack[EncodableValue("id")] = track->id(); +// videoTrack[EncodableValue("label")] = track->id(); +// videoTrack[EncodableValue("kind")] = track->kind(); +// videoTrack[EncodableValue("enabled")] = track->enabled(); +// videoTrack[EncodableValue("remote")] = true; +// videoTrack[EncodableValue("readyState")] = "live"; +// params[EncodableValue("track")] = videoTrack; +// +// event_channel_->Success(EncodableValue(params)); + +//} + +void FlutterPeerConnectionObserver::OnDataChannel( + scoped_refptr data_channel) { + int channel_id = data_channel->id(); + std::string channel_uuid = base_->GenerateUUID(); + + std::string event_channel = + "FlutterWebRTC/dataChannelEvent" + id_ + channel_uuid; + + std::unique_ptr observer( + new FlutterRTCDataChannelObserver(data_channel, base_->messenger_, + base_->task_runner_, + event_channel)); + + base_->lock(); + base_->data_channel_observers_[channel_uuid] = std::move(observer); + base_->unlock(); + + EncodableMap params; + params[EncodableValue("event")] = "didOpenDataChannel"; + params[EncodableValue("id")] = EncodableValue(channel_id); + params[EncodableValue("label")] = + 
EncodableValue(data_channel->label().std_string()); + params[EncodableValue("flutterId")] = EncodableValue(channel_uuid); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnRenegotiationNeeded() { + EncodableMap params; + params[EncodableValue("event")] = "onRenegotiationNeeded"; + event_channel_->Success(EncodableValue(params)); +} + +scoped_refptr FlutterPeerConnectionObserver::MediaStreamForId( + const std::string& id) { + auto it = remote_streams_.find(id); + if (it != remote_streams_.end()) + return (*it).second; + return nullptr; +} + +scoped_refptr FlutterPeerConnectionObserver::MediaTrackForId( + const std::string& id) { + for (auto it = remote_streams_.begin(); it != remote_streams_.end(); it++) { + auto remoteStream = (*it).second; + auto audio_tracks = remoteStream->audio_tracks(); + for (auto track : audio_tracks.std_vector()) { + if (track->id().std_string() == id) { + return track; + } + } + auto video_tracks = remoteStream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + if (track->id().std_string() == id) { + return track; + } + } + } + return nullptr; +} + +void FlutterPeerConnectionObserver::RemoveStreamForId(const std::string& id) { + auto it = remote_streams_.find(id); + if (it != remote_streams_.end()) + remote_streams_.erase(it); +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_screen_capture.cc b/common/cpp/src/flutter_screen_capture.cc new file mode 100644 index 0000000000..df660daf9b --- /dev/null +++ b/common/cpp/src/flutter_screen_capture.cc @@ -0,0 +1,282 @@ +#include "flutter_screen_capture.h" + +namespace flutter_webrtc_plugin { + +FlutterScreenCapture::FlutterScreenCapture(FlutterWebRTCBase* base) + : base_(base) {} + +bool FlutterScreenCapture::BuildDesktopSourcesList(const EncodableList& types, + bool force_reload) { + size_t size = types.size(); + sources_.clear(); + for (size_t i = 0; i < size; i++) { + std::string type_str = 
GetValue(types[i]); + DesktopType desktop_type = DesktopType::kScreen; + if (type_str == "screen") { + desktop_type = DesktopType::kScreen; + } else if (type_str == "window") { + desktop_type = DesktopType::kWindow; + } else { + // std::cout << "Unknown type " << type_str << std::endl; + return false; + } + scoped_refptr source_list; + auto it = medialist_.find(desktop_type); + if (it != medialist_.end()) { + source_list = (*it).second; + } else { + source_list = base_->desktop_device_->GetDesktopMediaList(desktop_type); + source_list->RegisterMediaListObserver(this); + medialist_[desktop_type] = source_list; + } + source_list->UpdateSourceList(force_reload); + int count = source_list->GetSourceCount(); + for (int j = 0; j < count; j++) { + sources_.push_back(source_list->GetSource(j)); + } + } + return true; +} + +void FlutterScreenCapture::GetDesktopSources( + const EncodableList& types, + std::unique_ptr result) { + if (!BuildDesktopSourcesList(types, true)) { + result->Error("Bad Arguments", "Failed to get desktop sources"); + return; + } + + EncodableList sources; + for (auto source : sources_) { + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("name")] = EncodableValue(source->name().std_string()); + info[EncodableValue("type")] = + EncodableValue(source->type() == kWindow ? 
"window" : "screen"); + // TODO "thumbnailSize" + info[EncodableValue("thumbnailSize")] = EncodableMap{ + {EncodableValue("width"), EncodableValue(0)}, + {EncodableValue("height"), EncodableValue(0)}, + }; + sources.push_back(EncodableValue(info)); + } + + std::cout << " sources: " << sources.size() << std::endl; + auto map = EncodableMap(); + map[EncodableValue("sources")] = sources; + result->Success(EncodableValue(map)); +} + +void FlutterScreenCapture::UpdateDesktopSources( + const EncodableList& types, + std::unique_ptr result) { + if (!BuildDesktopSourcesList(types, false)) { + result->Error("Bad Arguments", "Failed to update desktop sources"); + return; + } + auto map = EncodableMap(); + map[EncodableValue("result")] = true; + result->Success(EncodableValue(map)); +} + +void FlutterScreenCapture::OnMediaSourceAdded( + scoped_refptr source) { + std::cout << " OnMediaSourceAdded: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceAdded"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("name")] = EncodableValue(source->name().std_string()); + info[EncodableValue("type")] = + EncodableValue(source->type() == kWindow ? 
"window" : "screen"); + // TODO "thumbnailSize" + info[EncodableValue("thumbnailSize")] = EncodableMap{ + {EncodableValue("width"), EncodableValue(0)}, + {EncodableValue("height"), EncodableValue(0)}, + }; + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnMediaSourceRemoved( + scoped_refptr source) { + std::cout << " OnMediaSourceRemoved: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceRemoved"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnMediaSourceNameChanged( + scoped_refptr source) { + std::cout << " OnMediaSourceNameChanged: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceNameChanged"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("name")] = EncodableValue(source->name().std_string()); + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnMediaSourceThumbnailChanged( + scoped_refptr source) { + std::cout << " OnMediaSourceThumbnailChanged: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceThumbnailChanged"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("thumbnail")] = + EncodableValue(source->thumbnail().std_vector()); + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnStart(scoped_refptr capturer) { + // std::cout << " OnStart: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::OnPaused( + scoped_refptr capturer) { + // std::cout << " OnPaused: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::OnStop(scoped_refptr capturer) { + // 
std::cout << " OnStop: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::OnError(scoped_refptr capturer) { + // std::cout << " OnError: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::GetDesktopSourceThumbnail( + std::string source_id, + int width, + int height, + std::unique_ptr result) { + scoped_refptr source; + for (auto src : sources_) { + if (src->id().std_string() == source_id) { + source = src; + } + } + if (source.get() == nullptr) { + result->Error("Bad Arguments", "Failed to get desktop source thumbnail"); + return; + } + std::cout << " GetDesktopSourceThumbnail: " << source->id().std_string() + << std::endl; + source->UpdateThumbnail(); + result->Success(EncodableValue(source->thumbnail().std_vector())); +} + +void FlutterScreenCapture::GetDisplayMedia( + const EncodableMap& constraints, + std::unique_ptr result) { + std::string source_id = "0"; + // DesktopType source_type = kScreen; + double fps = 30.0; + + const EncodableMap video = findMap(constraints, "video"); + if (video != EncodableMap()) { + const EncodableMap deviceId = findMap(video, "deviceId"); + if (deviceId != EncodableMap()) { + source_id = findString(deviceId, "exact"); + if (source_id.empty()) { + result->Error("Bad Arguments", "Incorrect video->deviceId->exact"); + return; + } + if (source_id != "0") { + // source_type = DesktopType::kWindow; + } + } + const EncodableMap mandatory = findMap(video, "mandatory"); + if (mandatory != EncodableMap()) { + double frameRate = findDouble(mandatory, "frameRate"); + if (frameRate != 0.0) { + fps = frameRate; + } + } + } + + std::string uuid = base_->GenerateUUID(); + + scoped_refptr stream = + base_->factory_->CreateStream(uuid.c_str()); + + EncodableMap params; + params[EncodableValue("streamId")] = EncodableValue(uuid); + + // AUDIO + + params[EncodableValue("audioTracks")] = EncodableValue(EncodableList()); + + // VIDEO + + EncodableMap 
video_constraints; + auto it = constraints.find(EncodableValue("video")); + if (it != constraints.end() && TypeIs(it->second)) { + video_constraints = GetValue(it->second); + } + + scoped_refptr source; + for (auto src : sources_) { + if (src->id().std_string() == source_id) { + source = src; + } + } + + if (!source.get()) { + result->Error("Bad Arguments", "source not found!"); + return; + } + + scoped_refptr desktop_capturer = + base_->desktop_device_->CreateDesktopCapturer(source); + + if (!desktop_capturer.get()) { + result->Error("Bad Arguments", "CreateDesktopCapturer failed!"); + return; + } + + desktop_capturer->RegisterDesktopCapturerObserver(this); + + const char* video_source_label = "screen_capture_input"; + + scoped_refptr video_source = + base_->factory_->CreateDesktopSource( + desktop_capturer, video_source_label, + base_->ParseMediaConstraints(video_constraints)); + + // TODO: RTCVideoSource -> RTCVideoTrack + + scoped_refptr track = + base_->factory_->CreateVideoTrack(video_source, uuid.c_str()); + + EncodableList videoTracks; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + videoTracks.push_back(EncodableValue(info)); + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + stream->AddTrack(track); + + base_->local_tracks_[track->id().std_string()] = track; + + base_->local_streams_[uuid] = stream; + + desktop_capturer->Start(uint32_t(fps)); + + result->Success(EncodableValue(params)); +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_video_renderer.cc b/common/cpp/src/flutter_video_renderer.cc new file mode 100644 index 0000000000..77c5dd12c6 --- /dev/null +++ b/common/cpp/src/flutter_video_renderer.cc @@ -0,0 +1,185 @@ +#include 
"flutter_video_renderer.h" + +namespace flutter_webrtc_plugin { + +FlutterVideoRenderer::~FlutterVideoRenderer() {} + +void FlutterVideoRenderer::initialize( + TextureRegistrar* registrar, + BinaryMessenger* messenger, + TaskRunner* task_runner, + std::unique_ptr texture, + int64_t trxture_id) { + registrar_ = registrar; + texture_ = std::move(texture); + texture_id_ = trxture_id; + std::string channel_name = + "FlutterWebRTC/Texture" + std::to_string(texture_id_); + event_channel_ = EventChannelProxy::Create(messenger, task_runner, channel_name); +} + +const FlutterDesktopPixelBuffer* FlutterVideoRenderer::CopyPixelBuffer( + size_t width, + size_t height) const { + mutex_.lock(); + if (pixel_buffer_.get() && frame_.get()) { + if (pixel_buffer_->width != frame_->width() || + pixel_buffer_->height != frame_->height()) { + size_t buffer_size = + (size_t(frame_->width()) * size_t(frame_->height())) * (32 >> 3); + rgb_buffer_.reset(new uint8_t[buffer_size]); + pixel_buffer_->width = frame_->width(); + pixel_buffer_->height = frame_->height(); + } + + frame_->ConvertToARGB(RTCVideoFrame::Type::kABGR, rgb_buffer_.get(), 0, + static_cast(pixel_buffer_->width), + static_cast(pixel_buffer_->height)); + + pixel_buffer_->buffer = rgb_buffer_.get(); + mutex_.unlock(); + return pixel_buffer_.get(); + } + mutex_.unlock(); + return nullptr; +} + +void FlutterVideoRenderer::OnFrame(scoped_refptr frame) { + if (!first_frame_rendered) { + EncodableMap params; + params[EncodableValue("event")] = "didFirstFrameRendered"; + params[EncodableValue("id")] = EncodableValue(texture_id_); + event_channel_->Success(EncodableValue(params)); + pixel_buffer_.reset(new FlutterDesktopPixelBuffer()); + pixel_buffer_->width = 0; + pixel_buffer_->height = 0; + first_frame_rendered = true; + } + if (rotation_ != frame->rotation()) { + EncodableMap params; + params[EncodableValue("event")] = "didTextureChangeRotation"; + params[EncodableValue("id")] = EncodableValue(texture_id_); + 
params[EncodableValue("rotation")] = + EncodableValue((int32_t)frame->rotation()); + event_channel_->Success(EncodableValue(params)); + rotation_ = frame->rotation(); + } + if (last_frame_size_.width != frame->width() || + last_frame_size_.height != frame->height()) { + EncodableMap params; + params[EncodableValue("event")] = "didTextureChangeVideoSize"; + params[EncodableValue("id")] = EncodableValue(texture_id_); + params[EncodableValue("width")] = EncodableValue((int32_t)frame->width()); + params[EncodableValue("height")] = EncodableValue((int32_t)frame->height()); + event_channel_->Success(EncodableValue(params)); + + last_frame_size_ = {(size_t)frame->width(), (size_t)frame->height()}; + } + mutex_.lock(); + frame_ = frame; + mutex_.unlock(); + registrar_->MarkTextureFrameAvailable(texture_id_); +} + +void FlutterVideoRenderer::SetVideoTrack(scoped_refptr track) { + if (track_ != track) { + if (track_) + track_->RemoveRenderer(this); + track_ = track; + last_frame_size_ = {0, 0}; + first_frame_rendered = false; + if (track_) + track_->AddRenderer(this); + } +} + +bool FlutterVideoRenderer::CheckMediaStream(std::string mediaId) { + if (0 == mediaId.size() || 0 == media_stream_id.size()) { + return false; + } + return mediaId == media_stream_id; +} + +bool FlutterVideoRenderer::CheckVideoTrack(std::string mediaId) { + if (0 == mediaId.size() || !track_) { + return false; + } + return mediaId == track_->id().std_string(); +} + +FlutterVideoRendererManager::FlutterVideoRendererManager( + FlutterWebRTCBase* base) + : base_(base) {} + +void FlutterVideoRendererManager::CreateVideoRendererTexture( + std::unique_ptr result) { + auto texture = new RefCountedObject(); + auto textureVariant = + std::make_unique(flutter::PixelBufferTexture( + [texture](size_t width, + size_t height) -> const FlutterDesktopPixelBuffer* { + return texture->CopyPixelBuffer(width, height); + })); + + auto texture_id = base_->textures_->RegisterTexture(textureVariant.get()); + 
texture->initialize(base_->textures_, base_->messenger_, base_->task_runner_, + std::move(textureVariant), texture_id); + renderers_[texture_id] = texture; + EncodableMap params; + params[EncodableValue("textureId")] = EncodableValue(texture_id); + result->Success(EncodableValue(params)); +} + +void FlutterVideoRendererManager::VideoRendererSetSrcObject( + int64_t texture_id, + const std::string& stream_id, + const std::string& owner_tag, + const std::string& track_id) { + scoped_refptr stream = + base_->MediaStreamForId(stream_id, owner_tag); + + auto it = renderers_.find(texture_id); + if (it != renderers_.end()) { + FlutterVideoRenderer* renderer = it->second.get(); + if (stream.get()) { + auto video_tracks = stream->video_tracks(); + if (video_tracks.size() > 0) { + if (track_id == std::string()) { + renderer->SetVideoTrack(video_tracks[0]); + } else { + for (auto track : video_tracks.std_vector()) { + if (track->id().std_string() == track_id) { + renderer->SetVideoTrack(track); + break; + } + } + } + renderer->media_stream_id = stream_id; + } + } else { + renderer->SetVideoTrack(nullptr); + } + } +} + +void FlutterVideoRendererManager::VideoRendererDispose( + int64_t texture_id, + std::unique_ptr result) { + auto it = renderers_.find(texture_id); + if (it != renderers_.end()) { + it->second->SetVideoTrack(nullptr); +#if defined(_WINDOWS) + base_->textures_->UnregisterTexture(texture_id, + [&, it] { renderers_.erase(it); }); +#else + base_->textures_->UnregisterTexture(texture_id); + renderers_.erase(it); +#endif + result->Success(); + return; + } + result->Error("VideoRendererDisposeFailed", + "VideoRendererDispose() texture not found!"); +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_webrtc.cc b/common/cpp/src/flutter_webrtc.cc new file mode 100644 index 0000000000..a402e47ed9 --- /dev/null +++ b/common/cpp/src/flutter_webrtc.cc @@ -0,0 +1,1277 @@ +#include "flutter_webrtc.h" + +#include 
"flutter_webrtc/flutter_web_r_t_c_plugin.h" + +namespace flutter_webrtc_plugin { + +FlutterWebRTC::FlutterWebRTC(FlutterWebRTCPlugin* plugin) + : FlutterWebRTCBase::FlutterWebRTCBase(plugin->messenger(), + plugin->textures(), + plugin->task_runner()), + FlutterVideoRendererManager::FlutterVideoRendererManager(this), + FlutterMediaStream::FlutterMediaStream(this), + FlutterPeerConnection::FlutterPeerConnection(this), + FlutterScreenCapture::FlutterScreenCapture(this), + FlutterDataChannel::FlutterDataChannel(this), + FlutterFrameCryptor::FlutterFrameCryptor(this) {} + +FlutterWebRTC::~FlutterWebRTC() {} + +void FlutterWebRTC::HandleMethodCall( + const MethodCallProxy& method_call, + std::unique_ptr result) { + if (method_call.method_name().compare("initialize") == 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + const EncodableMap options = findMap(params, "options"); + result->Success(); + } else if (method_call.method_name().compare("createPeerConnection") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const EncodableMap configuration = findMap(params, "configuration"); + const EncodableMap constraints = findMap(params, "constraints"); + CreateRTCPeerConnection(configuration, constraints, std::move(result)); + } else if (method_call.method_name().compare("getUserMedia") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const EncodableMap constraints = findMap(params, "constraints"); + GetUserMedia(constraints, std::move(result)); + } else if (method_call.method_name().compare("getDisplayMedia") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + 
GetValue(*method_call.arguments()); + const EncodableMap constraints = findMap(params, "constraints"); + + GetDisplayMedia(constraints, std::move(result)); + } else if (method_call.method_name().compare("getDesktopSources") == 0) { + // types: ["screen", "window"] + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Bad arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const EncodableList types = findList(params, "types"); + if (types.empty()) { + result->Error("Bad Arguments", "Types is required"); + return; + } + GetDesktopSources(types, std::move(result)); + } else if (method_call.method_name().compare("updateDesktopSources") == 0) { + // types: ["screen", "window"] + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Bad arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const EncodableList types = findList(params, "types"); + if (types.empty()) { + result->Error("Bad Arguments", "Types is required"); + return; + } + UpdateDesktopSources(types, std::move(result)); + } else if (method_call.method_name().compare("getDesktopSourceThumbnail") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Bad arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + std::string sourceId = findString(params, "sourceId"); + if (sourceId.empty()) { + result->Error("Bad Arguments", "Incorrect sourceId"); + return; + } + const EncodableMap thumbnailSize = findMap(params, "thumbnailSize"); + if (!thumbnailSize.empty()) { + int width = 0; + int height = 0; + GetDesktopSourceThumbnail(sourceId, width, height, std::move(result)); + } else { + result->Error("Bad Arguments", "Bad arguments received"); + } + } else if (method_call.method_name().compare("getSources") == 0) { + GetSources(std::move(result)); + } else if (method_call.method_name().compare("selectAudioInput") 
== 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string deviceId = findString(params, "deviceId"); + SelectAudioInput(deviceId, std::move(result)); + } else if (method_call.method_name().compare("selectAudioOutput") == 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string deviceId = findString(params, "deviceId"); + SelectAudioOutput(deviceId, std::move(result)); + } else if (method_call.method_name().compare("mediaStreamGetTracks") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string streamId = findString(params, "streamId"); + MediaStreamGetTracks(streamId, std::move(result)); + } else if (method_call.method_name().compare("createOffer") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "constraints"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("createOfferFailed", + "createOffer() peerConnection is null"); + return; + } + CreateOffer(constraints, pc, std::move(result)); + } else if (method_call.method_name().compare("createAnswer") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "constraints"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("createAnswerFailed", 
+ "createAnswer() peerConnection is null"); + return; + } + CreateAnswer(constraints, pc, std::move(result)); + } else if (method_call.method_name().compare("addStream") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string streamId = findString(params, "streamId"); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (!stream) { + result->Error("addStreamFailed", "addStream() stream not found!"); + return; + } + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("addStreamFailed", "addStream() peerConnection is null"); + return; + } + pc->AddStream(stream); + result->Success(); + } else if (method_call.method_name().compare("removeStream") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string streamId = findString(params, "streamId"); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (!stream) { + result->Error("removeStreamFailed", "removeStream() stream not found!"); + return; + } + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("removeStreamFailed", + "removeStream() peerConnection is null"); + return; + } + pc->RemoveStream(stream); + result->Success(); + } else if (method_call.method_name().compare("setLocalDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, 
"peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setLocalDescriptionFailed", + "setLocalDescription() peerConnection is null"); + return; + } + + SdpParseError error; + scoped_refptr description = + RTCSessionDescription::Create(findString(constraints, "type").c_str(), + findString(constraints, "sdp").c_str(), + &error); + + if (description.get() != nullptr) { + SetLocalDescription(description.get(), pc, std::move(result)); + } else { + result->Error("setLocalDescriptionFailed", "Invalid type or sdp"); + } + } else if (method_call.method_name().compare("setRemoteDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setRemoteDescriptionFailed", + "setRemoteDescription() peerConnection is null"); + return; + } + + SdpParseError error; + scoped_refptr description = + RTCSessionDescription::Create(findString(constraints, "type").c_str(), + findString(constraints, "sdp").c_str(), + &error); + + if (description.get() != nullptr) { + SetRemoteDescription(description.get(), pc, std::move(result)); + } else { + result->Error("setRemoteDescriptionFailed", "Invalid type or sdp"); + } + } else if (method_call.method_name().compare("addCandidate") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const 
EncodableMap constraints = findMap(params, "candidate"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("addCandidateFailed", + "addCandidate() peerConnection is null"); + return; + } + + SdpParseError error; + std::string candidate = findString(constraints, "candidate"); + if (candidate.empty()) { + // received the end-of-candidates + result->Success(); + return; + } + int sdpMLineIndex = findInt(constraints, "sdpMLineIndex"); + scoped_refptr rtc_candidate = RTCIceCandidate::Create( + candidate.c_str(), findString(constraints, "sdpMid").c_str(), + sdpMLineIndex == -1 ? 0 : sdpMLineIndex, &error); + + if (rtc_candidate.get() != nullptr) { + AddIceCandidate(rtc_candidate.get(), pc, std::move(result)); + } else { + result->Error("addCandidateFailed", "Invalid candidate"); + } + } else if (method_call.method_name().compare("getStats") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string track_id = findString(params, "trackId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getStatsFailed", "getStats() peerConnection is null"); + return; + } + GetStats(track_id, pc, std::move(result)); + } else if (method_call.method_name().compare("createDataChannel") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("createDataChannelFailed", + "createDataChannel() peerConnection is null"); + return; + } + 
+ const std::string label = findString(params, "label"); + const EncodableMap dataChannelDict = findMap(params, "dataChannelDict"); + + CreateDataChannel(peerConnectionId, label, dataChannelDict, pc, + std::move(result)); + } else if (method_call.method_name().compare("dataChannelSend") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("dataChannelSendFailed", + "dataChannelSend() peerConnection is null"); + return; + } + + const std::string dataChannelId = findString(params, "dataChannelId"); + const std::string type = findString(params, "type"); + const EncodableValue data = findEncodableValue(params, "data"); + RTCDataChannel* data_channel = DataChannelForId(dataChannelId); + if (data_channel == nullptr) { + result->Error("dataChannelSendFailed", + "dataChannelSend() data_channel is null"); + return; + } + DataChannelSend(data_channel, type, data, std::move(result)); + } else if (method_call.method_name().compare("dataChannelGetBufferedAmount") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("dataChannelGetBufferedAmountFailed", + "dataChannelGetBufferedAmount() peerConnection is null"); + return; + } + + const std::string dataChannelId = findString(params, "dataChannelId"); + RTCDataChannel* data_channel = DataChannelForId(dataChannelId); + if (data_channel == nullptr) { + 
result->Error("dataChannelGetBufferedAmountFailed", + "dataChannelGetBufferedAmount() data_channel is null"); + return; + } + DataChannelGetBufferedAmount(data_channel, std::move(result)); + } else if (method_call.method_name().compare("dataChannelClose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("dataChannelCloseFailed", + "dataChannelClose() peerConnection is null"); + return; + } + + const std::string dataChannelId = findString(params, "dataChannelId"); + RTCDataChannel* data_channel = DataChannelForId(dataChannelId); + if (data_channel == nullptr) { + result->Error("dataChannelCloseFailed", + "dataChannelClose() data_channel is null"); + return; + } + DataChannelClose(data_channel, dataChannelId, std::move(result)); + } else if (method_call.method_name().compare("streamDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string stream_id = findString(params, "streamId"); + MediaStreamDispose(stream_id, std::move(result)); + } else if (method_call.method_name().compare("mediaStreamTrackSetEnable") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string track_id = findString(params, "trackId"); + const EncodableValue enable = findEncodableValue(params, "enabled"); + RTCMediaTrack* track = MediaTrackForId(track_id); + if (track != nullptr) { + track->set_enabled(GetValue(enable)); + } + result->Success(); + } 
else if (method_call.method_name().compare("trackDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string track_id = findString(params, "trackId"); + MediaStreamTrackDispose(track_id, std::move(result)); + } else if (method_call.method_name().compare("restartIce") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("restartIceFailed", "restartIce() peerConnection is null"); + return; + } + pc->RestartIce(); + result->Success(); + } else if (method_call.method_name().compare("peerConnectionClose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("peerConnectionCloseFailed", + "peerConnectionClose() peerConnection is null"); + return; + } + RTCPeerConnectionClose(pc, peerConnectionId, std::move(result)); + } else if (method_call.method_name().compare("peerConnectionDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Success(); + 
return; + } + RTCPeerConnectionDispose(pc, peerConnectionId, std::move(result)); + } else if (method_call.method_name().compare("createVideoRenderer") == 0) { + CreateVideoRendererTexture(std::move(result)); + } else if (method_call.method_name().compare("videoRendererDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + int64_t texture_id = findLongInt(params, "textureId"); + VideoRendererDispose(texture_id, std::move(result)); + } else if (method_call.method_name().compare("videoRendererSetSrcObject") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string stream_id = findString(params, "streamId"); + int64_t texture_id = findLongInt(params, "textureId"); + const std::string owner_tag = findString(params, "ownerTag"); + const std::string track_id = findString(params, "trackId"); + + VideoRendererSetSrcObject(texture_id, stream_id, owner_tag, track_id); + result->Success(); + } else if (method_call.method_name().compare( + "mediaStreamTrackSwitchCamera") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string track_id = findString(params, "trackId"); + MediaStreamTrackSwitchCamera(track_id, std::move(result)); + } else if (method_call.method_name().compare("setVolume") == 0) { + auto args = method_call.arguments(); + if (!args) { + result->Error("Bad Arguments", "setVolume() Null arguments received"); + return; + } + + const EncodableMap params = GetValue(*args); + const std::string trackId = findString(params, "trackId"); + const std::optional volume = maybeFindDouble(params, "volume"); + + if 
(trackId.empty()) { + result->Error("Bad Arguments", "setVolume() Empty track provided"); + return; + } + + if (!volume.has_value()) { + result->Error("Bad Arguments", "setVolume() No volume provided"); + return; + } + + if (volume.value() < 0) { + result->Error("Bad Arguments", "setVolume() Volume must be positive"); + return; + } + + RTCMediaTrack* track = MediaTrackForId(trackId); + if (nullptr == track) { + result->Error("setVolume", "setVolume() Unable to find provided track"); + return; + } + + std::string kind = track->kind().std_string(); + if (0 != kind.compare("audio")) { + result->Error("setVolume", + "setVolume() Only audio tracks can have volume set"); + return; + } + + auto audioTrack = static_cast(track); + audioTrack->SetVolume(volume.value()); + + result->Success(); + } else if (method_call.method_name().compare("getLocalDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("GetLocalDescription", + "GetLocalDescription() peerConnection is null"); + return; + } + + GetLocalDescription(pc, std::move(result)); + } else if (method_call.method_name().compare("getRemoteDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("GetRemoteDescription", + 
"GetRemoteDescription() peerConnection is null"); + return; + } + + GetRemoteDescription(pc, std::move(result)); + } else if (method_call.method_name().compare("mediaStreamAddTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string streamId = findString(params, "streamId"); + const std::string trackId = findString(params, "trackId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (stream == nullptr) { + result->Error("MediaStreamAddTrack", + "MediaStreamAddTrack() stream is null"); + return; + } + + scoped_refptr track = MediaTracksForId(trackId); + if (track == nullptr) { + result->Error("MediaStreamAddTrack", + "MediaStreamAddTrack() track is null"); + return; + } + + MediaStreamAddTrack(stream, track, std::move(result)); + std::string kind = track->kind().std_string(); + for (int i = 0; i < renders_.size(); i++) { + FlutterVideoRenderer* renderer = renders_.at(i).get(); + if (renderer->CheckMediaStream(streamId) && 0 == kind.compare("video")) { + renderer->SetVideoTrack(static_cast(track.get())); + } + } + } else if (method_call.method_name().compare("mediaStreamRemoveTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string streamId = findString(params, "streamId"); + const std::string trackId = findString(params, "trackId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (stream == nullptr) { + result->Error("MediaStreamRemoveTrack", + "MediaStreamRemoveTrack() stream is null"); + return; + } + + scoped_refptr track = MediaTracksForId(trackId); + if (track == nullptr) { + result->Error("MediaStreamRemoveTrack", + "MediaStreamRemoveTrack() track is null"); + return; + } + + 
MediaStreamRemoveTrack(stream, track, std::move(result)); + + for (int i = 0; i < renders_.size(); i++) { + FlutterVideoRenderer* renderer = renders_.at(i).get(); + if (renderer->CheckVideoTrack(streamId)) { + renderer->SetVideoTrack(nullptr); + } + } + } else if (method_call.method_name().compare("addTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string trackId = findString(params, "trackId"); + const EncodableList streamIds = findList(params, "streamIds"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("AddTrack", "AddTrack() peerConnection is null"); + return; + } + + scoped_refptr track = MediaTracksForId(trackId); + if (track == nullptr) { + result->Error("AddTrack", "AddTrack() track is null"); + return; + } + std::vector ids; + for (EncodableValue value : streamIds) { + ids.push_back(GetValue(value)); + } + + AddTrack(pc, track, ids, std::move(result)); + + } else if (method_call.method_name().compare("removeTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string senderId = findString(params, "senderId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("removeTrack", "removeTrack() peerConnection is null"); + return; + } + + RemoveTrack(pc, senderId, std::move(result)); + + } else if (method_call.method_name().compare("addTransceiver") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + 
return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap transceiverInit = findMap(params, "transceiverInit"); + const std::string mediaType = findString(params, "mediaType"); + const std::string trackId = findString(params, "trackId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("addTransceiver", + "addTransceiver() peerConnection is null"); + return; + } + AddTransceiver(pc, trackId, mediaType, transceiverInit, std::move(result)); + } else if (method_call.method_name().compare("getTransceivers") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getTransceivers", + "getTransceivers() peerConnection is null"); + return; + } + + GetTransceivers(pc, std::move(result)); + } else if (method_call.method_name().compare("getReceivers") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getReceivers", "getReceivers() peerConnection is null"); + return; + } + + GetReceivers(pc, std::move(result)); + + } else if (method_call.method_name().compare("getSenders") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + 
GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getSenders", "getSenders() peerConnection is null"); + return; + } + + GetSenders(pc, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderSetTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderSetTrack", + "rtpSenderSetTrack() peerConnection is null"); + return; + } + + const std::string trackId = findString(params, "trackId"); + RTCMediaTrack* track = MediaTrackForId(trackId); + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + result->Error("rtpSenderSetTrack", + "rtpSenderSetTrack() rtpSenderId is null or empty"); + return; + } + RtpSenderSetTrack(pc, track, rtpSenderId, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderSetStreams") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() peerConnection is null"); + return; + } + + const EncodableList encodableStreamIds = findList(params, "streamIds"); + if (encodableStreamIds.empty()) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() streamId is null or empty"); + return; + 
} + std::vector streamIds{}; + for (EncodableValue value : encodableStreamIds) { + streamIds.push_back(GetValue(value)); + } + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() rtpSenderId is null or empty"); + return; + } + RtpSenderSetStream(pc, streamIds, rtpSenderId, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderReplaceTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderReplaceTrack", + "rtpSenderReplaceTrack() peerConnection is null"); + return; + } + + const std::string trackId = findString(params, "trackId"); + RTCMediaTrack* track = MediaTrackForId(trackId); + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + result->Error("rtpSenderReplaceTrack", + "rtpSenderReplaceTrack() rtpSenderId is null or empty"); + return; + } + RtpSenderReplaceTrack(pc, track, rtpSenderId, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderSetParameters") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderSetParameters", + "rtpSenderSetParameters() peerConnection is null"); + return; + } + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + 
result->Error("rtpSenderSetParameters", + "rtpSenderSetParameters() rtpSenderId is null or empty"); + return; + } + + const EncodableMap parameters = findMap(params, "parameters"); + if (0 == parameters.size()) { + result->Error("rtpSenderSetParameters", + "rtpSenderSetParameters() parameters is null or empty"); + return; + } + + RtpSenderSetParameters(pc, rtpSenderId, parameters, std::move(result)); + } else if (method_call.method_name().compare("rtpTransceiverStop") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpTransceiverStop", + "rtpTransceiverStop() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverStop", + "rtpTransceiverStop() transceiverId is null or empty"); + return; + } + + RtpTransceiverStop(pc, transceiverId, std::move(result)); + } else if (method_call.method_name().compare( + "rtpTransceiverGetCurrentDirection") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error( + "rtpTransceiverGetCurrentDirection", + "rtpTransceiverGetCurrentDirection() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverGetCurrentDirection", + 
"rtpTransceiverGetCurrentDirection() transceiverId is " + "null or empty"); + return; + } + + RtpTransceiverGetCurrentDirection(pc, transceiverId, std::move(result)); + } else if (method_call.method_name().compare("rtpTransceiverSetDirection") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() transceiverId is " + "null or empty"); + return; + } + + const std::string direction = findString(params, "direction"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() direction is null or empty"); + return; + } + + RtpTransceiverSetDirection(pc, transceiverId, direction, std::move(result)); + } else if (method_call.method_name().compare("setConfiguration") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setConfiguration", + "setConfiguration() peerConnection is null"); + return; + } + + const EncodableMap configuration = findMap(params, "configuration"); + if (configuration.empty()) { + result->Error("setConfiguration", + "setConfiguration() configuration is null or empty"); + 
return; + } + SetConfiguration(pc, configuration, std::move(result)); + } else if (method_call.method_name().compare("captureFrame") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string path = findString(params, "path"); + if (path.empty()) { + result->Error("captureFrame", "captureFrame() path is null or empty"); + return; + } + + const std::string trackId = findString(params, "trackId"); + RTCMediaTrack* track = MediaTrackForId(trackId); + if (nullptr == track) { + result->Error("captureFrame", "captureFrame() track is null"); + return; + } + std::string kind = track->kind().std_string(); + if (0 != kind.compare("video")) { + result->Error("captureFrame", "captureFrame() track not is video track"); + return; + } + CaptureFrame(reinterpret_cast(track), path, + std::move(result)); + + } else if (method_call.method_name().compare("createLocalMediaStream") == 0) { + CreateLocalMediaStream(std::move(result)); + } else if (method_call.method_name().compare("canInsertDtmf") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string rtpSenderId = findString(params, "rtpSenderId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("canInsertDtmf", "canInsertDtmf() peerConnection is null"); + return; + } + + auto rtpSender = GetRtpSenderById(pc, rtpSenderId); + + if (rtpSender == nullptr) { + result->Error("sendDtmf", "sendDtmf() rtpSender is null"); + return; + } + auto dtmfSender = rtpSender->dtmf_sender(); + bool canInsertDtmf = dtmfSender->CanInsertDtmf(); + + result->Success(EncodableValue(canInsertDtmf)); + } else if 
(method_call.method_name().compare("sendDtmf") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string rtpSenderId = findString(params, "rtpSenderId"); + const std::string tone = findString(params, "tone"); + int duration = findInt(params, "duration"); + int gap = findInt(params, "gap"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("sendDtmf", "sendDtmf() peerConnection is null"); + return; + } + + auto rtpSender = GetRtpSenderById(pc, rtpSenderId); + + if (rtpSender == nullptr) { + result->Error("sendDtmf", "sendDtmf() rtpSender is null"); + return; + } + + auto dtmfSender = rtpSender->dtmf_sender(); + dtmfSender->InsertDtmf(tone, duration, gap); + + result->Success(); + } else if (method_call.method_name().compare("getRtpSenderCapabilities") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + RTCMediaType mediaType = RTCMediaType::AUDIO; + const std::string kind = findString(params, "kind"); + if (0 == kind.compare("video")) { + mediaType = RTCMediaType::VIDEO; + } else if (0 == kind.compare("audio")) { + mediaType = RTCMediaType::AUDIO; + } else { + result->Error("getRtpSenderCapabilities", + "getRtpSenderCapabilities() kind is null or empty"); + return; + } + auto capabilities = factory_->GetRtpSenderCapabilities(mediaType); + EncodableMap map; + EncodableList codecsList; + for (auto codec : capabilities->codecs().std_vector()) { + EncodableMap codecMap; + codecMap[EncodableValue("mimeType")] = + EncodableValue(codec->mime_type().std_string()); + codecMap[EncodableValue("clockRate")] = + EncodableValue(codec->clock_rate()); + 
codecMap[EncodableValue("channels")] = EncodableValue(codec->channels()); + codecMap[EncodableValue("sdpFmtpLine")] = + EncodableValue(codec->sdp_fmtp_line().std_string()); + codecsList.push_back(EncodableValue(codecMap)); + } + map[EncodableValue("codecs")] = EncodableValue(codecsList); + map[EncodableValue("headerExtensions")] = EncodableValue(EncodableList()); + map[EncodableValue("fecMechanisms")] = EncodableValue(EncodableList()); + + result->Success(EncodableValue(map)); + } else if (method_call.method_name().compare("getRtpReceiverCapabilities") == + 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + + RTCMediaType mediaType = RTCMediaType::AUDIO; + const std::string kind = findString(params, "kind"); + if (0 == kind.compare("video")) { + mediaType = RTCMediaType::VIDEO; + } else if (0 == kind.compare("audio")) { + mediaType = RTCMediaType::AUDIO; + } else { + result->Error("getRtpSenderCapabilities", + "getRtpSenderCapabilities() kind is null or empty"); + return; + } + auto capabilities = factory_->GetRtpReceiverCapabilities(mediaType); + EncodableMap map; + EncodableList codecsList; + for (auto codec : capabilities->codecs().std_vector()) { + EncodableMap codecMap; + codecMap[EncodableValue("mimeType")] = + EncodableValue(codec->mime_type().std_string()); + codecMap[EncodableValue("clockRate")] = + EncodableValue(codec->clock_rate()); + codecMap[EncodableValue("channels")] = EncodableValue(codec->channels()); + codecMap[EncodableValue("sdpFmtpLine")] = + EncodableValue(codec->sdp_fmtp_line().std_string()); + codecsList.push_back(EncodableValue(codecMap)); + } + map[EncodableValue("codecs")] = EncodableValue(codecsList); + map[EncodableValue("headerExtensions")] = EncodableValue(EncodableList()); + map[EncodableValue("fecMechanisms")] = EncodableValue(EncodableList()); + + result->Success(EncodableValue(map)); + } else if (method_call.method_name().compare("setCodecPreferences") == 0) { + if (!method_call.arguments()) { + 
result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setCodecPreferences", + "setCodecPreferences() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("setCodecPreferences", + "setCodecPreferences() transceiverId is null or empty"); + return; + } + + const EncodableList codecs = findList(params, "codecs"); + if (codecs.empty()) { + result->Error("Bad Arguments", "Codecs is required"); + return; + } + RtpTransceiverSetCodecPreferences(pc, transceiverId, codecs, + std::move(result)); + } else if (method_call.method_name().compare("getSignalingState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getSignalingState", + "getSignalingState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + signalingStateString(pc->signaling_state()); + result->Success(EncodableValue(state)); + } else if (method_call.method_name().compare("getIceGatheringState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + 
result->Error("getIceGatheringState", + "getIceGatheringState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + iceGatheringStateString(pc->ice_gathering_state()); + result->Success(EncodableValue(state)); + } else if (method_call.method_name().compare("getIceConnectionState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getIceConnectionState", + "getIceConnectionState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + iceConnectionStateString(pc->ice_connection_state()); + result->Success(EncodableValue(state)); + } else if (method_call.method_name().compare("getConnectionState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getConnectionState", + "getConnectionState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + peerConnectionStateString(pc->peer_connection_state()); + result->Success(EncodableValue(state)); + } else { + if (HandleFrameCryptorMethodCall(method_call, std::move(result), &result)) { + return; + } else { + result->NotImplemented(); + } + } +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_webrtc_base.cc b/common/cpp/src/flutter_webrtc_base.cc new file mode 100644 index 0000000000..a8c184ba15 --- 
/dev/null +++ b/common/cpp/src/flutter_webrtc_base.cc @@ -0,0 +1,363 @@ +#include "flutter_webrtc_base.h" + +#include "flutter_data_channel.h" +#include "flutter_peerconnection.h" + +namespace flutter_webrtc_plugin { + +const char* kEventChannelName = "FlutterWebRTC.Event"; + +FlutterWebRTCBase::FlutterWebRTCBase(BinaryMessenger* messenger, + TextureRegistrar* textures, + TaskRunner *task_runner) + : messenger_(messenger), task_runner_(task_runner), textures_(textures) { + LibWebRTC::Initialize(); + factory_ = LibWebRTC::CreateRTCPeerConnectionFactory(); + audio_device_ = factory_->GetAudioDevice(); + video_device_ = factory_->GetVideoDevice(); + desktop_device_ = factory_->GetDesktopDevice(); + event_channel_ = EventChannelProxy::Create(messenger_, task_runner_, kEventChannelName); +} + +FlutterWebRTCBase::~FlutterWebRTCBase() { + LibWebRTC::Terminate(); +} + +EventChannelProxy* FlutterWebRTCBase::event_channel() { + return event_channel_ ? event_channel_.get() : nullptr; +} + +std::string FlutterWebRTCBase::GenerateUUID() { + return uuidxx::uuid::Generate().ToString(false); +} + +RTCPeerConnection* FlutterWebRTCBase::PeerConnectionForId( + const std::string& id) { + auto it = peerconnections_.find(id); + + if (it != peerconnections_.end()) + return (*it).second.get(); + + return nullptr; +} + +void FlutterWebRTCBase::RemovePeerConnectionForId(const std::string& id) { + auto it = peerconnections_.find(id); + if (it != peerconnections_.end()) + peerconnections_.erase(it); +} + +RTCMediaTrack* FlutterWebRTCBase ::MediaTrackForId(const std::string& id) { + auto it = local_tracks_.find(id); + + if (it != local_tracks_.end()) + return (*it).second.get(); + + for (auto kv : peerconnection_observers_) { + auto pco = kv.second.get(); + auto track = pco->MediaTrackForId(id); + if (track != nullptr) + return track; + } + + return nullptr; +} + +void FlutterWebRTCBase::RemoveMediaTrackForId(const std::string& id) { + auto it = local_tracks_.find(id); + if (it != 
local_tracks_.end()) + local_tracks_.erase(it); +} + +FlutterPeerConnectionObserver* FlutterWebRTCBase::PeerConnectionObserversForId( + const std::string& id) { + auto it = peerconnection_observers_.find(id); + + if (it != peerconnection_observers_.end()) + return (*it).second.get(); + + return nullptr; +} + +void FlutterWebRTCBase::RemovePeerConnectionObserversForId( + const std::string& id) { + auto it = peerconnection_observers_.find(id); + if (it != peerconnection_observers_.end()) + peerconnection_observers_.erase(it); +} + +scoped_refptr FlutterWebRTCBase::MediaStreamForId( + const std::string& id, std::string ownerTag) { + if (!ownerTag.empty()) { + if (ownerTag == "local") { + auto it = local_streams_.find(id); + if (it != local_streams_.end()) { + return (*it).second; + } + } else { + auto pco = peerconnection_observers_.find(ownerTag); + if (peerconnection_observers_.end() != pco) { + auto stream = pco->second->MediaStreamForId(id); + if (stream != nullptr) { + return stream; + } + } + } + } + + auto it = local_streams_.find(id); + if (it != local_streams_.end()) { + return (*it).second; + } + + return nullptr; +} + +void FlutterWebRTCBase::RemoveStreamForId(const std::string& id) { + auto it = local_streams_.find(id); + if (it != local_streams_.end()) + local_streams_.erase(it); +} + +bool FlutterWebRTCBase::ParseConstraints(const EncodableMap& constraints, + RTCConfiguration* configuration) { + memset(&configuration->ice_servers, 0, sizeof(configuration->ice_servers)); + return false; +} + +void FlutterWebRTCBase::ParseConstraints( + const EncodableMap& src, + scoped_refptr mediaConstraints, + ParseConstraintType type /*= kMandatory*/) { + for (auto kv : src) { + EncodableValue k = kv.first; + EncodableValue v = kv.second; + std::string key = GetValue(k); + std::string value; + if (TypeIs(v) || TypeIs(v)) { + } else if (TypeIs(v)) { + value = GetValue(v); + } else if (TypeIs(v)) { + value = std::to_string(GetValue(v)); + } else if (TypeIs(v)) { + value 
= std::to_string(GetValue(v)); + } else if (TypeIs(v)) { + value = GetValue(v) ? RTCMediaConstraints::kValueTrue + : RTCMediaConstraints::kValueFalse; + } else { + value = std::to_string(GetValue(v)); + } + if (type == kMandatory) { + mediaConstraints->AddMandatoryConstraint(key.c_str(), value.c_str()); + } else { + mediaConstraints->AddOptionalConstraint(key.c_str(), value.c_str()); + if (key == "DtlsSrtpKeyAgreement") { + configuration_.srtp_type = GetValue(v) + ? MediaSecurityType::kDTLS_SRTP + : MediaSecurityType::kSDES_SRTP; + } + } + } +} + +scoped_refptr FlutterWebRTCBase::ParseMediaConstraints( + const EncodableMap& constraints) { + scoped_refptr media_constraints = + RTCMediaConstraints::Create(); + + if (constraints.find(EncodableValue("mandatory")) != constraints.end()) { + auto it = constraints.find(EncodableValue("mandatory")); + const EncodableMap mandatory = GetValue(it->second); + ParseConstraints(mandatory, media_constraints, kMandatory); + } else { + // Log.d(TAG, "mandatory constraints are not a map"); + } + + auto it = constraints.find(EncodableValue("optional")); + if (it != constraints.end()) { + const EncodableValue optional = it->second; + if (TypeIs(optional)) { + ParseConstraints(GetValue(optional), media_constraints, + kOptional); + } else if (TypeIs(optional)) { + const EncodableList list = GetValue(optional); + for (size_t i = 0; i < list.size(); i++) { + ParseConstraints(GetValue(list[i]), media_constraints, + kOptional); + } + } + } else { + // Log.d(TAG, "optional constraints are not an array"); + } + + return media_constraints; +} + +bool FlutterWebRTCBase::CreateIceServers(const EncodableList& iceServersArray, + IceServer* ice_servers) { + size_t size = iceServersArray.size(); + for (size_t i = 0; i < size; i++) { + IceServer& ice_server = ice_servers[i]; + EncodableMap iceServerMap = GetValue(iceServersArray[i]); + + if (iceServerMap.find(EncodableValue("username")) != iceServerMap.end()) { + ice_server.username = GetValue( + 
iceServerMap.find(EncodableValue("username"))->second); + } + if (iceServerMap.find(EncodableValue("credential")) != iceServerMap.end()) { + ice_server.password = GetValue( + iceServerMap.find(EncodableValue("credential"))->second); + } + + auto it = iceServerMap.find(EncodableValue("url")); + if (it != iceServerMap.end() && TypeIs(it->second)) { + ice_server.uri = GetValue(it->second); + } + it = iceServerMap.find(EncodableValue("urls")); + if (it != iceServerMap.end()) { + if (TypeIs(it->second)) { + ice_server.uri = GetValue(it->second); + } + if (TypeIs(it->second)) { + const EncodableList urls = GetValue(it->second); + for (auto url : urls) { + if (TypeIs(url)) { + const EncodableMap map = GetValue(url); + std::string value; + auto it2 = map.find(EncodableValue("url")); + if (it2 != map.end()) { + ice_server.uri = GetValue(it2->second); + } + } else if (TypeIs(url)) { + ice_server.uri = GetValue(url); + } + } + } + } + } + return size > 0; +} + +bool FlutterWebRTCBase::ParseRTCConfiguration(const EncodableMap& map, + RTCConfiguration& conf) { + auto it = map.find(EncodableValue("iceServers")); + if (it != map.end()) { + const EncodableList iceServersArray = GetValue(it->second); + CreateIceServers(iceServersArray, conf.ice_servers); + } + // iceTransportPolicy (public API) + it = map.find(EncodableValue("iceTransportPolicy")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "all") // public + conf.type = IceTransportsType::kAll; + else if (v == "relay") + conf.type = IceTransportsType::kRelay; + else if (v == "nohost") + conf.type = IceTransportsType::kNoHost; + else if (v == "none") + conf.type = IceTransportsType::kNone; + } + + // bundlePolicy (public api) + it = map.find(EncodableValue("bundlePolicy")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "balanced") // public + conf.bundle_policy = kBundlePolicyBalanced; + else if (v == "max-compat") // public + 
conf.bundle_policy = kBundlePolicyMaxCompat; + else if (v == "max-bundle") // public + conf.bundle_policy = kBundlePolicyMaxBundle; + } + + // rtcpMuxPolicy (public api) + it = map.find(EncodableValue("rtcpMuxPolicy")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "negotiate") // public + conf.rtcp_mux_policy = RtcpMuxPolicy::kRtcpMuxPolicyNegotiate; + else if (v == "require") // public + conf.rtcp_mux_policy = RtcpMuxPolicy::kRtcpMuxPolicyRequire; + } + + // FIXME: peerIdentity of type DOMString (public API) + // FIXME: certificates of type sequence (public API) + // iceCandidatePoolSize of type unsigned short, defaulting to 0 + it = map.find(EncodableValue("iceCandidatePoolSize")); + if (it != map.end()) { + conf.ice_candidate_pool_size = GetValue(it->second); + } + + // sdpSemantics (public api) + it = map.find(EncodableValue("sdpSemantics")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "plan-b") // public + conf.sdp_semantics = SdpSemantics::kPlanB; + else if (v == "unified-plan") // public + conf.sdp_semantics = SdpSemantics::kUnifiedPlan; + } else { + conf.sdp_semantics = SdpSemantics::kUnifiedPlan; + } + + // maxIPv6Networks + it = map.find(EncodableValue("maxIPv6Networks")); + if (it != map.end()) { + conf.max_ipv6_networks = GetValue(it->second); + } + return true; +} + +scoped_refptr FlutterWebRTCBase::MediaTracksForId( + const std::string& id) { + auto it = local_tracks_.find(id); + if (it != local_tracks_.end()) { + return (*it).second; + } + + for (auto it2 : peerconnection_observers_) { + auto pco = it2.second; + auto t = pco->MediaTrackForId(id); + if (t != nullptr) { + return t; + } + } + + return nullptr; +} + +void FlutterWebRTCBase::RemoveTracksForId(const std::string& id) { + auto it = local_tracks_.find(id); + if (it != local_tracks_.end()) + local_tracks_.erase(it); +} + +libwebrtc::scoped_refptr 
+FlutterWebRTCBase::GetRtpSenderById(RTCPeerConnection* pc, std::string id) { + libwebrtc::scoped_refptr result; + auto senders = pc->senders(); + for (scoped_refptr item : senders.std_vector()) { + std::string itemId = item->id().std_string(); + if (nullptr == result.get() && 0 == id.compare(itemId)) { + result = item; + } + } + return result; +} + +libwebrtc::scoped_refptr +FlutterWebRTCBase::GetRtpReceiverById(RTCPeerConnection* pc, + std::string id) { + libwebrtc::scoped_refptr result; + auto receivers = pc->receivers(); + for (scoped_refptr item : receivers.std_vector()) { + std::string itemId = item->id().std_string(); + if (nullptr == result.get() && 0 == id.compare(itemId)) { + result = item; + } + } + return result; +} + +} // namespace flutter_webrtc_plugin diff --git a/common/darwin/Classes/AudioManager.h b/common/darwin/Classes/AudioManager.h new file mode 100644 index 0000000000..ec7d609cde --- /dev/null +++ b/common/darwin/Classes/AudioManager.h @@ -0,0 +1,19 @@ +#import +#import +#import "AudioProcessingAdapter.h" + +@interface AudioManager : NSObject + +@property(nonatomic, strong) RTCDefaultAudioProcessingModule* _Nonnull audioProcessingModule; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull capturePostProcessingAdapter; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull renderPreProcessingAdapter; + ++ (_Nonnull instancetype)sharedInstance; + +- (void)addLocalAudioRenderer:(nonnull id)renderer; + +- (void)removeLocalAudioRenderer:(nonnull id)renderer; + +@end diff --git a/common/darwin/Classes/AudioManager.m b/common/darwin/Classes/AudioManager.m new file mode 100644 index 0000000000..22fe818e5d --- /dev/null +++ b/common/darwin/Classes/AudioManager.m @@ -0,0 +1,50 @@ +#import "AudioManager.h" +#import "AudioProcessingAdapter.h" + +@implementation AudioManager { + RTCDefaultAudioProcessingModule* _audioProcessingModule; + AudioProcessingAdapter* _capturePostProcessingAdapter; + AudioProcessingAdapter* 
_renderPreProcessingAdapter; +} + +@synthesize capturePostProcessingAdapter = _capturePostProcessingAdapter; +@synthesize renderPreProcessingAdapter = _renderPreProcessingAdapter; +@synthesize audioProcessingModule = _audioProcessingModule; + ++ (instancetype)sharedInstance { + static dispatch_once_t onceToken; + static AudioManager* sharedInstance = nil; + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (instancetype)init { + if (self = [super init]) { + _audioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init]; + _capturePostProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _renderPreProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _audioProcessingModule.capturePostProcessingDelegate = _capturePostProcessingAdapter; + _audioProcessingModule.renderPreProcessingDelegate = _renderPreProcessingAdapter; + } + return self; +} + +- (void)addLocalAudioRenderer:(nonnull id)renderer { + [_capturePostProcessingAdapter addAudioRenderer:renderer]; +} + +- (void)removeLocalAudioRenderer:(nonnull id)renderer { + [_capturePostProcessingAdapter removeAudioRenderer:renderer]; +} + +- (void)addRemoteAudioSink:(nonnull id)sink { + [_renderPreProcessingAdapter addAudioRenderer:sink]; +} + +- (void)removeRemoteAudioSink:(nonnull id)sink { + [_renderPreProcessingAdapter removeAudioRenderer:sink]; +} + +@end diff --git a/common/darwin/Classes/AudioProcessingAdapter.h b/common/darwin/Classes/AudioProcessingAdapter.h new file mode 100644 index 0000000000..751a034764 --- /dev/null +++ b/common/darwin/Classes/AudioProcessingAdapter.h @@ -0,0 +1,26 @@ +#import +#import + +@protocol ExternalAudioProcessingDelegate + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels; + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) * _Nonnull)audioBuffer; + +- (void)audioProcessingRelease; + +@end + +@interface AudioProcessingAdapter : NSObject + +- 
(nonnull instancetype)init; + +- (void)addProcessing:(id _Nonnull)processor; + +- (void)removeProcessing:(id _Nonnull)processor; + +- (void)addAudioRenderer:(nonnull id)renderer; + +- (void)removeAudioRenderer:(nonnull id)renderer; + +@end diff --git a/common/darwin/Classes/AudioProcessingAdapter.m b/common/darwin/Classes/AudioProcessingAdapter.m new file mode 100644 index 0000000000..8e3c0beba1 --- /dev/null +++ b/common/darwin/Classes/AudioProcessingAdapter.m @@ -0,0 +1,105 @@ +#import "AudioProcessingAdapter.h" +#import +#import + +@implementation AudioProcessingAdapter { + NSMutableArray>* _renderers; + NSMutableArray>* _processors; + os_unfair_lock _lock; +} + +- (instancetype)init { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _renderers = [[NSMutableArray> alloc] init]; + _processors = [[NSMutableArray> alloc] init]; + } + return self; +} + +- (void)addProcessing:(id _Nonnull)processor { + os_unfair_lock_lock(&_lock); + [_processors addObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id _Nonnull)processor { + os_unfair_lock_lock(&_lock); + _processors = [[_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)addAudioRenderer:(nonnull id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers addObject:renderer]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeAudioRenderer:(nonnull id)renderer { + os_unfair_lock_lock(&_lock); + _renderers = [[_renderers + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != renderer; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels { + os_unfair_lock_lock(&_lock); + for (id processor in 
_processors) { + [processor audioProcessingInitializeWithSampleRate:sampleRateHz channels:channels]; + } + os_unfair_lock_unlock(&_lock); +} + +- (AVAudioPCMBuffer*)toPCMBuffer:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + AVAudioFormat* format = + [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16 + sampleRate:audioBuffer.frames * 100.0 + channels:(AVAudioChannelCount)audioBuffer.channels + interleaved:NO]; + AVAudioPCMBuffer* pcmBuffer = + [[AVAudioPCMBuffer alloc] initWithPCMFormat:format + frameCapacity:(AVAudioFrameCount)audioBuffer.frames]; + if (!pcmBuffer) { + NSLog(@"Failed to create AVAudioPCMBuffer"); + return nil; + } + pcmBuffer.frameLength = (AVAudioFrameCount)audioBuffer.frames; + for (int i = 0; i < audioBuffer.channels; i++) { + float* sourceBuffer = [audioBuffer rawBufferForChannel:i]; + int16_t* targetBuffer = (int16_t*)pcmBuffer.int16ChannelData[i]; + for (int frame = 0; frame < audioBuffer.frames; frame++) { + targetBuffer[frame] = sourceBuffer[frame]; + } + } + return pcmBuffer; +} + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingProcess:audioBuffer]; + } + + for (id renderer in _renderers) { + [renderer renderPCMBuffer:[self toPCMBuffer:audioBuffer]]; + } + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingRelease { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingRelease]; + } + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/common/darwin/Classes/AudioUtils.h b/common/darwin/Classes/AudioUtils.h new file mode 100644 index 0000000000..02d6f1c09f --- /dev/null +++ b/common/darwin/Classes/AudioUtils.h @@ -0,0 +1,15 @@ +#if TARGET_OS_IPHONE + +#import + +@interface AudioUtils : NSObject ++ (void)ensureAudioSessionWithRecording:(BOOL)recording; +// needed for wired headphones to use headphone mic ++ 
(BOOL)selectAudioInput:(AVAudioSessionPort)type; ++ (void)setSpeakerphoneOn:(BOOL)enable; ++ (void)setSpeakerphoneOnButPreferBluetooth; ++ (void)deactiveRtcAudioSession; ++ (void) setAppleAudioConfiguration:(NSDictionary*)configuration; +@end + +#endif diff --git a/common/darwin/Classes/AudioUtils.m b/common/darwin/Classes/AudioUtils.m new file mode 100644 index 0000000000..a2a863b057 --- /dev/null +++ b/common/darwin/Classes/AudioUtils.m @@ -0,0 +1,229 @@ +#if TARGET_OS_IPHONE +#import "AudioUtils.h" +#import + +@implementation AudioUtils + ++ (void)ensureAudioSessionWithRecording:(BOOL)recording { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + // we also need to set default WebRTC audio configuration, since it may be activated after + // this method is called + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + // require audio session to be either PlayAndRecord or MultiRoute + if (recording && session.category != AVAudioSessionCategoryPlayAndRecord && + session.category != AVAudioSessionCategoryMultiRoute) { + config.category = AVAudioSessionCategoryPlayAndRecord; + config.categoryOptions = + AVAudioSessionCategoryOptionAllowBluetooth | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowAirPlay; + + [session lockForConfiguration]; + NSError* error = nil; + bool success = [session setCategory:config.category withOptions:config.categoryOptions error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[true]: setCategory failed due to: %@", error); + success = [session setMode:config.mode error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[true]: setMode failed due to: %@", error); + [session unlockForConfiguration]; + } else if (!recording && (session.category == AVAudioSessionCategoryAmbient || + session.category == AVAudioSessionCategorySoloAmbient)) { + config.mode = AVAudioSessionModeDefault; + [session lockForConfiguration]; + 
NSError* error = nil; + bool success = [session setMode:config.mode error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[false]: setMode failed due to: %@", error); + [session unlockForConfiguration]; + } +} + ++ (BOOL)selectAudioInput:(AVAudioSessionPort)type { + RTCAudioSession* rtcSession = [RTCAudioSession sharedInstance]; + AVAudioSessionPortDescription* inputPort = nil; + for (AVAudioSessionPortDescription* port in rtcSession.session.availableInputs) { + if ([port.portType isEqualToString:type]) { + inputPort = port; + break; + } + } + if (inputPort != nil) { + NSError* errOut = nil; + [rtcSession lockForConfiguration]; + [rtcSession setPreferredInput:inputPort error:&errOut]; + [rtcSession unlockForConfiguration]; + if (errOut != nil) { + return NO; + } + return YES; + } + return NO; +} + ++ (void)setSpeakerphoneOn:(BOOL)enable { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + + if(enable && config.category != AVAudioSessionCategoryPlayAndRecord) { + NSLog(@"setSpeakerphoneOn: Category option 'defaultToSpeaker' is only applicable with category 'playAndRecord', ignore."); + return; + } + + [session lockForConfiguration]; + NSError* error = nil; + if (!enable) { + [session setMode:config.mode error:&error]; + BOOL success = [session setCategory:config.category + withOptions:AVAudioSessionCategoryOptionAllowAirPlay | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowBluetooth + error:&error]; + + success = [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None + error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error); + } else { + [session setMode:config.mode error:&error]; + BOOL success = [session setCategory:config.category + withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker | + AVAudioSessionCategoryOptionAllowAirPlay | 
+ AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowBluetooth + error:&error]; + + success = [session overrideOutputAudioPort:kAudioSessionProperty_OverrideAudioRoute + error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error); + } + [session unlockForConfiguration]; +} + ++ (void)setSpeakerphoneOnButPreferBluetooth { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + [session lockForConfiguration]; + NSError* error = nil; + [session setMode:config.mode error:&error]; + BOOL success = [session setCategory:config.category + withOptions:AVAudioSessionCategoryOptionAllowAirPlay | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowBluetooth | + AVAudioSessionCategoryOptionDefaultToSpeaker + error:&error]; + + success = [session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None + error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOnButPreferBluetooth: Port override failed due to: %@", error); + + success = [session setActive:YES error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOnButPreferBluetooth: Audio session override failed: %@", error); + else + NSLog(@"AudioSession override with bluetooth preference via setSpeakerphoneOnButPreferBluetooth successfull "); + [session unlockForConfiguration]; +} + ++ (void)deactiveRtcAudioSession { + NSError* error = nil; + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + [session lockForConfiguration]; + if ([session isActive]) { + BOOL success = [session setActive:NO error:&error]; + if (!success) + NSLog(@"RTC Audio session deactive failed: %@", error); + else + NSLog(@"RTC AudioSession deactive is successful "); + } + [session unlockForConfiguration]; +} + + ++ (AVAudioSessionMode)audioSessionModeFromString:(NSString*)mode { + if([@"default_" isEqualToString:mode]) { + return 
AVAudioSessionModeDefault; + } else if([@"voicePrompt" isEqualToString:mode]) { + return AVAudioSessionModeVoicePrompt; + } else if([@"videoRecording" isEqualToString:mode]) { + return AVAudioSessionModeVideoRecording; + } else if([@"videoChat" isEqualToString:mode]) { + return AVAudioSessionModeVideoChat; + } else if([@"voiceChat" isEqualToString:mode]) { + return AVAudioSessionModeVoiceChat; + } else if([@"gameChat" isEqualToString:mode]) { + return AVAudioSessionModeGameChat; + } else if([@"measurement" isEqualToString:mode]) { + return AVAudioSessionModeMeasurement; + } else if([@"moviePlayback" isEqualToString:mode]) { + return AVAudioSessionModeMoviePlayback; + } else if([@"spokenAudio" isEqualToString:mode]) { + return AVAudioSessionModeSpokenAudio; + } + return AVAudioSessionModeDefault; +} + ++ (AVAudioSessionCategory)audioSessionCategoryFromString:(NSString *)category { + if([@"ambient" isEqualToString:category]) { + return AVAudioSessionCategoryAmbient; + } else if([@"soloAmbient" isEqualToString:category]) { + return AVAudioSessionCategorySoloAmbient; + } else if([@"playback" isEqualToString:category]) { + return AVAudioSessionCategoryPlayback; + } else if([@"record" isEqualToString:category]) { + return AVAudioSessionCategoryRecord; + } else if([@"playAndRecord" isEqualToString:category]) { + return AVAudioSessionCategoryPlayAndRecord; + } else if([@"multiRoute" isEqualToString:category]) { + return AVAudioSessionCategoryMultiRoute; + } + return AVAudioSessionCategoryAmbient; +} + ++ (void) setAppleAudioConfiguration:(NSDictionary*)configuration { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + + NSString* appleAudioCategory = configuration[@"appleAudioCategory"]; + NSArray* appleAudioCategoryOptions = configuration[@"appleAudioCategoryOptions"]; + NSString* appleAudioMode = configuration[@"appleAudioMode"]; + + [session lockForConfiguration]; 
+ + if(appleAudioCategoryOptions != nil) { + config.categoryOptions = 0; + for(NSString* option in appleAudioCategoryOptions) { + if([@"mixWithOthers" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers; + } else if([@"duckOthers" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionDuckOthers; + } else if([@"allowBluetooth" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetooth; + } else if([@"allowBluetoothA2DP" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetoothA2DP; + } else if([@"allowAirPlay" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowAirPlay; + } else if([@"defaultToSpeaker" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker; + } + } + } + + if(appleAudioCategory != nil) { + config.category = [AudioUtils audioSessionCategoryFromString:appleAudioCategory]; + [session setCategory:config.category withOptions:config.categoryOptions error:nil]; + } + + if(appleAudioMode != nil) { + config.mode = [AudioUtils audioSessionModeFromString:appleAudioMode]; + [session setMode:config.mode error:nil]; + } + + [session unlockForConfiguration]; + +} + +@end +#endif diff --git a/common/darwin/Classes/CameraUtils.h b/common/darwin/Classes/CameraUtils.h new file mode 100644 index 0000000000..d6859d88ee --- /dev/null +++ b/common/darwin/Classes/CameraUtils.h @@ -0,0 +1,43 @@ +#import +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (CameraUtils) + +- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(nonnull 
FlutterResult)result; + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; + +- (NSInteger)selectFpsForFormat:(nonnull AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps; + +- (nullable AVCaptureDeviceFormat*)selectFormatForDevice:(nonnull AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight; + +- (nullable AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position; + + +@end diff --git a/common/darwin/Classes/CameraUtils.m b/common/darwin/Classes/CameraUtils.m new file mode 100644 index 0000000000..e05d32e055 --- /dev/null +++ b/common/darwin/Classes/CameraUtils.m @@ -0,0 +1,350 @@ +#import "CameraUtils.h" + +@implementation FlutterWebRTCPlugin (CameraUtils) + +-(AVCaptureDevice*) currentDevice { + if (!self.videoCapturer) { + return nil; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + return nil; + } + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + return deviceInput.device; +} + +- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. 
Can't check torch"); + result(@NO); + return; + } + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +#else + NSLog(@"Not supported on macOS. Can't check torch"); + result(@NO); +#endif +} + +- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(FlutterResult)result { + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device is nil" details:nil]); + return; + } + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device does not support torch" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:error.localizedDescription details:nil]); + return; + } + + device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"device is nil" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. 
%@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:error.localizedDescription details:nil]); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#else + NSLog(@"Not supported on macOS. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)applyFocusMode:(NSString*)focusMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } else if([@"auto" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; + } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"device is nil" details:nil]); + return; + } + self.focusMode = focusMode; + [self applyFocusMode:focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"device is nil" details:nil]); + return; + } + BOOL reset = ((NSNumber *)focusPoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)focusPoint[@"x"]).doubleValue; + y = ((NSNumber *)focusPoint[@"y"]).doubleValue; + } + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]); + return; + } + + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + + [device setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyFocusMode:self.focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void) applyExposureMode:(NSString*)exposureMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } else if([@"auto" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) { + [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure]; + } else if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result{ +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"device is nil" details:nil]); + return; + } + self.exposureMode = exposureMode; + [self applyExposureMode:exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +#if TARGET_OS_IPHONE +- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation + x:(double)x + y:(double)y { + double oldX = x, oldY = y; + switch (orientation) { + case UIDeviceOrientationPortrait: // 90 ccw + y = 1 - oldX; + x = oldY; + break; + case UIDeviceOrientationPortraitUpsideDown: // 90 cw + x = 1 - oldY; + y = oldX; + break; + case UIDeviceOrientationLandscapeRight: // 180 + x = 1 - x; + y = 1 - y; + break; + case UIDeviceOrientationLandscapeLeft: + default: + // No rotation required + break; + } + return CGPointMake(x, y); +} +#endif + +- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"device is nil" details:nil]); + return; + } + + BOOL reset = ((NSNumber *)exposurePoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)exposurePoint[@"x"]).doubleValue; + y = ((NSNumber *)exposurePoint[@"y"]).doubleValue; + } + if (!device.isExposurePointOfInterestSupported) { + NSLog(@"Exposure point of interest is not supported. 
Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Exposure point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + [device setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyExposureMode:self.exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't switch camera"); + return; + } +#if TARGET_OS_IPHONE + [self.videoCapturer stopCapture]; +#endif + self._usingFrontCamera = !self._usingFrontCamera; + AVCaptureDevicePosition position = + self._usingFrontCamera ? 
AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; + AVCaptureDevice* videoDevice = [self findDeviceForPosition:position]; + AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice + targetWidth:self._lastTargetWidth + targetHeight:self._lastTargetHeight]; + [self.videoCapturer startCaptureWithDevice:videoDevice + format:selectedFormat + fps:[self selectFpsForFormat:selectedFormat + targetFps:self._lastTargetFps] + completionHandler:^(NSError* error) { + if (error != nil) { + result([FlutterError errorWithCode:@"Error while switching camera" + message:@"Error while switching camera" + details:error]); + } else { + result([NSNumber numberWithBool:self._usingFrontCamera]); + } + }]; +} + + +- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray* captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice* device in captureDevices) { + if (device.position == position) { + return device; + } + } + return captureDevices[0]; +} + +- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight { + NSArray* formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat* selectedFormat = nil; + long currentDiff = INT_MAX; + for (AVCaptureDeviceFormat* format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); + //NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", format.videoSupportedFrameRateRanges, dimension.width, dimension.height); + long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + 
currentDiff = diff; + } else if (diff == currentDiff && + pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, targetFps); +} + + +@end diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.h b/common/darwin/Classes/FlutterRPScreenRecorder.h new file mode 100644 index 0000000000..638cfb977b --- /dev/null +++ b/common/darwin/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1,13 @@ +#if TARGET_OS_IPHONE +#import +@interface FlutterRPScreenRecorder : RTCVideoCapturer + +- (void)startCapture; + +// Stops the capture session asynchronously and notifies callback on completion. +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; + +- (void)stopCapture; + +@end +#endif diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.m b/common/darwin/Classes/FlutterRPScreenRecorder.m new file mode 100644 index 0000000000..8abe8d3fad --- /dev/null +++ b/common/darwin/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1,94 @@ +#import "FlutterRPScreenRecorder.h" +#if TARGET_OS_IPHONE +#import + +// See: https://developer.apple.com/videos/play/wwdc2017/606/ + +@implementation FlutterRPScreenRecorder { + RPScreenRecorder* screenRecorder; + RTCVideoSource* source; +} + +- (instancetype)initWithDelegate:(__weak id)delegate { + source = delegate; + return [super initWithDelegate:delegate]; +} + +- (void)startCapture { + if (screenRecorder == NULL) + screenRecorder = [RPScreenRecorder sharedRecorder]; + + [screenRecorder setMicrophoneEnabled:NO]; + + if (![screenRecorder isAvailable]) { + NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not 
available!"); + return; + } + + if (@available(iOS 11.0, *)) { + [screenRecorder + startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, + RPSampleBufferType bufferType, NSError* _Nullable error) { + if (bufferType == RPSampleBufferTypeVideo) { // We want video only now + [self handleSourceBuffer:sampleBuffer sampleType:bufferType]; + } + } + completionHandler:^(NSError* _Nullable error) { + if (error != nil) + NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); + }]; + } else { + // Fallback on earlier versions + NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not available in versions " + @"lower than iOS 11 !"); + } +} + +- (void)stopCapture { + if (@available(iOS 11.0, *)) { + [screenRecorder stopCaptureWithHandler:^(NSError* _Nullable error) { + if (error != nil) + NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error); + }]; + } else { + // Fallback on earlier versions + NSLog(@"FlutterRPScreenRecorder.stopCapture: Screen recorder is not available in versions " + @"lower than iOS 11 !"); + } +} + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if (completionHandler != nil) { + completionHandler(); + } +} + +- (void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer + sampleType:(RPSampleBufferType)sampleType { + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || + !CMSampleBufferDataIsReady(sampleBuffer)) { + return; + } + + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (pixelBuffer == nil) { + return; + } + + size_t width = CVPixelBufferGetWidth(pixelBuffer); + size_t height = CVPixelBufferGetHeight(pixelBuffer); + + [source adaptOutputFormatToWidth:(int)(width / 2) height:(int)(height / 2) fps:8]; + + RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + int64_t timeStampNs = + 
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; + RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end +#endif diff --git a/common/darwin/Classes/FlutterRTCAudioSink-Interface.h b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h new file mode 100644 index 0000000000..8a0352333d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h @@ -0,0 +1,6 @@ +void RTCAudioSinkCallback (void *object, + const void *audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames); diff --git a/common/darwin/Classes/FlutterRTCAudioSink.h b/common/darwin/Classes/FlutterRTCAudioSink.h new file mode 100644 index 0000000000..34cf46669c --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink.h @@ -0,0 +1,14 @@ +#import +#import +#import + +@interface FlutterRTCAudioSink : NSObject + +@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef); +@property (nonatomic) CMAudioFormatDescriptionRef format; + +- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio; + +- (void) close; + +@end diff --git a/common/darwin/Classes/FlutterRTCAudioSink.mm b/common/darwin/Classes/FlutterRTCAudioSink.mm new file mode 100644 index 0000000000..4fb575b398 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink.mm @@ -0,0 +1,67 @@ +#import +#import "FlutterRTCAudioSink.h" +#import "RTCAudioSource+Private.h" +#include "media_stream_interface.h" +#include "audio_sink_bridge.cpp" + +@implementation FlutterRTCAudioSink { + AudioSinkBridge *_bridge; + webrtc::AudioSourceInterface* _audioSource; +} + +- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio { + self = [super init]; + rtc::scoped_refptr audioSourcePtr = audio.source.nativeAudioSource; + _audioSource = audioSourcePtr.get(); + _bridge = new 
AudioSinkBridge((void*)CFBridgingRetain(self)); + _audioSource->AddSink(_bridge); + return self; +} + +- (void) close { + _audioSource->RemoveSink(_bridge); + delete _bridge; + _bridge = nil; + _audioSource = nil; +} + +void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames) +{ + AudioBufferList audioBufferList; + AudioBuffer audioBuffer; + audioBuffer.mData = (void*) audio_data; + audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames; + audioBuffer.mNumberChannels = number_of_channels; + audioBufferList.mNumberBuffers = 1; + audioBufferList.mBuffers[0] = audioBuffer; + AudioStreamBasicDescription audioDescription; + audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels; + audioDescription.mBitsPerChannel = bits_per_sample; + audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels; + audioDescription.mChannelsPerFrame = number_of_channels; + audioDescription.mFormatID = kAudioFormatLinearPCM; + audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked; + audioDescription.mFramesPerPacket = 1; + audioDescription.mReserved = 0; + audioDescription.mSampleRate = sample_rate; + CMAudioFormatDescriptionRef formatDesc; + CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc); + CMSampleBufferRef buffer; + CMSampleTimingInfo timing; + timing.decodeTimeStamp = kCMTimeInvalid; + timing.presentationTimeStamp = CMTimeMake(0, sample_rate); + timing.duration = CMTimeMake(1, sample_rate); + CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer); + CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList); + @autoreleasepool { + FlutterRTCAudioSink* 
sink = (__bridge FlutterRTCAudioSink*)(object); + sink.format = formatDesc; + if (sink.bufferCallback != nil) { + sink.bufferCallback(buffer); + } else { + NSLog(@"Buffer callback is nil"); + } + } +} + +@end diff --git a/common/darwin/Classes/FlutterRTCDataChannel.h b/common/darwin/Classes/FlutterRTCDataChannel.h new file mode 100644 index 0000000000..2b1d685274 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.h @@ -0,0 +1,30 @@ +#import "FlutterWebRTCPlugin.h" + +@interface RTCDataChannel (Flutter) +@property(nonatomic, strong, nonnull) NSString* peerConnectionId; +@property(nonatomic, strong, nonnull) NSString* flutterChannelId; +@property(nonatomic, strong, nullable) FlutterEventSink eventSink; +@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel; +@property(nonatomic, strong, nullable) NSArray* eventQueue; +@end + +@interface FlutterWebRTCPlugin (RTCDataChannel) + +- (void)createDataChannel:(nonnull NSString*)peerConnectionId + label:(nonnull NSString*)label + config:(nonnull RTCDataChannelConfiguration*)config + messenger:(nonnull NSObject*)messenger + result:(nonnull FlutterResult)result; + +- (void)dataChannelClose:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId; + +- (void)dataChannelSend:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + data:(nonnull NSString*)data + type:(nonnull NSString*)type; + +- (void)dataChannelGetBufferedAmount:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + result:(nonnull FlutterResult)result; +@end diff --git a/common/darwin/Classes/FlutterRTCDataChannel.m b/common/darwin/Classes/FlutterRTCDataChannel.m new file mode 100644 index 0000000000..67e1083b2f --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.m @@ -0,0 +1,219 @@ +#import "FlutterRTCDataChannel.h" +#import +#import +#import "FlutterRTCPeerConnection.h" + +@implementation RTCDataChannel (Flutter) + +- 
(NSString*)peerConnectionId { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setPeerConnectionId:(NSString*)peerConnectionId { + objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSArray*)eventQueue { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventQueue:(NSArray*)eventQueue { + objc_setAssociatedObject(self, @selector(eventQueue), eventQueue, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSString*)flutterChannelId { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterChannelId:(NSString*)flutterChannelId { + objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + NSEnumerator* enumerator = [self.eventQueue objectEnumerator]; + id event; + while ((event = enumerator.nextObject) != nil) { + postEvent(sink, event); + }; + self.eventQueue = nil; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (RTCDataChannel) + +- (void)createDataChannel:(nonnull NSString*)peerConnectionId + label:(NSString*)label + config:(RTCDataChannelConfiguration*)config + 
messenger:(NSObject*)messenger + result:(nonnull FlutterResult)result { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; + + if (nil != dataChannel) { + dataChannel.peerConnectionId = peerConnectionId; + NSString* flutterId = [[NSUUID UUID] UUIDString]; + peerConnection.dataChannels[flutterId] = dataChannel; + dataChannel.flutterChannelId = flutterId; + dataChannel.delegate = self; + dataChannel.eventQueue = nil; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@", + peerConnectionId, flutterId] + binaryMessenger:messenger]; + + dataChannel.eventChannel = eventChannel; + [eventChannel setStreamHandler:dataChannel]; + + result(@{ + @"label" : label, + @"id" : [NSNumber numberWithInt:dataChannel.channelId], + @"flutterId" : flutterId + }); + } +} + +- (void)dataChannelClose:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSMutableDictionary* dataChannels = peerConnection.dataChannels; + RTCDataChannel* dataChannel = dataChannels[dataChannelId]; + if (dataChannel) { + FlutterEventChannel* eventChannel = dataChannel.eventChannel; + [dataChannel close]; + [dataChannels removeObjectForKey:dataChannelId]; + [eventChannel setStreamHandler:nil]; + dataChannel.eventChannel = nil; + } +} + +- (void)dataChannelGetBufferedAmount:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + result:(nonnull FlutterResult)result { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = peerConnection.dataChannels[dataChannelId]; + if(dataChannel == NULL || dataChannel.readyState != RTCDataChannelStateOpen) { + result([FlutterError + errorWithCode:[NSString 
stringWithFormat:@"%@Failed", @"dataChannelGetBufferedAmount"] + message:[NSString stringWithFormat:@"Error: dataChannel not found or not opened!"] + details:nil]); + } else { + result(@{@"bufferedAmount": @(dataChannel.bufferedAmount)}); + } +} + +- (void)dataChannelSend:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + data:(id)data + type:(NSString*)type { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = peerConnection.dataChannels[dataChannelId]; + + NSData* bytes = [type isEqualToString:@"binary"] ? ((FlutterStandardTypedData*)data).data + : [data dataUsingEncoding:NSUTF8StringEncoding]; + + RTCDataBuffer* buffer = [[RTCDataBuffer alloc] initWithData:bytes + isBinary:[type isEqualToString:@"binary"]]; + [dataChannel sendData:buffer]; +} + +- (NSString*)stringForDataChannelState:(RTCDataChannelState)state { + switch (state) { + case RTCDataChannelStateConnecting: + return @"connecting"; + case RTCDataChannelStateOpen: + return @"open"; + case RTCDataChannelStateClosing: + return @"closing"; + case RTCDataChannelStateClosed: + return @"closed"; + } + return nil; +} + +- (void)sendEvent:(id)event withChannel:(RTCDataChannel*)channel { + if (channel.eventSink) { + postEvent(channel.eventSink, event); + } else { + if (!channel.eventQueue) { + channel.eventQueue = [NSMutableArray array]; + } + channel.eventQueue = [channel.eventQueue arrayByAddingObject:event]; + } +} + +#pragma mark - RTCDataChannelDelegate methods + +// Called when the data channel state has changed. +- (void)dataChannelDidChangeState:(RTCDataChannel*)channel { + [self sendEvent:@{ + @"event" : @"dataChannelStateChanged", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"state" : [self stringForDataChannelState:channel.readyState] + } + withChannel:channel]; +} + +// Called when a data buffer was successfully received. 
+- (void)dataChannel:(RTCDataChannel*)channel didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer { + NSString* type; + id data; + if (buffer.isBinary) { + type = @"binary"; + data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; + } else { + type = @"text"; + data = [[NSString alloc] initWithData:buffer.data encoding:NSUTF8StringEncoding]; + } + + [self sendEvent:@{ + @"event" : @"dataChannelReceiveMessage", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"type" : type, + @"data" : (data ? data : [NSNull null]) + } + withChannel:channel]; +} + +- (void)dataChannel:(RTCDataChannel*)channel didChangeBufferedAmount:(uint64_t)amount { + [self sendEvent:@{ + @"event" : @"dataChannelBufferedAmountChange", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"bufferedAmount" : [NSNumber numberWithLongLong:channel.bufferedAmount], + @"changedAmount" : [NSNumber numberWithLongLong:amount] + } + withChannel:channel]; +} + +@end diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.h b/common/darwin/Classes/FlutterRTCDesktopCapturer.h new file mode 100644 index 0000000000..75dea33345 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.h @@ -0,0 +1,22 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif +#import +#import + +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (DesktopCapturer) + +- (void)getDisplayMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result; + +- (void)getDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result; + +- (void)updateDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result; + +- (void)getDesktopSourceThumbnail:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result; + +@end \ No newline at end of file diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.m b/common/darwin/Classes/FlutterRTCDesktopCapturer.m new file mode 100644 index 0000000000..fd88a8b689 --- 
/dev/null +++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.m @@ -0,0 +1,429 @@ +#import + +#import "FlutterRTCDesktopCapturer.h" + +#if TARGET_OS_IPHONE +#import +#import "FlutterBroadcastScreenCapturer.h" +#import "FlutterRPScreenRecorder.h" +#endif + +#import "VideoProcessingAdapter.h" +#import "LocalVideoTrack.h" + +#if TARGET_OS_OSX +RTCDesktopMediaList* _screen = nil; +RTCDesktopMediaList* _window = nil; +NSArray* _captureSources; +#endif + +@implementation FlutterWebRTCPlugin (DesktopCapturer) + +- (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result { + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + RTCVideoSource* videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES]; + NSString* trackUUID = [[NSUUID UUID] UUIDString]; + VideoProcessingAdapter *videoProcessingAdapter = [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource]; + +#if TARGET_OS_IPHONE + BOOL useBroadcastExtension = false; + BOOL presentBroadcastPicker = false; + + id videoConstraints = constraints[@"video"]; + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.deviceId + useBroadcastExtension = + [((NSDictionary*)videoConstraints)[@"deviceId"] hasPrefix:@"broadcast"]; + presentBroadcastPicker = + useBroadcastExtension && + ![((NSDictionary*)videoConstraints)[@"deviceId"] hasSuffix:@"-manual"]; + } + + id screenCapturer; + + if (useBroadcastExtension) { + screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoProcessingAdapter]; + } else { + screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:[videoProcessingAdapter source]]; + } + + [screenCapturer startCapture]; + NSLog(@"start %@ capture", useBroadcastExtension ? 
@"broadcast" : @"replykit"); + + self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) { + NSLog(@"stop %@ capture, trackID %@", useBroadcastExtension ? @"broadcast" : @"replykit", + trackUUID); + [screenCapturer stopCaptureWithCompletionHandler:handler]; + }; + + if (presentBroadcastPicker) { + NSString* extension = + [[[NSBundle mainBundle] infoDictionary] valueForKey:kRTCScreenSharingExtension]; + + RPSystemBroadcastPickerView* picker = [[RPSystemBroadcastPickerView alloc] init]; + picker.showsMicrophoneButton = false; + if (extension) { + picker.preferredExtension = extension; + } else { + NSLog(@"Not able to find the %@ key", kRTCScreenSharingExtension); + } + SEL selector = NSSelectorFromString(@"buttonPressed:"); + if ([picker respondsToSelector:selector]) { + [picker performSelector:selector withObject:nil]; + } + } +#endif + +#if TARGET_OS_OSX + /* example for constraints: + { + 'audio': false, + 'video": { + 'deviceId': {'exact': sourceId}, + 'mandatory': { + 'frameRate': 30.0 + }, + } + } + */ + NSString* sourceId = nil; + BOOL useDefaultScreen = NO; + NSInteger fps = 30; + id videoConstraints = constraints[@"video"]; + if ([videoConstraints isKindOfClass:[NSNumber class]] && [videoConstraints boolValue] == YES) { + useDefaultScreen = YES; + } else if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + NSDictionary* deviceId = videoConstraints[@"deviceId"]; + if (deviceId != nil && [deviceId isKindOfClass:[NSDictionary class]]) { + if (deviceId[@"exact"] != nil) { + sourceId = deviceId[@"exact"]; + if (sourceId == nil) { + result(@{@"error" : @"No deviceId.exact found"}); + return; + } + } + } else { + // fall back to default screen if no deviceId is specified + useDefaultScreen = YES; + } + id mandatory = videoConstraints[@"mandatory"]; + if (mandatory != nil && [mandatory isKindOfClass:[NSDictionary class]]) { + id frameRate = mandatory[@"frameRate"]; + if (frameRate != nil && [frameRate isKindOfClass:[NSNumber class]]) { + 
fps = [frameRate integerValue]; + } + } + } + RTCDesktopCapturer* desktopCapturer; + RTCDesktopSource* source = nil; + + if (useDefaultScreen) { + desktopCapturer = [[RTCDesktopCapturer alloc] initWithDefaultScreen:self + captureDelegate:videoProcessingAdapter]; + } else { + source = [self getSourceById:sourceId]; + if (source == nil) { + result(@{@"error" : [NSString stringWithFormat:@"No source found for id: %@", sourceId]}); + return; + } + desktopCapturer = [[RTCDesktopCapturer alloc] initWithSource:source + delegate:self + captureDelegate:videoProcessingAdapter]; + } + [desktopCapturer startCaptureWithFPS:fps]; + NSLog(@"start desktop capture: sourceId: %@, type: %@, fps: %lu", sourceId, + source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", fps); + + self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) { + NSLog(@"stop desktop capture: sourceId: %@, type: %@, trackID %@", sourceId, + source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", trackUUID); + [desktopCapturer stopCapture]; + handler(); + }; +#endif + + RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource + trackId:trackUUID]; + [mediaStream addVideoTrack:videoTrack]; + + LocalVideoTrack *localVideoTrack = [[LocalVideoTrack alloc] initWithTrack:videoTrack videoProcessing:videoProcessingAdapter]; + + [self.localTracks setObject:localVideoTrack forKey:trackUUID]; + + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCVideoTrack* track in mediaStream.videoTracks) { + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result( + @{@"streamId" : mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks}); +} + +- 
(void)getDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"getDesktopSources"); + + NSArray* types = [argsMap objectForKey:@"types"]; + if (types == nil) { + result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]); + return; + } + + if (![self buildDesktopSourcesListWithTypes:types forceReload:YES result:result]) { + NSLog(@"getDesktopSources failed."); + return; + } + + NSMutableArray* sources = [NSMutableArray array]; + NSEnumerator* enumerator = [_captureSources objectEnumerator]; + RTCDesktopSource* object; + while ((object = enumerator.nextObject) != nil) { + /*NSData *data = nil; + if([object thumbnail]) { + data = [[NSData alloc] init]; + NSImage *resizedImg = [self resizeImage:[object thumbnail] forSize:NSMakeSize(320, 180)]; + data = [resizedImg TIFFRepresentation]; + }*/ + [sources addObject:@{ + @"id" : object.sourceId, + @"name" : object.name, + @"thumbnailSize" : @{@"width" : @0, @"height" : @0}, + @"type" : object.sourceType == RTCDesktopSourceTypeScreen ? 
@"screen" : @"window", + //@"thumbnail": data, + }]; + } + result(@{@"sources" : sources}); +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +- (void)getDesktopSourceThumbnail:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"getDesktopSourceThumbnail"); + NSString* sourceId = argsMap[@"sourceId"]; + RTCDesktopSource* object = [self getSourceById:sourceId]; + if (object == nil) { + result(@{@"error" : @"No source found"}); + return; + } + NSImage* image = [object UpdateThumbnail]; + if (image != nil) { + NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)]; + NSData* data = [resizedImg TIFFRepresentation]; + result(data); + } else { + result(@{@"error" : @"No thumbnail found"}); + } + +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +- (void)updateDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"updateDesktopSources"); + NSArray* types = [argsMap objectForKey:@"types"]; + if (types == nil) { + result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]); + return; + } + if (![self buildDesktopSourcesListWithTypes:types forceReload:NO result:result]) { + NSLog(@"updateDesktopSources failed."); + return; + } + result(@{@"result" : @YES}); +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +#if TARGET_OS_OSX +- (NSImage*)resizeImage:(NSImage*)sourceImage forSize:(CGSize)targetSize { + CGSize imageSize = sourceImage.size; + CGFloat width = imageSize.width; + CGFloat height = imageSize.height; + CGFloat targetWidth = targetSize.width; + CGFloat targetHeight = targetSize.height; + CGFloat scaleFactor = 0.0; + CGFloat scaledWidth = targetWidth; + CGFloat scaledHeight = targetHeight; + CGPoint thumbnailPoint = CGPointMake(0.0, 0.0); + + if 
(CGSizeEqualToSize(imageSize, targetSize) == NO) { + CGFloat widthFactor = targetWidth / width; + CGFloat heightFactor = targetHeight / height; + + // scale to fit the longer + scaleFactor = (widthFactor > heightFactor) ? widthFactor : heightFactor; + scaledWidth = ceil(width * scaleFactor); + scaledHeight = ceil(height * scaleFactor); + + // center the image + if (widthFactor > heightFactor) { + thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5; + } else if (widthFactor < heightFactor) { + thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5; + } + } + + NSImage* newImage = [[NSImage alloc] initWithSize:NSMakeSize(scaledWidth, scaledHeight)]; + CGRect thumbnailRect = {thumbnailPoint, {scaledWidth, scaledHeight}}; + NSRect imageRect = NSMakeRect(0.0, 0.0, width, height); + + [newImage lockFocus]; + [sourceImage drawInRect:thumbnailRect fromRect:imageRect operation:NSCompositingOperationCopy fraction:1.0]; + [newImage unlockFocus]; + + return newImage; +} + +- (RTCDesktopSource*)getSourceById:(NSString*)sourceId { + NSEnumerator* enumerator = [_captureSources objectEnumerator]; + RTCDesktopSource* object; + while ((object = enumerator.nextObject) != nil) { + if ([sourceId isEqualToString:object.sourceId]) { + return object; + } + } + return nil; +} + +- (BOOL)buildDesktopSourcesListWithTypes:(NSArray*)types + forceReload:(BOOL)forceReload + result:(FlutterResult)result { + BOOL captureWindow = NO; + BOOL captureScreen = NO; + _captureSources = [NSMutableArray array]; + + NSEnumerator* typesEnumerator = [types objectEnumerator]; + NSString* type; + while ((type = typesEnumerator.nextObject) != nil) { + if ([type isEqualToString:@"screen"]) { + captureScreen = YES; + } else if ([type isEqualToString:@"window"]) { + captureWindow = YES; + } else { + result([FlutterError errorWithCode:@"ERROR" message:@"Invalid type" details:nil]); + return NO; + } + } + + if (!captureWindow && !captureScreen) { + result([FlutterError errorWithCode:@"ERROR" + message:@"At least one 
type is required" + details:nil]); + return NO; + } + + if (captureWindow) { + if (!_window) + _window = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeWindow delegate:self]; + [_window UpdateSourceList:forceReload updateAllThumbnails:YES]; + NSArray* sources = [_window getSources]; + _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources]; + } + if (captureScreen) { + if (!_screen) + _screen = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeScreen delegate:self]; + [_screen UpdateSourceList:forceReload updateAllThumbnails:YES]; + NSArray* sources = [_screen getSources]; + _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources]; + } + NSLog(@"captureSources: %lu", [_captureSources count]); + return YES; +} + +#pragma mark - RTCDesktopMediaListDelegate delegate + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceAdded: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + NSImage* image = [source UpdateThumbnail]; + NSData* data = [[NSData alloc] init]; + if (image != nil) { + NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)]; + data = [resizedImg TIFFRepresentation]; + } + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceAdded", + @"id" : source.sourceId, + @"name" : source.name, + @"thumbnailSize" : @{@"width" : @0, @"height" : @0}, + @"type" : source.sourceType == RTCDesktopSourceTypeScreen ? 
@"screen" : @"window", + @"thumbnail" : data + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceRemoved: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceRemoved", + @"id" : source.sourceId, + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceNameChanged: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceNameChanged", + @"id" : source.sourceId, + @"name" : source.name, + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceThumbnailChanged: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + NSImage* resizedImg = [self resizeImage:[source thumbnail] forSize:NSMakeSize(320, 180)]; + NSData* data = [resizedImg TIFFRepresentation]; + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceThumbnailChanged", + @"id" : source.sourceId, + @"thumbnail" : data + }); + } +} + +#pragma mark - RTCDesktopCapturerDelegate delegate + +- (void)didSourceCaptureStart:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureStart"); +} + +- (void)didSourceCapturePaused:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCapturePaused"); +} + +- (void)didSourceCaptureStop:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureStop"); +} + +- (void)didSourceCaptureError:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureError"); +} + +#endif + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.h b/common/darwin/Classes/FlutterRTCFrameCapturer.h new file mode 
100644 index 0000000000..7cc0ff28c2 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1,17 @@ +#import + +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +@interface FlutterRTCFrameCapturer : NSObject + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result; + ++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame; + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.m b/common/darwin/Classes/FlutterRTCFrameCapturer.m new file mode 100644 index 0000000000..fe748b1223 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1,175 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import "FlutterRTCFrameCapturer.h" + +@import CoreImage; +@import CoreVideo; + +@implementation FlutterRTCFrameCapturer { + RTCVideoTrack* _track; + NSString* _path; + FlutterResult _result; + bool _gotFrame; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result { + self = [super init]; + if (self) { + _gotFrame = false; + _track = track; + _path = path; + _result = result; + [track addRenderer:self]; + } + return self; +} + +- (void)setSize:(CGSize)size { +} + +- (void)renderFrame:(nullable RTCVideoFrame*)frame { + if (_gotFrame || frame == nil) + return; + _gotFrame = true; + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef; + bool shouldRelease; + if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; + shouldRelease = true; + } else { + pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer; + shouldRelease = false; + } + CIImage* ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; + CGRect outputSize; + if (@available(iOS 11, macOS 10.13, *)) { + switch (frame.rotation) { + case RTCVideoRotation_90: + ciImage = [ciImage 
imageByApplyingCGOrientation:kCGImagePropertyOrientationRight]; + outputSize = CGRectMake(0, 0, frame.height, frame.width); + break; + case RTCVideoRotation_180: + ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationDown]; + outputSize = CGRectMake(0, 0, frame.width, frame.height); + break; + case RTCVideoRotation_270: + ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationLeft]; + outputSize = CGRectMake(0, 0, frame.height, frame.width); + break; + default: + outputSize = CGRectMake(0, 0, frame.width, frame.height); + break; + } + } else { + outputSize = CGRectMake(0, 0, frame.width, frame.height); + } + CIContext* tempContext = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize]; + NSData* imageData; +#if TARGET_OS_IPHONE + UIImage* uiImage = [UIImage imageWithCGImage:cgImage]; + if ([[_path pathExtension] isEqualToString:@"jpg"]) { + imageData = UIImageJPEGRepresentation(uiImage, 1.0f); + } else { + imageData = UIImagePNGRepresentation(uiImage); + } +#else + NSBitmapImageRep* newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; + [newRep setSize:NSSizeToCGSize(outputSize.size)]; + NSDictionary* quality = @{NSImageCompressionFactor : @1.0f}; + if ([[_path pathExtension] isEqualToString:@"jpg"]) { + imageData = [newRep representationUsingType:NSBitmapImageFileTypeJPEG properties:quality]; + } else { + imageData = [newRep representationUsingType:NSBitmapImageFileTypePNG properties:quality]; + } +#endif + CGImageRelease(cgImage); + if (shouldRelease) + CVPixelBufferRelease(pixelBufferRef); + if (imageData && [imageData writeToFile:_path atomically:NO]) { + NSLog(@"File writed successfully to %@", _path); + _result(nil); + } else { + NSLog(@"Failed to write to file"); + _result([FlutterError errorWithCode:@"CaptureFrameFailed" + message:@"Failed to write image data to file" + details:nil]); + } + dispatch_async(dispatch_get_main_queue(), ^{ + 
[self->_track removeRenderer:self]; + self->_track = nil; + }); +} + ++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame { + id i420Buffer = [frame.buffer toI420]; + CVPixelBufferRef outputPixelBuffer; + size_t w = (size_t)roundf(i420Buffer.width); + size_t h = (size_t)roundf(i420Buffer.height); + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, w, h, kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), &outputPixelBuffer); + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + [RTCYUVHelper I420ToARGB:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstARGB:dst + dstStrideARGB:(int)bytesPerRow + width:i420Buffer.width + 
height:i420Buffer.height]; + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + [RTCYUVHelper I420ToBGRA:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstBGRA:dst + dstStrideBGRA:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + } + } + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); + return outputPixelBuffer; +} + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCryptor.h b/common/darwin/Classes/FlutterRTCFrameCryptor.h new file mode 100644 index 0000000000..0e7a2f6007 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCryptor.h @@ -0,0 +1,51 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import + +#import "FlutterWebRTCPlugin.h" + +@interface RTCFrameCryptor (Flutter) +@property(nonatomic, strong, nullable) FlutterEventSink eventSink; +@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel; +@end + + +@interface FlutterWebRTCPlugin (FrameCryptor) + +- (void)handleFrameCryptorMethodCall:(nonnull FlutterMethodCall*)call result:(nonnull FlutterResult)result; + +- (void)frameCryptorFactoryCreateFrameCryptor:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorSetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorGetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorSetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorGetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorDispose:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorFactoryCreateKeyProvider:(nonnull NSDictionary*)constraints + result:(nonnull 
FlutterResult)result; + +- (void)keyProviderSetKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)keyProviderRatchetKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)keyProviderDispose:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCryptor.m b/common/darwin/Classes/FlutterRTCFrameCryptor.m new file mode 100644 index 0000000000..b75afc12aa --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCryptor.m @@ -0,0 +1,603 @@ +#import "FlutterRTCFrameCryptor.h" + +#import + +@implementation RTCFrameCryptor (Flutter) + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (FrameCryptor) + +- (void)handleFrameCryptorMethodCall:(nonnull FlutterMethodCall*)call + result:(nonnull FlutterResult)result { + NSDictionary* constraints = call.arguments; + NSString* method = call.method; + if ([method isEqualToString:@"frameCryptorFactoryCreateFrameCryptor"]) { + [self frameCryptorFactoryCreateFrameCryptor:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorSetKeyIndex"]) { + 
[self frameCryptorSetKeyIndex:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorGetKeyIndex"]) { + [self frameCryptorGetKeyIndex:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorSetEnabled"]) { + [self frameCryptorSetEnabled:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorGetEnabled"]) { + [self frameCryptorGetEnabled:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorDispose"]) { + [self frameCryptorDispose:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorFactoryCreateKeyProvider"]) { + [self frameCryptorFactoryCreateKeyProvider:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetSharedKey"]) { + [self keyProviderSetSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderRatchetSharedKey"]) { + [self keyProviderRatchetSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderExportSharedKey"]) { + [self keyProviderExportSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetKey"]) { + [self keyProviderSetKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderRatchetKey"]) { + [self keyProviderRatchetKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderExportKey"]) { + [self keyProviderExportKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetSifTrailer"]) { + [self keyProviderSetSifTrailer:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderDispose"]) { + [self keyProviderDispose:constraints result:result]; + } else { + result(FlutterMethodNotImplemented); + } +} + +- (RTCCryptorAlgorithm)getAlgorithm:(NSNumber*)algorithm { + switch ([algorithm intValue]) { + case 0: + return RTCCryptorAlgorithmAesGcm; + default: + return RTCCryptorAlgorithmAesGcm; + } +} + +- 
(void)frameCryptorFactoryCreateFrameCryptor:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* peerConnectionId = constraints[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSNumber* algorithm = constraints[@"algorithm"]; + if (algorithm == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid algorithm" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSString* keyProviderId = constraints[@"keyProviderId"]; + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid keyProviderId" + details:nil]); + return; + } + + RTCFrameCryptorKeyProvider* keyProvider = self.keyProviders[keyProviderId]; + if (keyProvider == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid keyProvider" + details:nil]); + return; + } + + NSString* type = constraints[@"type"]; + NSString* rtpSenderId = constraints[@"rtpSenderId"]; + NSString* rtpReceiverId = constraints[@"rtpReceiverId"]; + + if ([type isEqualToString:@"sender"]) { + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:rtpSenderId]; + if (sender == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + + RTCFrameCryptor* frameCryptor = + [[RTCFrameCryptor alloc] 
initWithFactory:self.peerConnectionFactory + rtpSender:sender + participantId:participantId + algorithm:[self getAlgorithm:algorithm] + keyProvider:keyProvider]; + NSString* frameCryptorId = [[NSUUID UUID] UUIDString]; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/frameCryptorEvent%@", + frameCryptorId] + binaryMessenger:self.messenger]; + + frameCryptor.eventChannel = eventChannel; + [eventChannel setStreamHandler:frameCryptor]; + frameCryptor.delegate = self; + + self.frameCryptors[frameCryptorId] = frameCryptor; + result(@{@"frameCryptorId" : frameCryptorId}); + } else if ([type isEqualToString:@"receiver"]) { + RTCRtpReceiver* receiver = [self getRtpReceiverById:peerConnection Id:rtpReceiverId]; + if (receiver == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: receiver not found!"] + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = + [[RTCFrameCryptor alloc] initWithFactory:self.peerConnectionFactory + rtpReceiver:receiver + participantId:participantId + algorithm:[self getAlgorithm:algorithm] + keyProvider:keyProvider]; + NSString* frameCryptorId = [[NSUUID UUID] UUIDString]; + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/frameCryptorEvent%@", + frameCryptorId] + binaryMessenger:self.messenger]; + + frameCryptor.eventChannel = eventChannel; + [eventChannel setStreamHandler:frameCryptor]; + frameCryptor.delegate = self; + self.frameCryptors[frameCryptorId] = frameCryptor; + result(@{@"frameCryptorId" : frameCryptorId}); + } else { + result([FlutterError errorWithCode:@"InvalidArgument" message:@"Invalid type" details:nil]); + return; + } +} + +- (void)frameCryptorSetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = 
constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + [frameCryptor setKeyIndex:[keyIndex intValue]]; + result(@{@"result" : @YES}); +} + +- (void)frameCryptorGetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetKeyIndexFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetKeyIndexFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + result(@{@"keyIndex" : [NSNumber numberWithInt:frameCryptor.keyIndex]}); +} + +- (void)frameCryptorSetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + + NSNumber* enabled = constraints[@"enabled"]; + 
if (enabled == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid enabled" + details:nil]); + return; + } + frameCryptor.enabled = [enabled boolValue]; + result(@{@"result" : enabled}); +} + +- (void)frameCryptorGetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetEnabledFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetEnabledFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + result(@{@"enabled" : [NSNumber numberWithBool:frameCryptor.enabled]}); +} + +- (void)frameCryptorDispose:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorDisposeFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorDisposeFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + [self.frameCryptors removeObjectForKey:frameCryptorId]; + frameCryptor.enabled = NO; + result(@{@"result" : @"success"}); +} + +- (void)frameCryptorFactoryCreateKeyProvider:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* keyProviderId = [[NSUUID UUID] UUIDString]; + + id keyProviderOptions = constraints[@"keyProviderOptions"]; + if (keyProviderOptions == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid keyProviderOptions" + details:nil]); + return; 
+ } + + NSNumber* sharedKey = keyProviderOptions[@"sharedKey"]; + if (sharedKey == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid sharedKey" + details:nil]); + return; + } + + FlutterStandardTypedData* ratchetSalt = keyProviderOptions[@"ratchetSalt"]; + if (ratchetSalt == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid ratchetSalt" + details:nil]); + return; + } + + NSNumber* ratchetWindowSize = keyProviderOptions[@"ratchetWindowSize"]; + if (ratchetWindowSize == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid ratchetWindowSize" + details:nil]); + return; + } + + NSNumber* failureTolerance = keyProviderOptions[@"failureTolerance"]; + + FlutterStandardTypedData* uncryptedMagicBytes = keyProviderOptions[@"uncryptedMagicBytes"]; + + NSNumber* keyRingSize = keyProviderOptions[@"keyRingSize"]; + + NSNumber* discardFrameWhenCryptorNotReady = keyProviderOptions[@"discardFrameWhenCryptorNotReady"]; + + RTCFrameCryptorKeyProvider* keyProvider = + [[RTCFrameCryptorKeyProvider alloc] initWithRatchetSalt:ratchetSalt.data + ratchetWindowSize:[ratchetWindowSize intValue] + sharedKeyMode:[sharedKey boolValue] + uncryptedMagicBytes: uncryptedMagicBytes != nil ? uncryptedMagicBytes.data : nil + failureTolerance:failureTolerance != nil ? [failureTolerance intValue] : -1 + keyRingSize:keyRingSize != nil ? [keyRingSize intValue] : 0 + discardFrameWhenCryptorNotReady:discardFrameWhenCryptorNotReady != nil ? 
[discardFrameWhenCryptorNotReady boolValue] : NO]; + self.keyProviders[keyProviderId] = keyProvider; + result(@{@"keyProviderId" : keyProviderId}); +} + +-(nullable RTCFrameCryptorKeyProvider *) getKeyProviderForId:(NSString*)keyProviderId result:(nonnull FlutterResult)result { + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProviderId" + details:nil]); + return nil; + } + RTCFrameCryptorKeyProvider* keyProvider = self.keyProviders[keyProviderId]; + if (keyProvider == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProvider" + details:nil]); + return nil; + } + return keyProvider; +} + +- (void)keyProviderSetSharedKey:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + FlutterStandardTypedData* key = constraints[@"key"]; + if (key == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid key" + details:nil]); + return; + } + + [keyProvider setSharedKey:key.data withIndex:[keyIndex intValue]]; + result(@{@"result" : @YES}); +} + +- (void)keyProviderRatchetSharedKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetSharedKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSData* newKey = [keyProvider 
ratchetSharedKey:[keyIndex intValue]]; + result(@{@"result" : newKey}); +} + + +- (void)keyProviderExportSharedKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderExportSharedKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSData* key = [keyProvider exportSharedKey:[keyIndex intValue]]; + result(@{@"result" : key}); +} + +- (void)keyProviderSetKey:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + FlutterStandardTypedData* key = constraints[@"key"]; + if (key == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid key" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + [keyProvider setKey:key.data withIndex:[keyIndex intValue] forParticipant:participantId]; + result(@{@"result" : @YES}); +} + +- (void)keyProviderRatchetKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if 
(keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSData* newKey = [keyProvider ratchetKey:participantId withIndex:[keyIndex intValue]]; + result(@{@"result" : newKey}); +} + +- (void)keyProviderExportKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderExportKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderExportKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSData* key = [keyProvider exportKey:participantId withIndex:[keyIndex intValue]]; + result(@{@"result" : key}); +} + +- (void)keyProviderSetSifTrailer:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + FlutterStandardTypedData* sifTrailer = constraints[@"sifTrailer"]; + if (sifTrailer == nil) { + result([FlutterError errorWithCode:@"keyProviderSetSifTrailerFailed" + message:@"Invalid key" + details:nil]); + return; + } + + [keyProvider setSifTrailer:sifTrailer.data]; + result(nil); +} + +- (void)keyProviderDispose:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + NSString* 
keyProviderId = constraints[@"keyProviderId"]; + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProviderId" + details:nil]); + return; + } + [self.keyProviders removeObjectForKey:keyProviderId]; + result(@{@"result" : @"success"}); +} + +- (NSString*)stringFromState:(FrameCryptionState)state { + switch (state) { + case FrameCryptionStateNew: + return @"new"; + case FrameCryptionStateOk: + return @"ok"; + case FrameCryptionStateEncryptionFailed: + return @"encryptionFailed"; + case FrameCryptionStateDecryptionFailed: + return @"decryptionFailed"; + case FrameCryptionStateMissingKey: + return @"missingKey"; + case FrameCryptionStateKeyRatcheted: + return @"keyRatcheted"; + case FrameCryptionStateInternalError: + return @"internalError"; + default: + return @"unknown"; + } +} + +#pragma mark - RTCFrameCryptorDelegate methods + +- (void)frameCryptor:(RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor + didStateChangeWithParticipantId:(NSString*)participantId + withState:(FrameCryptionState)stateChanged { + if (frameCryptor.eventSink) { + postEvent(frameCryptor.eventSink, @{ + @"event" : @"frameCryptionStateChanged", + @"participantId" : participantId, + @"state" : [self stringFromState:stateChanged] + }); + } +} + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.h b/common/darwin/Classes/FlutterRTCMediaRecorder.h new file mode 100644 index 0000000000..eac82e8b4d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaRecorder.h @@ -0,0 +1,24 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif +#import + +@import Foundation; +@import AVFoundation; + +@interface FlutterRTCMediaRecorder : NSObject + +@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack; +@property(nonatomic, strong) NSURL* _Nonnull output; +@property(nonatomic, strong) AVAssetWriter* _Nullable assetWriter; +@property(nonatomic, strong) AVAssetWriterInput* _Nullable writerInput; + +- 
(instancetype _Nonnull)initWithVideoTrack:(RTCVideoTrack* _Nullable)video + audioTrack:(RTCAudioTrack* _Nullable)audio + outputFile:(NSURL* _Nonnull)out; + +- (void)stop:(_Nonnull FlutterResult)result; + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.m b/common/darwin/Classes/FlutterRTCMediaRecorder.m new file mode 100644 index 0000000000..7661aae519 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaRecorder.m @@ -0,0 +1,168 @@ +#import +#import "FlutterRTCMediaRecorder.h" +#import "FlutterRTCAudioSink.h" +#import "FlutterRTCFrameCapturer.h" + +@import AVFoundation; + +@implementation FlutterRTCMediaRecorder { + int framesCount; + bool isInitialized; + CGSize _renderSize; + FlutterRTCAudioSink* _audioSink; + AVAssetWriterInput* _audioWriter; + int64_t _startTime; +} + +- (instancetype)initWithVideoTrack:(RTCVideoTrack *)video audioTrack:(RTCAudioTrack *)audio outputFile:(NSURL *)out { + self = [super init]; + isInitialized = false; + self.videoTrack = video; + self.output = out; + [video addRenderer:self]; + framesCount = 0; + if (audio != nil) + _audioSink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:audio]; + else + NSLog(@"Audio track is nil"); + _startTime = -1; + return self; +} + +- (void)initialize:(CGSize)size { + _renderSize = size; + NSDictionary *videoSettings = @{ + AVVideoCompressionPropertiesKey: @{AVVideoAverageBitRateKey: @(6*1024*1024)}, + AVVideoCodecKey: AVVideoCodecTypeH264, + AVVideoHeightKey: @(size.height), + AVVideoWidthKey: @(size.width), + }; + self.writerInput = [[AVAssetWriterInput alloc] + initWithMediaType:AVMediaTypeVideo + outputSettings:videoSettings]; + self.writerInput.expectsMediaDataInRealTime = true; + self.writerInput.mediaTimeScale = 30; + + if (_audioSink != nil) { + AudioChannelLayout acl; + bzero(&acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + NSDictionary* audioSettings = @{ + AVFormatIDKey: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], + AVNumberOfChannelsKey: 
@1, + AVSampleRateKey: @44100.0, + AVChannelLayoutKey: [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)], + AVEncoderBitRateKey: @64000, + }; + _audioWriter = [[AVAssetWriterInput alloc] + initWithMediaType:AVMediaTypeAudio + outputSettings:audioSettings + sourceFormatHint:_audioSink.format]; + _audioWriter.expectsMediaDataInRealTime = true; + } + + NSError *error; + self.assetWriter = [[AVAssetWriter alloc] + initWithURL:self.output + fileType:AVFileTypeMPEG4 + error:&error]; + if (error != nil) + NSLog(@"%@",[error localizedDescription]); + self.assetWriter.shouldOptimizeForNetworkUse = true; + [self.assetWriter addInput:self.writerInput]; + if (_audioWriter != nil) { + [self.assetWriter addInput:_audioWriter]; + _audioSink.bufferCallback = ^(CMSampleBufferRef buffer){ + if (self->_audioWriter.readyForMoreMediaData) { + if ([self->_audioWriter appendSampleBuffer:buffer]) + NSLog(@"Audio frame appended"); + else + NSLog(@"Audioframe not appended %@", self.assetWriter.error); + } + }; + } + [self.assetWriter startWriting]; + [self.assetWriter startSessionAtSourceTime:kCMTimeZero]; + + isInitialized = true; +} + +- (void)setSize:(CGSize)size { +} + +- (void)renderFrame:(nullable RTCVideoFrame *)frame { + if (frame == nil) { + return; + } + if (!isInitialized) { + [self initialize:CGSizeMake((CGFloat) frame.width, (CGFloat) frame.height)]; + } + if (!self.writerInput.readyForMoreMediaData) { + NSLog(@"Drop frame, not ready"); + return; + } + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef; + BOOL shouldRelease = false; + if ([buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; + } else { + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; + shouldRelease = true; + } + CMVideoFormatDescriptionRef formatDescription; + OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBufferRef, &formatDescription); + + CMSampleTimingInfo 
timingInfo;
+
+    timingInfo.decodeTimeStamp = kCMTimeInvalid;
+    if (_startTime == -1) {
+        _startTime = frame.timeStampNs / 1000;
+    }
+    int64_t frameTime = (frame.timeStampNs / 1000) - _startTime;
+    timingInfo.presentationTimeStamp = CMTimeMake(frameTime, 1000000);
+    framesCount++;
+
+    CMSampleBufferRef outBuffer;
+
+    status = CMSampleBufferCreateReadyWithImageBuffer(
+        kCFAllocatorDefault,
+        pixelBufferRef,
+        formatDescription,
+        &timingInfo,
+        &outBuffer
+    );
+
+    if (![self.writerInput appendSampleBuffer:outBuffer]) {
+        NSLog(@"Frame not appended %@", self.assetWriter.error);
+    }
+    if (outBuffer) CFRelease(outBuffer);  // BUGFIX: CF "Create" rule — sample buffer was leaked on every frame
+    if (formatDescription) CFRelease(formatDescription);  // BUGFIX: format description from CMVideoFormatDescriptionCreateForImageBuffer was also leaked per frame
+    #if TARGET_OS_IPHONE
+    if (shouldRelease) { CVPixelBufferRelease(pixelBufferRef); }  // NOTE(review): macOS path never releases converted buffers — confirm convertToCVPixelBuffer ownership
+    #endif
+}
+
+- (void)stop:(FlutterResult _Nonnull) result {
+    if (_audioSink != nil) {
+        _audioSink.bufferCallback = nil;
+        [_audioSink close];
+    }
+    [self.videoTrack removeRenderer:self];
+    [self.writerInput markAsFinished];
+    [_audioWriter markAsFinished];
+    dispatch_async(dispatch_get_main_queue(), ^{
+        [self.assetWriter finishWritingWithCompletionHandler:^{
+            NSError* error = self.assetWriter.error;
+            if (error == nil) {
+                result(nil);
+            } else {
+                result([FlutterError errorWithCode:@"Failed to save recording"
+                                           message:[error localizedDescription]
+                                           details:nil]);
+            }
+        }];
+    });
+}
+
+@end
diff --git a/common/darwin/Classes/FlutterRTCMediaStream.h b/common/darwin/Classes/FlutterRTCMediaStream.h
new file mode 100644
index 0000000000..95a6c960b0
--- /dev/null
+++ b/common/darwin/Classes/FlutterRTCMediaStream.h
@@ -0,0 +1,23 @@
+#import 
+#import "FlutterWebRTCPlugin.h"
+
+@interface RTCMediaStreamTrack (Flutter)
+@property(nonatomic, strong, nonnull) id settings;
+@end
+
+@interface FlutterWebRTCPlugin (RTCMediaStream)
+
+- (void)getUserMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;
+
+- (void)createLocalMediaStream:(nonnull FlutterResult)result;
+
+- (void)getSources:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackCaptureFrame:(nonnull 
RTCMediaStreamTrack*)track + toPath:(nonnull NSString*)path + result:(nonnull FlutterResult)result; + +- (void)selectAudioInput:(nonnull NSString*)deviceId result:(nullable FlutterResult)result; + +- (void)selectAudioOutput:(nonnull NSString*)deviceId result:(nullable FlutterResult)result; +@end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.m b/common/darwin/Classes/FlutterRTCMediaStream.m new file mode 100644 index 0000000000..5fb81eda23 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaStream.m @@ -0,0 +1,990 @@ +#import +#import "AudioUtils.h" +#import "CameraUtils.h" +#import "FlutterRTCFrameCapturer.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCPeerConnection.h" +#import "VideoProcessingAdapter.h" +#import "LocalVideoTrack.h" +#import "LocalAudioTrack.h" + +@implementation RTCMediaStreamTrack (Flutter) + +- (id)settings { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setSettings:(id)settings { + objc_setAssociatedObject(self, @selector(settings), settings, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} +@end + +@implementation AVCaptureDevice (Flutter) + +- (NSString*)positionString { + switch (self.position) { + case AVCaptureDevicePositionUnspecified: + return @"unspecified"; + case AVCaptureDevicePositionBack: + return @"back"; + case AVCaptureDevicePositionFront: + return @"front"; + } + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCMediaStream) + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} + */ +typedef void (^NavigatorUserMediaErrorCallback)(NSString* errorType, NSString* errorMessage); + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} + */ +typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream* mediaStream); + +- (NSDictionary*)defaultVideoConstraints { + return @{@"minWidth" : @"1280", @"minHeight" : @"720", @"minFrameRate" : @"30"}; +} + +- (NSDictionary*)defaultAudioConstraints { + return 
@{}; +} + + +- (RTCMediaConstraints*)defaultMediaStreamConstraints { + RTCMediaConstraints* constraints = + [[RTCMediaConstraints alloc] initWithMandatoryConstraints:[self defaultVideoConstraints] + optionalConstraints:nil]; + return constraints; +} + + +- (NSArray *) captureDevices { + if (@available(iOS 13.0, macOS 10.15, macCatalyst 14.0, tvOS 17.0, *)) { + NSArray *deviceTypes = @[ +#if TARGET_OS_IPHONE + AVCaptureDeviceTypeBuiltInTripleCamera, + AVCaptureDeviceTypeBuiltInDualCamera, + AVCaptureDeviceTypeBuiltInDualWideCamera, + AVCaptureDeviceTypeBuiltInWideAngleCamera, + AVCaptureDeviceTypeBuiltInTelephotoCamera, + AVCaptureDeviceTypeBuiltInUltraWideCamera, +#else + AVCaptureDeviceTypeBuiltInWideAngleCamera, +#endif + ]; + +#if !defined(TARGET_OS_IPHONE) + if (@available(macOS 13.0, *)) { + deviceTypes = [deviceTypes arrayByAddingObject:AVCaptureDeviceTypeDeskViewCamera]; + } +#endif + + if (@available(iOS 17.0, macOS 14.0, tvOS 17.0, *)) { + deviceTypes = [deviceTypes arrayByAddingObjectsFromArray: @[ + AVCaptureDeviceTypeContinuityCamera, + AVCaptureDeviceTypeExternal, + ]]; + } + + return [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified].devices; + } + return @[]; +} + +/** + * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the audio-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCAudioTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. 
+ * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCAudioTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. + */ +- (void)getUserAudio:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + id audioConstraints = constraints[@"audio"]; + NSString* audioDeviceId = @""; + RTCMediaConstraints *rtcConstraints; + if ([audioConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.audio.deviceId + NSString* deviceId = audioConstraints[@"deviceId"]; + + if (deviceId) { + audioDeviceId = deviceId; + } + + rtcConstraints = [self parseMediaConstraints:audioConstraints]; + // constraints.audio.optional.sourceId + id optionalConstraints = audioConstraints[@"optional"]; + if (optionalConstraints && [optionalConstraints isKindOfClass:[NSArray class]] && + !deviceId) { + NSArray* options = optionalConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString* sourceId = ((NSDictionary*)item)[@"sourceId"]; + if (sourceId) { + audioDeviceId = sourceId; + } + } + } + } + } else { + rtcConstraints = [self parseMediaConstraints:[self defaultAudioConstraints]]; + } + +#if !defined(TARGET_OS_IPHONE) + if (audioDeviceId != nil) { + [self selectAudioInput:audioDeviceId result:nil]; + } +#endif + + NSString* trackId = [[NSUUID UUID] UUIDString]; + RTCAudioSource *audioSource = [self.peerConnectionFactory audioSourceWithConstraints:rtcConstraints]; + RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithSource:audioSource trackId:trackId]; + LocalAudioTrack *localAudioTrack = [[LocalAudioTrack alloc] initWithTrack:audioTrack]; + + audioTrack.settings = @{ + @"deviceId" : audioDeviceId, + @"kind" : 
@"audioinput", + @"autoGainControl" : @YES, + @"echoCancellation" : @YES, + @"noiseSuppression" : @YES, + @"channelCount" : @1, + @"latency" : @0, + }; + + [mediaStream addAudioTrack:audioTrack]; + + [self.localTracks setObject:localAudioTrack forKey:trackId]; + + [self ensureAudioSession]; + + successCallback(mediaStream); +} + +// TODO: Use RCTConvert for constraints ... +- (void)getUserMedia:(NSDictionary*)constraints result:(FlutterResult)result { + // Initialize RTCMediaStream with a unique label in order to allow multiple + // RTCMediaStream instances initialized by multiple getUserMedia calls to be + // added to 1 RTCPeerConnection instance. As suggested by + // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good + // practice, use a UUID (conforming to RFC4122). + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + [self getUserMedia:constraints + successCallback:^(RTCMediaStream* mediaStream) { + NSString* mediaStreamId = mediaStream.streamId; + + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCAudioTrack* track in mediaStream.audioTracks) { + [audioTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live", + @"settings" : track.settings + }]; + } + + for (RTCVideoTrack* track in mediaStream.videoTracks) { + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live", + @"settings" : track.settings + }]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result(@{ + @"streamId" : mediaStreamId, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks + }); + } + errorCallback:^(NSString* errorType, 
NSString* errorMessage) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] + message:errorMessage + details:nil]); + } + mediaStream:mediaStream]; +} + +/** + * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which + * satisfies specific constraints and adds it to a specific + * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track + * of the respective media type and the specified {@code constraints} specify + * that a track of the respective media type is required; otherwise, reports + * success for the specified {@code mediaStream} to a specific + * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media + * type-specific iteration of or successfully concludes the + * {@code getUserMedia()} algorithm. The method will be recursively invoked to + * conclude the whole {@code getUserMedia()} algorithm either with (successful) + * satisfaction of the specified {@code constraints} or with failure. + * + * @param constraints The {@code MediaStreamConstraints} which specifies the + * requested media types and which the new {@code RTCAudioTrack} or + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm. + */ +- (void)getUserMedia:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + // If mediaStream contains no audioTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local audio content. 
+ if (mediaStream.audioTracks.count == 0) { + // constraints.audio + id audioConstraints = constraints[@"audio"]; + BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; + if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { + [self requestAccessForMediaType:AVMediaTypeAudio + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } + } + + // If mediaStream contains no videoTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local video content. + if (mediaStream.videoTracks.count == 0) { + // constraints.video + id videoConstraints = constraints[@"video"]; + if (videoConstraints) { + BOOL requestAccessForVideo = [videoConstraints isKindOfClass:[NSNumber class]] + ? [videoConstraints boolValue] + : [videoConstraints isKindOfClass:[NSDictionary class]]; +#if !TARGET_IPHONE_SIMULATOR + if (requestAccessForVideo) { + [self requestAccessForMediaType:AVMediaTypeVideo + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } +#endif + } + } + + // There are audioTracks and/or videoTracks in mediaStream as requested by + // constraints so the getUserMedia() is to conclude with success. 
+ successCallback(mediaStream); +} + +- (int)getConstrainInt:(NSDictionary*)constraints forKey:(NSString*)key { + if (![constraints isKindOfClass:[NSDictionary class]]) { + return 0; + } + + id constraint = constraints[key]; + if ([constraint isKindOfClass:[NSNumber class]]) { + return [constraint intValue]; + } else if ([constraint isKindOfClass:[NSString class]]) { + int possibleValue = [constraint intValue]; + if (possibleValue != 0) { + return possibleValue; + } + } else if ([constraint isKindOfClass:[NSDictionary class]]) { + id idealConstraint = constraint[@"ideal"]; + if ([idealConstraint isKindOfClass:[NSString class]]) { + int possibleValue = [idealConstraint intValue]; + if (possibleValue != 0) { + return possibleValue; + } + } + } + + return 0; +} + +/** + * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the video-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCVideoTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. 
+ */ +- (void)getUserVideo:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + id videoConstraints = constraints[@"video"]; + AVCaptureDevice* videoDevice; + NSString* videoDeviceId = nil; + NSString* facingMode = nil; + NSArray* captureDevices = [self captureDevices]; + + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.deviceId + NSString* deviceId = videoConstraints[@"deviceId"]; + + if (deviceId) { + for (AVCaptureDevice *device in captureDevices) { + if( [deviceId isEqualToString:device.uniqueID]) { + videoDevice = device; + videoDeviceId = deviceId; + } + } + } + + // constraints.video.optional + id optionalVideoConstraints = videoConstraints[@"optional"]; + if (optionalVideoConstraints && [optionalVideoConstraints isKindOfClass:[NSArray class]] && + !videoDevice) { + NSArray* options = optionalVideoConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString* sourceId = ((NSDictionary*)item)[@"sourceId"]; + if (sourceId) { + for (AVCaptureDevice *device in captureDevices) { + if( [sourceId isEqualToString:device.uniqueID]) { + videoDevice = device; + videoDeviceId = sourceId; + } + } + if (videoDevice) { + break; + } + } + } + } + } + + if (!videoDevice) { + // constraints.video.facingMode + // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode + facingMode = videoConstraints[@"facingMode"]; + if (facingMode && [facingMode isKindOfClass:[NSString class]]) { + AVCaptureDevicePosition position; + if ([facingMode isEqualToString:@"environment"]) { + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionBack; + } else if ([facingMode isEqualToString:@"user"]) { + self._usingFrontCamera = YES; + position = AVCaptureDevicePositionFront; + } else { + // If the specified facingMode value is not supported, 
fall back to + // the default video device. + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionUnspecified; + } + videoDevice = [self findDeviceForPosition:position]; + } + } + } + + if ([videoConstraints isKindOfClass:[NSNumber class]]) { + videoConstraints = @{@"mandatory": [self defaultVideoConstraints]}; + } + + NSInteger targetWidth = 0; + NSInteger targetHeight = 0; + NSInteger targetFps = 0; + + if (!videoDevice) { + videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + + int possibleWidth = [self getConstrainInt:videoConstraints forKey:@"width"]; + if (possibleWidth != 0) { + targetWidth = possibleWidth; + } + + int possibleHeight = [self getConstrainInt:videoConstraints forKey:@"height"]; + if (possibleHeight != 0) { + targetHeight = possibleHeight; + } + + int possibleFps = [self getConstrainInt:videoConstraints forKey:@"frameRate"]; + if (possibleFps != 0) { + targetFps = possibleFps; + } + + id mandatory = + [videoConstraints isKindOfClass:[NSDictionary class]] ? 
videoConstraints[@"mandatory"] : nil; + + // constraints.video.mandatory + if (mandatory && [mandatory isKindOfClass:[NSDictionary class]]) { + id widthConstraint = mandatory[@"minWidth"]; + if ([widthConstraint isKindOfClass:[NSString class]] || + [widthConstraint isKindOfClass:[NSNumber class]]) { + int possibleWidth = [widthConstraint intValue]; + if (possibleWidth != 0) { + targetWidth = possibleWidth; + } + } + id heightConstraint = mandatory[@"minHeight"]; + if ([heightConstraint isKindOfClass:[NSString class]] || + [heightConstraint isKindOfClass:[NSNumber class]]) { + int possibleHeight = [heightConstraint intValue]; + if (possibleHeight != 0) { + targetHeight = possibleHeight; + } + } + id fpsConstraint = mandatory[@"minFrameRate"]; + if ([fpsConstraint isKindOfClass:[NSString class]] || + [fpsConstraint isKindOfClass:[NSNumber class]]) { + int possibleFps = [fpsConstraint intValue]; + if (possibleFps != 0) { + targetFps = possibleFps; + } + } + } + + if (videoDevice) { + RTCVideoSource* videoSource = [self.peerConnectionFactory videoSource]; +#if TARGET_OS_OSX + if (self.videoCapturer) { + [self.videoCapturer stopCapture]; + } +#endif + + VideoProcessingAdapter *videoProcessingAdapter = [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource]; + self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoProcessingAdapter]; + + AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice + targetWidth:targetWidth + targetHeight:targetHeight]; + + CMVideoDimensions selectedDimension = CMVideoFormatDescriptionGetDimensions(selectedFormat.formatDescription); + NSInteger selectedWidth = (NSInteger) selectedDimension.width; + NSInteger selectedHeight = (NSInteger) selectedDimension.height; + NSInteger selectedFps = [self selectFpsForFormat:selectedFormat targetFps:targetFps]; + + self._lastTargetFps = selectedFps; + self._lastTargetWidth = targetWidth; + self._lastTargetHeight = targetHeight; + + NSLog(@"target 
format %ldx%ld, targetFps: %ld, selected format: %ldx%ld, selected fps %ld", targetWidth, targetHeight, targetFps, selectedWidth, selectedHeight, selectedFps); + + if ([videoDevice lockForConfiguration:NULL]) { + @try { + videoDevice.activeVideoMaxFrameDuration = CMTimeMake(1, (int32_t)selectedFps); + videoDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)selectedFps); + } @catch (NSException* exception) { + NSLog(@"Failed to set active frame rate!\n User info:%@", exception.userInfo); + } + [videoDevice unlockForConfiguration]; + } + + [self.videoCapturer startCaptureWithDevice:videoDevice + format:selectedFormat + fps:selectedFps + completionHandler:^(NSError* error) { + if (error) { + NSLog(@"Start capture error: %@", [error localizedDescription]); + } + }]; + + NSString* trackUUID = [[NSUUID UUID] UUIDString]; + RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource + trackId:trackUUID]; + LocalVideoTrack *localVideoTrack = [[LocalVideoTrack alloc] initWithTrack:videoTrack videoProcessing:videoProcessingAdapter]; + + __weak RTCCameraVideoCapturer* capturer = self.videoCapturer; + self.videoCapturerStopHandlers[videoTrack.trackId] = ^(CompletionHandler handler) { + NSLog(@"Stop video capturer, trackID %@", videoTrack.trackId); + [capturer stopCaptureWithCompletionHandler:handler]; + }; + + if (!videoDeviceId) { + videoDeviceId = videoDevice.uniqueID; + } + + if (!facingMode) { + facingMode = videoDevice.position == AVCaptureDevicePositionBack ? @"environment" + : videoDevice.position == AVCaptureDevicePositionFront ? 
@"user" + : @"unspecified"; + } + + videoTrack.settings = @{ + @"deviceId" : videoDeviceId, + @"kind" : @"videoinput", + @"width" : [NSNumber numberWithInteger:selectedWidth], + @"height" : [NSNumber numberWithInteger:selectedHeight], + @"frameRate" : [NSNumber numberWithInteger:selectedFps], + @"facingMode" : facingMode, + }; + + [mediaStream addVideoTrack:videoTrack]; + + [self.localTracks setObject:localVideoTrack forKey:trackUUID]; + + successCallback(mediaStream); + } else { + // According to step 6.2.3 of the getUserMedia() algorithm, if there is no + // source, fail with a new OverconstrainedError. + errorCallback(@"OverconstrainedError", /* errorMessage */ nil); + } +} + +- (void)mediaStreamRelease:(RTCMediaStream*)stream { + if (stream) { + for (RTCVideoTrack* track in stream.videoTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack* track in stream.audioTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:stream.streamId]; + } +} + +/** + * Obtains local media content of a specific type. Requests access for the + * specified {@code mediaType} if necessary. In other words, implements a media + * type-specific iteration of the {@code getUserMedia()} algorithm. + * + * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} + * which specifies the type of the local media content to obtain. + * @param constraints The {@code MediaStreamConstraints} which are to be + * satisfied by the obtained local media content. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is to collect the + * obtained local media content of the specified {@code mediaType}. 
+ */ +- (void)requestAccessForMediaType:(NSString*)mediaType + constraints:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + // According to step 6.2.1 of the getUserMedia() algorithm, if there is no + // source, fail "with a new DOMException object whose name attribute has the + // value NotFoundError." + // XXX The following approach does not work for audio in Simulator. That is + // because audio capture is done using AVAudioSession which does not use + // AVCaptureDevice there. Anyway, Simulator will not (visually) request access + // for audio. + if (mediaType == AVMediaTypeVideo && [self captureDevices].count == 0) { + // Since successCallback and errorCallback are asynchronously invoked + // elsewhere, make sure that the invocation here is consistent. + dispatch_async(dispatch_get_main_queue(), ^{ + errorCallback(@"DOMException", @"NotFoundError"); + }); + return; + } + +#if TARGET_OS_OSX + if (@available(macOS 10.14, *)) { +#endif + [AVCaptureDevice requestAccessForMediaType:mediaType + completionHandler:^(BOOL granted) { + dispatch_async(dispatch_get_main_queue(), ^{ + if (granted) { + NavigatorUserMediaSuccessCallback scb = + ^(RTCMediaStream* mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } else { + // According to step 10 Permission Failure of the getUserMedia() + // algorithm, if the user has denied permission, fail "with a new + // DOMException object whose name attribute has the value + 
// NotAllowedError." + errorCallback(@"DOMException", @"NotAllowedError"); + } + }); + }]; +#if TARGET_OS_OSX + } else { + // Fallback on earlier versions + NavigatorUserMediaSuccessCallback scb = ^(RTCMediaStream* mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } +#endif +} + +- (void)createLocalMediaStream:(FlutterResult)result { + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId" : [mediaStream streamId]}); +} + +- (void)getSources:(FlutterResult)result { + NSMutableArray* sources = [NSMutableArray array]; + NSArray* videoDevices = [self captureDevices]; + for (AVCaptureDevice* device in videoDevices) { + [sources addObject:@{ + @"facing" : device.positionString, + @"deviceId" : device.uniqueID, + @"label" : device.localizedName, + @"kind" : @"videoinput", + }]; + } +#if TARGET_OS_IPHONE + + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + for (AVAudioSessionPortDescription* port in session.session.availableInputs) { + // NSLog(@"input portName: %@, type %@", port.portName,port.portType); + [sources addObject:@{ + @"deviceId" : port.UID, + @"label" : port.portName, + @"groupId" : port.portType, + @"kind" : @"audioinput", + }]; + } + + for (AVAudioSessionPortDescription* port in session.currentRoute.outputs) { + // NSLog(@"output portName: %@, type %@", port.portName,port.portType); + if (session.currentRoute.outputs.count == 1 && ![port.UID 
isEqualToString:@"Speaker"]) { + [sources addObject:@{ + @"deviceId" : @"Speaker", + @"label" : @"Speaker", + @"groupId" : @"Speaker", + @"kind" : @"audiooutput", + }]; + } + [sources addObject:@{ + @"deviceId" : port.UID, + @"label" : port.portName, + @"groupId" : port.portType, + @"kind" : @"audiooutput", + }]; + } +#endif +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + + NSArray* inputDevices = [audioDeviceModule inputDevices]; + for (RTCIODevice* device in inputDevices) { + [sources addObject:@{ + @"deviceId" : device.deviceId, + @"label" : device.name, + @"kind" : @"audioinput", + }]; + } + + NSArray* outputDevices = [audioDeviceModule outputDevices]; + for (RTCIODevice* device in outputDevices) { + [sources addObject:@{ + @"deviceId" : device.deviceId, + @"label" : device.name, + @"kind" : @"audiooutput", + }]; + } +#endif + result(@{@"sources" : sources}); +} + +- (void)selectAudioInput:(NSString*)deviceId result:(FlutterResult)result { +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + NSArray* inputDevices = [audioDeviceModule inputDevices]; + for (RTCIODevice* device in inputDevices) { + if ([deviceId isEqualToString:device.deviceId]) { + [audioDeviceModule setInputDevice:device]; + if (result) + result(nil); + return; + } + } +#endif +#if TARGET_OS_IPHONE + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + for (AVAudioSessionPortDescription* port in session.session.availableInputs) { + if ([port.UID isEqualToString:deviceId]) { + if (self.preferredInput != port.portType) { + self.preferredInput = port.portType; + [AudioUtils selectAudioInput:self.preferredInput]; + } + break; + } + } + if (result) + result(nil); +#endif + if (result) + result([FlutterError errorWithCode:@"selectAudioInputFailed" + message:[NSString stringWithFormat:@"Error: deviceId not found!"] + details:nil]); +} + +- 
(void)selectAudioOutput:(NSString*)deviceId result:(FlutterResult)result { +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + NSArray* outputDevices = [audioDeviceModule outputDevices]; + for (RTCIODevice* device in outputDevices) { + if ([deviceId isEqualToString:device.deviceId]) { + [audioDeviceModule setOutputDevice:device]; + result(nil); + return; + } + } +#endif +#if TARGET_OS_IPHONE + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + NSError* setCategoryError = nil; + + if ([deviceId isEqualToString:@"Speaker"]) { + [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker + error:&setCategoryError]; + } else { + [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None + error:&setCategoryError]; + } + + if (setCategoryError == nil) { + result(nil); + return; + } + + result([FlutterError + errorWithCode:@"selectAudioOutputFailed" + message:[NSString + stringWithFormat:@"Error: %@", [setCategoryError localizedFailureReason]] + details:nil]); + +#endif + result([FlutterError errorWithCode:@"selectAudioOutputFailed" + message:[NSString stringWithFormat:@"Error: deviceId not found!"] + details:nil]); +} + +- (void)mediaStreamTrackRelease:(RTCMediaStream*)mediaStream track:(RTCMediaStreamTrack*)track { + // what's different to mediaStreamTrackStop? only call mediaStream explicitly? 
+ if (mediaStream && track) { + track.isEnabled = NO; + // FIXME this is called when track is removed from the MediaStream, + // but it doesn't mean it can not be added back using MediaStream.addTrack + // TODO: [self.localTracks removeObjectForKey:trackID]; + if ([track.kind isEqualToString:@"audio"]) { + [mediaStream removeAudioTrack:(RTCAudioTrack*)track]; + } else if ([track.kind isEqualToString:@"video"]) { + [mediaStream removeVideoTrack:(RTCVideoTrack*)track]; + } + } +} + +- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + result(@NO); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + result(@NO); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +} + +- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(FlutterResult)result { + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't set torch"); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set torch"); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); + return; + } + + device.torchMode = torch ? 
AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"Not supported on macOS. Can't set zoom"); + return; +#endif +#if TARGET_OS_IPHONE + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't set zoom"); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set zoom"); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#endif +} + +- (void)mediaStreamTrackCaptureFrame:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result { + self.frameCapturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track + toPath:path + result:result]; +} + +- (void)mediaStreamTrackStop:(RTCMediaStreamTrack*)track { + if (track) { + track.isEnabled = NO; + [self.localTracks removeObjectForKey:track.trackId]; + } +} + +- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray* captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice* device in captureDevices) { + if (device.position == position) { + return device; + } + } + if(captureDevices.count > 0) { + return captureDevices[0]; + } + return nil; +} + +- 
(AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight { + NSArray* formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat* selectedFormat = nil; + long currentDiff = INT_MAX; + for (AVCaptureDeviceFormat* format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); +#if TARGET_OS_IPHONE + if (@available(iOS 13.0, *)) { + if(format.isMultiCamSupported != AVCaptureMultiCamSession.multiCamSupported) { + continue; + } + } +#endif + //NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", format.videoSupportedFrameRateRanges, dimension.width, dimension.height); + long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + currentDiff = diff; + } else if (diff == currentDiff && + pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, targetFps); +} + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.h b/common/darwin/Classes/FlutterRTCPeerConnection.h new file mode 100644 index 0000000000..bd86076209 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1,65 @@ +#import "FlutterWebRTCPlugin.h" + +@interface RTCPeerConnection (Flutter) +@property(nonatomic, strong, nonnull) NSMutableDictionary* dataChannels; +@property(nonatomic, strong, nonnull) + NSMutableDictionary* 
remoteStreams; +@property(nonatomic, strong, nonnull) + NSMutableDictionary* remoteTracks; +@property(nonatomic, strong, nonnull) NSString* flutterId; +@property(nonatomic, strong, nullable) FlutterEventSink eventSink; +@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel; +@end + +@interface FlutterWebRTCPlugin (RTCPeerConnection) + +- (void)peerConnectionCreateOffer:(nonnull NSDictionary*)constraints + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionCreateAnswer:(nonnull NSDictionary*)constraints + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionSetLocalDescription:(nonnull RTCSessionDescription*)sdp + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionSetRemoteDescription:(nonnull RTCSessionDescription*)sdp + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionAddICECandidate:(nonnull RTCIceCandidate*)candidate + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionGetStats:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionGetStatsForTrackId:(nonnull NSString*)trackID + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (nonnull RTCMediaConstraints*)parseMediaConstraints:(nonnull NSDictionary*)constraints; + +- (void)peerConnectionSetConfiguration:(nonnull RTCConfiguration*)configuration + peerConnection:(nonnull RTCPeerConnection*)peerConnection; + +- (void)peerConnectionGetRtpReceiverCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result; + +- (void)peerConnectionGetRtpSenderCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull 
FlutterResult)result; + +- (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result; + +- (nullable NSString*)stringForSignalingState:(RTCSignalingState)state; + +- (nullable NSString*)stringForICEGatheringState:(RTCIceGatheringState)state; + +- (nullable NSString*)stringForICEConnectionState:(RTCIceConnectionState)state; + +- (nullable NSString*)stringForPeerConnectionState:(RTCPeerConnectionState)state; + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.m b/common/darwin/Classes/FlutterRTCPeerConnection.m new file mode 100644 index 0000000000..d37eafde10 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1,850 @@ +#import "FlutterRTCPeerConnection.h" +#import +#import "AudioUtils.h" +#import "FlutterRTCDataChannel.h" +#import "FlutterWebRTCPlugin.h" + +#import + +@implementation RTCPeerConnection (Flutter) + +@dynamic eventSink; + +- (NSString*)flutterId { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterId:(NSString*)flutterId { + objc_setAssociatedObject(self, @selector(flutterId), flutterId, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)dataChannels { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setDataChannels:(NSMutableDictionary*)dataChannels { + objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)remoteStreams { + 
return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteStreams:(NSMutableDictionary*)remoteStreams { + objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)remoteTracks { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteTracks:(NSMutableDictionary*)remoteTracks { + objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCPeerConnection) + +- (void)peerConnectionSetConfiguration:(RTCConfiguration*)configuration + peerConnection:(RTCPeerConnection*)peerConnection { + [peerConnection setConfiguration:configuration]; +} + +- (void)peerConnectionCreateOffer:(NSDictionary*)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + offerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription* sdp, NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"CreateOfferFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + }]; +} + +- (void)peerConnectionCreateAnswer:(NSDictionary*)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + answerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription* sdp, NSError* error) { + if (error) { + 
result([FlutterError + errorWithCode:@"CreateAnswerFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + }]; +} + +- (void)peerConnectionSetLocalDescription:(RTCSessionDescription*)sdp + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + setLocalDescription:sdp + completionHandler:^(NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"SetLocalDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionSetRemoteDescription:(RTCSessionDescription*)sdp + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + setRemoteDescription:sdp + completionHandler:^(NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"SetRemoteDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionAddICECandidate:(RTCIceCandidate*)candidate + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + addIceCandidate:candidate + completionHandler:^(NSError* _Nullable error) { + if (error) { + result([FlutterError + errorWithCode:@"AddIceCandidateFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionClose:(RTCPeerConnection*)peerConnection { + [peerConnection close]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. 
+ NSMutableDictionary* dataChannels = peerConnection.dataChannels; + for (NSString* dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. + } + [dataChannels removeAllObjects]; +} + +- (void)peerConnectionGetStatsForTrackId:(nonnull NSString*)trackID + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result { + RTCRtpSender* sender = nil; + RTCRtpReceiver* receiver = nil; + + for (RTCRtpSender* s in peerConnection.senders) { + if (s.track != nil && [s.track.trackId isEqualToString:trackID]) { + sender = s; + } + } + + for (RTCRtpReceiver* r in peerConnection.receivers) { + if (r.track != nil && [r.track.trackId isEqualToString:trackID]) { + receiver = r; + } + } + + if (sender != nil) { + [peerConnection statisticsForSender:sender + completionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; + } else if (receiver != nil) { + [peerConnection statisticsForReceiver:receiver + completionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; + } else { + result([FlutterError errorWithCode:@"GetStatsFailed" + message:[NSString stringWithFormat:@"Error %@", @""] + details:nil]); + } +} + +- 
(void)peerConnectionGetStats:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result { + [peerConnection statisticsWithCompletionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; +} + +- (NSString*)stringForICEConnectionState:(RTCIceConnectionState)state { + switch (state) { + case RTCIceConnectionStateNew: + return @"new"; + case RTCIceConnectionStateChecking: + return @"checking"; + case RTCIceConnectionStateConnected: + return @"connected"; + case RTCIceConnectionStateCompleted: + return @"completed"; + case RTCIceConnectionStateFailed: + return @"failed"; + case RTCIceConnectionStateDisconnected: + return @"disconnected"; + case RTCIceConnectionStateClosed: + return @"closed"; + case RTCIceConnectionStateCount: + return @"count"; + } + return nil; +} + +- (NSString*)stringForICEGatheringState:(RTCIceGatheringState)state { + switch (state) { + case RTCIceGatheringStateNew: + return @"new"; + case RTCIceGatheringStateGathering: + return @"gathering"; + case RTCIceGatheringStateComplete: + return @"complete"; + } + return nil; +} + +- (NSString*)stringForSignalingState:(RTCSignalingState)state { + switch (state) { + case RTCSignalingStateStable: + return @"stable"; + case RTCSignalingStateHaveLocalOffer: + return @"have-local-offer"; + case RTCSignalingStateHaveLocalPrAnswer: + return @"have-local-pranswer"; + case RTCSignalingStateHaveRemoteOffer: + return @"have-remote-offer"; + case RTCSignalingStateHaveRemotePrAnswer: + return @"have-remote-pranswer"; + case RTCSignalingStateClosed: + return @"closed"; + } + return nil; +} + +- (NSString*)stringForPeerConnectionState:(RTCPeerConnectionState)state { 
+ switch (state) { + case RTCPeerConnectionStateNew: + return @"new"; + case RTCPeerConnectionStateConnecting: + return @"connecting"; + case RTCPeerConnectionStateConnected: + return @"connected"; + case RTCPeerConnectionStateDisconnected: + return @"disconnected"; + case RTCPeerConnectionStateFailed: + return @"failed"; + case RTCPeerConnectionStateClosed: + return @"closed"; + } + return nil; +} + +/** + * Parses the constraint keys and values of a specific JavaScript object into + * a specific NSMutableDictionary in a format suitable for the + * initialization of a RTCMediaConstraints instance. + * + * @param src The JavaScript object which defines constraint keys and values and + * which is to be parsed into the specified dst. + * @param dst The NSMutableDictionary into which the constraint keys + * and values defined by src are to be written in a format suitable for + * the initialization of a RTCMediaConstraints instance. + */ +- (void)parseJavaScriptConstraints:(NSDictionary*)src + intoWebRTCConstraints:(NSMutableDictionary*)dst { + for (id srcKey in src) { + id srcValue = src[srcKey]; + NSString* dstValue; + + if ([srcValue isKindOfClass:[NSNumber class]]) { + dstValue = [srcValue boolValue] ? @"true" : @"false"; + } else { + dstValue = [srcValue description]; + } + dst[[srcKey description]] = dstValue; + } +} + +/** + * Parses a JavaScript object into a new RTCMediaConstraints instance. + * + * @param constraints The JavaScript object to parse into a new + * RTCMediaConstraints instance. + * @returns A new RTCMediaConstraints instance initialized with the + * mandatory and optional constraint keys and values specified by + * constraints. 
+ */ +- (RTCMediaConstraints*)parseMediaConstraints:(NSDictionary*)constraints { + id mandatory = constraints[@"mandatory"]; + NSMutableDictionary* mandatory_ = [NSMutableDictionary new]; + + if ([mandatory isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary*)mandatory intoWebRTCConstraints:mandatory_]; + } + + id optional = constraints[@"optional"]; + NSMutableDictionary* optional_ = [NSMutableDictionary new]; + + if ([optional isKindOfClass:[NSArray class]]) { + for (id o in (NSArray*)optional) { + if ([o isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary*)o intoWebRTCConstraints:optional_]; + } + } + } + + return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ + optionalConstraints:optional_]; +} + +#pragma mark - RTCPeerConnectionDelegate methods +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeSignalingState:(RTCSignalingState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{@"event" : @"signalingState", @"state" : [self stringForSignalingState:newState]}); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + mediaStream:(RTCMediaStream*)stream + didAddTrack:(RTCVideoTrack*)track { + peerConnection.remoteTracks[track.trackId] = track; + NSString* streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onAddTrack", + @"streamId" : streamId, + @"trackId" : track.trackId, + @"track" : @{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + } + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + 
mediaStream:(RTCMediaStream*)stream + didRemoveTrack:(RTCVideoTrack*)track { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + NSString* streamId = stream.streamId; + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRemoveTrack", + @"streamId" : streamId, + @"trackId" : track.trackId, + @"track" : @{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + } + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didAddStream:(RTCMediaStream*)stream { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + BOOL hasAudio = NO; + for (RTCAudioTrack* track in stream.audioTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [audioTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + hasAudio = YES; + } + + for (RTCVideoTrack* track in stream.videoTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + } + + NSString* streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + if (hasAudio) { + [self ensureAudioSession]; + } + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onAddStream", + @"streamId" : streamId, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks, + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didRemoveStream:(RTCMediaStream*)stream { + NSArray* keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; + // 
We assume there can be only one object for 1 key + if (keysArray.count > 1) { + NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", + stream.streamId); + } + NSString* streamId = stream.streamId; + + for (RTCVideoTrack* track in stream.videoTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack* track in stream.audioTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRemoveStream", + @"streamId" : streamId, + }); + } +} + +- (void)peerConnectionShouldNegotiate:(RTCPeerConnection*)peerConnection { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRenegotiationNeeded", + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeIceConnectionState:(RTCIceConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"iceConnectionState", + @"state" : [self stringForICEConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeIceGatheringState:(RTCIceGatheringState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{@"event" : @"iceGatheringState", @"state" : [self stringForICEGatheringState:newState]}); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didGenerateIceCandidate:(RTCIceCandidate*)candidate { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onCandidate", + @"candidate" : @{ + @"candidate" : candidate.sdp, + @"sdpMLineIndex" : @(candidate.sdpMLineIndex), + @"sdpMid" : candidate.sdpMid + } + }); + } +} + +- 
(void)peerConnection:(RTCPeerConnection*)peerConnection + didOpenDataChannel:(RTCDataChannel*)dataChannel { + if (-1 == dataChannel.channelId) { + return; + } + + NSString* flutterChannelId = [[NSUUID UUID] UUIDString]; + NSNumber* dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; + dataChannel.peerConnectionId = peerConnection.flutterId; + dataChannel.delegate = self; + peerConnection.dataChannels[flutterChannelId] = dataChannel; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@", + peerConnection.flutterId, flutterChannelId] + binaryMessenger:self.messenger]; + + dataChannel.eventChannel = eventChannel; + dataChannel.flutterChannelId = flutterChannelId; + dataChannel.eventQueue = nil; + + dispatch_async(dispatch_get_main_queue(), ^{ + // setStreamHandler on main thread + [eventChannel setStreamHandler:dataChannel]; + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"didOpenDataChannel", + @"id" : dataChannelId, + @"label" : dataChannel.label, + @"flutterId" : flutterChannelId + }); + } + }); +} + +/** Called any time the PeerConnectionState changes. */ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeConnectionState:(RTCPeerConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"peerConnectionState", + @"state" : [self stringForPeerConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didStartReceivingOnTransceiver:(RTCRtpTransceiver*)transceiver { +} + +/** Called when a receiver and its track are created. 
*/ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didAddReceiver:(RTCRtpReceiver*)rtpReceiver + streams:(NSArray*)mediaStreams { + // For unified-plan + NSMutableArray* streams = [NSMutableArray array]; + for (RTCMediaStream* stream in mediaStreams) { + [streams addObject:[self mediaStreamToMap:stream ownerTag:peerConnection.flutterId]]; + } + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + NSMutableDictionary* event = [NSMutableDictionary dictionary]; + [event addEntriesFromDictionary:@{ + @"event" : @"onTrack", + @"track" : [self mediaTrackToMap:rtpReceiver.track], + @"receiver" : [self receiverToMap:rtpReceiver], + @"streams" : streams, + }]; + + if (peerConnection.configuration.sdpSemantics == RTCSdpSemanticsUnifiedPlan) { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if (transceiver.receiver != nil && + [transceiver.receiver.receiverId isEqualToString:rtpReceiver.receiverId]) { + [event setValue:[self transceiverToMap:transceiver] forKey:@"transceiver"]; + } + } + } + + peerConnection.remoteTracks[rtpReceiver.track.trackId] = rtpReceiver.track; + if (mediaStreams.count > 0) { + peerConnection.remoteStreams[mediaStreams[0].streamId] = mediaStreams[0]; + } + + if ([rtpReceiver.track.kind isEqualToString:@"audio"]) { + [self ensureAudioSession]; + } + postEvent(eventSink, event); + } +} + +/** Called when the receiver and its track are removed. */ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didRemoveReceiver:(RTCRtpReceiver*)rtpReceiver { +} + +/** Called when the selected ICE candidate pair is changed. 
*/ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeLocalCandidate:(RTCIceCandidate*)local + remoteCandidate:(RTCIceCandidate*)remote + lastReceivedMs:(int)lastDataReceivedMs + changeReason:(NSString*)reason { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onSelectedCandidatePairChanged", + @"local" : @{ + @"candidate" : local.sdp, + @"sdpMLineIndex" : @(local.sdpMLineIndex), + @"sdpMid" : local.sdpMid + }, + @"remote" : @{ + @"candidate" : remote.sdp, + @"sdpMLineIndex" : @(remote.sdpMLineIndex), + @"sdpMid" : remote.sdpMid + }, + @"reason" : reason, + @"lastDataReceivedMs" : @(lastDataReceivedMs) + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didRemoveIceCandidates:(NSArray*)candidates { +} + +NSString* mediaTypeFromString(NSString* kind) { + NSString* mediaType = kRTCMediaStreamTrackKindAudio; + if ([kind isEqualToString:@"audio"]) { + mediaType = kRTCMediaStreamTrackKindAudio; + } else if ([kind isEqualToString:@"video"]) { + mediaType = kRTCMediaStreamTrackKindVideo; + } + return mediaType; +} + +NSString* parametersToString(NSDictionary* parameters) { + NSMutableArray* kvs = [NSMutableArray array]; + for (NSString* key in parameters) { + if (key.length > 0) { + [kvs addObject:[NSString stringWithFormat:@"%@=%@", key, parameters[key]]]; + } else { + [kvs addObject:parameters[key]]; + } + } + return [kvs componentsJoinedByString:@";"]; +} + +NSDictionary* stringToParameters(NSString* str) { + NSMutableDictionary* parameters = [NSMutableDictionary dictionary]; + NSArray* kvs = [str componentsSeparatedByString:@";"]; + for (NSString* kv in kvs) { + NSArray* kvArr = [kv componentsSeparatedByString:@"="]; + if (kvArr.count == 2) { + parameters[kvArr[0]] = kvArr[1]; + } else if (kvArr.count == 1) { + parameters[@""] = kvArr[0]; + } + } + return parameters; +} + +- (void)peerConnectionGetRtpReceiverCapabilities:(nonnull NSDictionary*)argsMap + 
result:(nonnull FlutterResult)result { + NSString* kind = argsMap[@"kind"]; + RTCRtpCapabilities* caps = + [self.peerConnectionFactory rtpReceiverCapabilitiesForKind:mediaTypeFromString(kind)]; + NSMutableArray* codecsMap = [NSMutableArray array]; + for (RTCRtpCodecCapability* c in caps.codecs) { + if ([kind isEqualToString:@"audio"]) { + [codecsMap addObject:@{ + @"channels" : c.numChannels, + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } else if ([kind isEqualToString:@"video"]) { + [codecsMap addObject:@{ + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } + } + result(@{ + @"codecs" : codecsMap, + @"headerExtensions" : @[], + @"fecMechanisms" : @[], + }); +} + +- (void)peerConnectionGetRtpSenderCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result { + NSString* kind = argsMap[@"kind"]; + RTCRtpCapabilities* caps = + [self.peerConnectionFactory rtpSenderCapabilitiesForKind:mediaTypeFromString(kind)]; + NSMutableArray* codecsMap = [NSMutableArray array]; + for (RTCRtpCodecCapability* c in caps.codecs) { + if ([kind isEqualToString:@"audio"]) { + [codecsMap addObject:@{ + @"channels" : c.numChannels, + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } else if ([kind isEqualToString:@"video"]) { + [codecsMap addObject:@{ + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } + } + result(@{ + @"codecs" : codecsMap, + @"headerExtensions" : @[], + @"fecMechanisms" : @[], + }); +} + +-(RTC_OBJC_TYPE(RTCRtpCodecCapability) *) findCodecCapability:(NSString *)kind + codec:(NSString *)codec + parameters:(NSDictionary*)parameters { + RTCRtpCapabilities* caps = [self.peerConnectionFactory rtpSenderCapabilitiesForKind: [kind isEqualToString:@"video"]? 
kRTCMediaStreamTrackKindVideo : kRTCMediaStreamTrackKindAudio]; + for(RTCRtpCodecCapability* capCodec in caps.codecs) { + if([capCodec.name isEqualToString:codec] && [capCodec.kind isEqualToString:kind]) { + BOOL matched = YES; + for(NSString* key in capCodec.parameters) { + NSString *value = [capCodec.parameters objectForKey:key]; + NSString *value2 = [parameters objectForKey:key]; + if(![value isEqualToString:value2]) { + matched = NO; + } + } + if(matched) { + return capCodec; + } + } + } + return nil; +} + +- (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result { + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:@"transceiverSetCodecPreferencesFailed" + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError errorWithCode:@"transceiverSetCodecPreferencesFailed" + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + id codecs = argsMap[@"codecs"]; + NSMutableArray* codecCaps = [NSMutableArray array]; + for (id c in codecs) { + NSArray* kindAndName = [c[@"mimeType"] componentsSeparatedByString:@"/"]; + NSString* kind = [kindAndName[0] lowercaseString]; + NSString* name = kindAndName[1]; + NSLog(@"codec %@/%@", kind, name); + NSDictionary* parameters = nil; + if (c[@"sdpFmtpLine"] != nil && ![((NSString*)c[@"sdpFmtpLine"]) isEqualToString:@""]) { + parameters = stringToParameters((NSString*)c[@"sdpFmtpLine"]); + } + RTCRtpCodecCapability * codec = [self findCodecCapability:kind codec:name parameters:parameters]; + if(codec != nil) { + [codecCaps 
addObject:codec]; + } + } + [transcevier setCodecPreferences:codecCaps]; + result(nil); +} + +@end diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.h b/common/darwin/Classes/FlutterRTCVideoRenderer.h new file mode 100644 index 0000000000..f644d57f86 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1,33 @@ +#import "FlutterWebRTCPlugin.h" + +#import +#import +#import +#import + +@interface FlutterRTCVideoRenderer + : NSObject + +/** + * The {@link RTCVideoTrack}, if any, which this instance renders. + */ +@property(nonatomic, strong) RTCVideoTrack* videoTrack; +@property(nonatomic) int64_t textureId; +@property(nonatomic, weak) id registry; +@property(nonatomic, strong) FlutterEventSink eventSink; + +- (instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +- (void)dispose; + +@end + +@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack; + +@end diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.m b/common/darwin/Classes/FlutterRTCVideoRenderer.m new file mode 100644 index 0000000000..8760444d2e --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1,297 @@ +#import "FlutterRTCVideoRenderer.h" + +#import +#import +#import +#import +#import + +#import + +#import "FlutterWebRTCPlugin.h" +#import + +@implementation FlutterRTCVideoRenderer { + CGSize _frameSize; + CGSize _renderSize; + CVPixelBufferRef _pixelBufferRef; + RTCVideoRotation _rotation; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; + bool _frameAvailable; + os_unfair_lock _lock; +} + +@synthesize textureId = _textureId; +@synthesize registry = _registry; +@synthesize eventSink = _eventSink; +@synthesize videoTrack = _videoTrack; + +- 
(instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _isFirstFrameRendered = false; + _frameAvailable = false; + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + _registry = registry; + _pixelBufferRef = nil; + _eventSink = nil; + _rotation = -1; + _textureId = [registry registerTexture:self]; + /*Create Event Channel.*/ + _eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + return self; +} + +- (CVPixelBufferRef)copyPixelBuffer { + CVPixelBufferRef buffer = nil; + os_unfair_lock_lock(&_lock); + if (_pixelBufferRef != nil && _frameAvailable) { + buffer = CVBufferRetain(_pixelBufferRef); + _frameAvailable = false; + } + os_unfair_lock_unlock(&_lock); + return buffer; +} + +- (void)dispose { + os_unfair_lock_lock(&_lock); + [_registry unregisterTexture:_textureId]; + _textureId = -1; + if (_pixelBufferRef) { + CVBufferRelease(_pixelBufferRef); + _pixelBufferRef = nil; + } + _frameAvailable = false; + os_unfair_lock_unlock(&_lock); +} + +- (void)setVideoTrack:(RTCVideoTrack*)videoTrack { + RTCVideoTrack* oldValue = self.videoTrack; + if (oldValue != videoTrack) { + os_unfair_lock_lock(&_lock); + _videoTrack = videoTrack; + os_unfair_lock_unlock(&_lock); + _isFirstFrameRendered = false; + if (oldValue) { + [oldValue removeRenderer:self]; + } + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + if (videoTrack) { + [videoTrack addRenderer:self]; + } + } +} + +- (id)correctRotation:(const id)src + withRotation:(RTCVideoRotation)rotation { + int rotated_width = src.width; + int rotated_height = src.height; + + if (rotation == RTCVideoRotation_90 || rotation == RTCVideoRotation_270) { + int temp = rotated_width; + rotated_width = rotated_height; + rotated_height = temp; + } + + 
id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width + height:rotated_height]; + + [RTCYUVHelper I420Rotate:src.dataY + srcStrideY:src.strideY + srcU:src.dataU + srcStrideU:src.strideU + srcV:src.dataV + srcStrideV:src.strideV + dstY:(uint8_t*)buffer.dataY + dstStrideY:buffer.strideY + dstU:(uint8_t*)buffer.dataU + dstStrideU:buffer.strideU + dstV:(uint8_t*)buffer.dataV + dstStrideV:buffer.strideV + width:src.width + height:src.height + mode:rotation]; + + return buffer; +} + +- (void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer + withFrame:(RTCVideoFrame*)frame { + id i420Buffer = [self correctRotation:[frame.buffer toI420] + withRotation:frame.rotation]; + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + + [RTCYUVHelper I420ToARGB:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + 
srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstARGB:dst + dstStrideARGB:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + [RTCYUVHelper I420ToBGRA:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstBGRA:dst + dstStrideBGRA:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + } + } + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); +} + +#pragma mark - RTCVideoRenderer methods +- (void)renderFrame:(RTCVideoFrame*)frame { + + os_unfair_lock_lock(&_lock); + if(_videoTrack == nil) { + os_unfair_lock_unlock(&_lock); + return; + } + if(!_frameAvailable && _pixelBufferRef) { + [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; + if(_textureId != -1) { + [_registry textureFrameAvailable:_textureId]; + } + _frameAvailable = true; + } + os_unfair_lock_unlock(&_lock); + + __weak FlutterRTCVideoRenderer* weakSelf = self; + if (_renderSize.width != frame.width || _renderSize.height != frame.height) { + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer* strongSelf = weakSelf; + if (strongSelf.eventSink) { + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeVideoSize", + @"id" : @(strongSelf.textureId), + @"width" : @(frame.width), + @"height" : @(frame.height), + }); + } + }); + _renderSize = CGSizeMake(frame.width, frame.height); + } + + if (frame.rotation != _rotation) { + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer* strongSelf = weakSelf; + if (strongSelf.eventSink) { + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeRotation", + @"id" : @(strongSelf.textureId), + @"rotation" : @(frame.rotation), + }); + } + }); + + _rotation = frame.rotation; + } + + // Notify the Flutter new pixelBufferRef to be 
ready. + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer* strongSelf = weakSelf; + if (!strongSelf->_isFirstFrameRendered) { + if (strongSelf.eventSink) { + strongSelf.eventSink(@{@"event" : @"didFirstFrameRendered"}); + strongSelf->_isFirstFrameRendered = true; + } + } + }); +} + +/** + * Sets the size of the video frame to render. + * + * @param size The size of the video frame to render. + */ +- (void)setSize:(CGSize)size { + os_unfair_lock_lock(&_lock); + if (size.width != _frameSize.width || size.height != _frameSize.height) { + if (_pixelBufferRef) { + CVBufferRelease(_pixelBufferRef); + } + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); + _frameAvailable = false; + _frameSize = size; + } + os_unfair_lock_unlock(&_lock); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger { + return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; +} + +- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack { + renderer.videoTrack = videoTrack; +} +@end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h new file mode 100644 index 0000000000..ee39d6345f --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1,96 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX 
+#import +#endif + +#import +#import +#import "LocalTrack.h" + +@class FlutterRTCVideoRenderer; +@class FlutterRTCFrameCapturer; +@class FlutterRTCMediaRecorder; +@class AudioManager; + +void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event); + +typedef void (^CompletionHandler)(void); + +typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); + +@interface FlutterWebRTCPlugin : NSObject + +@property(nonatomic, strong) RTCPeerConnectionFactory* _Nullable peerConnectionFactory; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable peerConnections; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable localStreams; +@property(nonatomic, strong) NSMutableDictionary>* _Nullable localTracks; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable renders; +@property(nonatomic, strong) NSMutableDictionary* recorders; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable videoCapturerStopHandlers; + +@property(nonatomic, strong) + NSMutableDictionary* _Nullable frameCryptors; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable keyProviders; + +#if TARGET_OS_IPHONE +@property(nonatomic, retain) + UIViewController* _Nullable viewController; /*for broadcast or ReplayKit */ +#endif + +@property(nonatomic, strong) FlutterEventSink _Nullable eventSink; +@property(nonatomic, strong) NSObject* _Nonnull messenger; +@property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer; +@property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer; +@property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput; + +@property(nonatomic, strong) NSString* _Nonnull focusMode; +@property(nonatomic, strong) NSString* _Nonnull exposureMode; + +@property(nonatomic) BOOL _usingFrontCamera; +@property(nonatomic) NSInteger _lastTargetWidth; +@property(nonatomic) NSInteger _lastTargetHeight; +@property(nonatomic) NSInteger _lastTargetFps; + +@property(nonatomic, strong) AudioManager* 
_Nullable audioManager; + +- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId + peerConnectionId:(NSString* _Nullable)peerConnectionId; +- (RTCMediaStreamTrack* _Nullable)trackForId:(NSString* _Nonnull)trackId + peerConnectionId:(NSString* _Nullable)peerConnectionId; +- (NSString*)audioTrackIdForVideoTrackId:(NSString*)videoTrackId; +- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nullable)Id; +- (NSDictionary* _Nullable)mediaStreamToMap:(RTCMediaStream* _Nonnull)stream + ownerTag:(NSString* _Nullable)ownerTag; +- (NSDictionary* _Nullable)mediaTrackToMap:(RTCMediaStreamTrack* _Nonnull)track; +- (NSDictionary* _Nullable)receiverToMap:(RTCRtpReceiver* _Nonnull)receiver; +- (NSDictionary* _Nullable)transceiverToMap:(RTCRtpTransceiver* _Nonnull)transceiver; + +- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId; + +- (BOOL)hasLocalAudioTrack; +- (void)ensureAudioSession; +- (void)deactiveRtcAudioSession; + +- (RTCRtpReceiver* _Nullable)getRtpReceiverById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nonnull)Id; +- (RTCRtpSender* _Nullable)getRtpSenderById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nonnull)Id; + ++ (FlutterWebRTCPlugin* _Nullable)sharedSingleton; + +@end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m new file mode 100644 index 0000000000..73f01aebe1 --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1,2382 @@ +#import "FlutterWebRTCPlugin.h" +#import "AudioUtils.h" +#import "CameraUtils.h" +#import "FlutterRTCDataChannel.h" +#import "FlutterRTCDesktopCapturer.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCPeerConnection.h" +#import "FlutterRTCVideoRenderer.h" +#import "FlutterRTCFrameCryptor.h" +#if TARGET_OS_IPHONE +#import "FlutterRTCMediaRecorder.h" +#import "FlutterRTCVideoPlatformViewFactory.h" +#import 
"FlutterRTCVideoPlatformViewController.h" +#endif +#import "AudioManager.h" + +#import +#import +#import + +#import "LocalTrack.h" +#import "LocalAudioTrack.h" +#import "LocalVideoTrack.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wprotocol" + +@interface VideoEncoderFactory : RTCDefaultVideoEncoderFactory +@end + +@interface VideoDecoderFactory : RTCDefaultVideoDecoderFactory +@end + +@interface VideoEncoderFactorySimulcast : RTCVideoEncoderFactorySimulcast +@end + +NSArray* motifyH264ProfileLevelId( + NSArray* codecs) { + NSMutableArray* newCodecs = [[NSMutableArray alloc] init]; + NSInteger count = codecs.count; + for (NSInteger i = 0; i < count; i++) { + RTC_OBJC_TYPE(RTCVideoCodecInfo)* info = [codecs objectAtIndex:i]; + if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { + NSString* hexString = info.parameters[@"profile-level-id"]; + RTCH264ProfileLevelId* profileLevelId = + [[RTCH264ProfileLevelId alloc] initWithHexString:hexString]; + if (profileLevelId.level < RTCH264Level5_1) { + RTCH264ProfileLevelId* newProfileLevelId = + [[RTCH264ProfileLevelId alloc] initWithProfile:profileLevelId.profile + level:RTCH264Level5_1]; + // NSLog(@"profile-level-id: %@ => %@", hexString, [newProfileLevelId hexString]); + NSMutableDictionary* parametersCopy = [[NSMutableDictionary alloc] init]; + [parametersCopy addEntriesFromDictionary:info.parameters]; + [parametersCopy setObject:[newProfileLevelId hexString] forKey:@"profile-level-id"]; + [newCodecs insertObject:[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name + parameters:parametersCopy] + atIndex:i]; + } else { + [newCodecs insertObject:info atIndex:i]; + } + } else { + [newCodecs insertObject:info atIndex:i]; + } + } + return newCodecs; +} + +@implementation VideoEncoderFactory +- (NSArray*)supportedCodecs { + NSArray* codecs = [super supportedCodecs]; + return motifyH264ProfileLevelId(codecs); +} +@end + +@implementation VideoDecoderFactory +- (NSArray*)supportedCodecs { 
+ NSArray* codecs = [super supportedCodecs]; + return motifyH264ProfileLevelId(codecs); +} +@end + +@implementation VideoEncoderFactorySimulcast +- (NSArray*)supportedCodecs { + NSArray* codecs = [super supportedCodecs]; + return motifyH264ProfileLevelId(codecs); +} +@end + +void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event) { + dispatch_async(dispatch_get_main_queue(), ^{ + sink(event); + }); +} + +@implementation FlutterWebRTCPlugin { +#pragma clang diagnostic pop + FlutterMethodChannel* _methodChannel; + FlutterEventSink _eventSink; + FlutterEventChannel* _eventChannel; + id _registry; + id _messenger; + id _textures; + BOOL _speakerOn; + BOOL _speakerOnButPreferBluetooth; + AVAudioSessionPort _preferredInput; + AudioManager* _audioManager; +#if TARGET_OS_IPHONE + FLutterRTCVideoPlatformViewFactory *_platformViewFactory; +#endif +} + +static FlutterWebRTCPlugin *sharedSingleton; + ++ (FlutterWebRTCPlugin *)sharedSingleton +{ + @synchronized(self) + { + return sharedSingleton; + } +} + +@synthesize messenger = _messenger; +@synthesize eventSink = _eventSink; +@synthesize preferredInput = _preferredInput; +@synthesize audioManager = _audioManager; + ++ (void)registerWithRegistrar:(NSObject*)registrar { + FlutterMethodChannel* channel = + [FlutterMethodChannel methodChannelWithName:@"FlutterWebRTC.Method" + binaryMessenger:[registrar messenger]]; +#if TARGET_OS_IPHONE + UIViewController* viewController = (UIViewController*)registrar.messenger; +#endif + FlutterWebRTCPlugin* instance = + [[FlutterWebRTCPlugin alloc] initWithChannel:channel + registrar:registrar + messenger:[registrar messenger] +#if TARGET_OS_IPHONE + viewController:viewController +#endif + withTextures:[registrar textures]]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithChannel:(FlutterMethodChannel*)channel + registrar:(NSObject*)registrar + messenger:(NSObject*)messenger +#if TARGET_OS_IPHONE + 
viewController:(UIViewController*)viewController +#endif + withTextures:(NSObject*)textures { + + self = [super init]; + sharedSingleton = self; + + FlutterEventChannel* eventChannel = + [FlutterEventChannel eventChannelWithName:@"FlutterWebRTC.Event" binaryMessenger:messenger]; + [eventChannel setStreamHandler:self]; + + if (self) { + _methodChannel = channel; + _registry = registrar; + _textures = textures; + _messenger = messenger; + _speakerOn = NO; + _speakerOnButPreferBluetooth = NO; + _eventChannel = eventChannel; + _audioManager = AudioManager.sharedInstance; + +#if TARGET_OS_IPHONE + _preferredInput = AVAudioSessionPortHeadphones; + self.viewController = viewController; + _platformViewFactory = [[FLutterRTCVideoPlatformViewFactory alloc] initWithMessenger:messenger]; + [registrar registerViewFactory:_platformViewFactory withId:FLutterRTCVideoPlatformViewFactoryID]; +#endif + } + + NSDictionary* fieldTrials = @{kRTCFieldTrialUseNWPathMonitor : kRTCFieldTrialEnabledValue}; + RTCInitFieldTrialDictionary(fieldTrials); + + self.peerConnections = [NSMutableDictionary new]; + self.localStreams = [NSMutableDictionary new]; + self.localTracks = [NSMutableDictionary new]; + self.renders = [NSMutableDictionary new]; + self.frameCryptors = [NSMutableDictionary new]; + self.keyProviders = [NSMutableDictionary new]; + self.videoCapturerStopHandlers = [NSMutableDictionary new]; + self.recorders = [NSMutableDictionary new]; +#if TARGET_OS_IPHONE + self.focusMode = @"locked"; + self.exposureMode = @"locked"; + AVAudioSession* session = [AVAudioSession sharedInstance]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(didSessionRouteChange:) + name:AVAudioSessionRouteChangeNotification + object:session]; +#endif +#if TARGET_OS_OSX + [_peerConnectionFactory.audioDeviceModule setDevicesUpdatedHandler:^(void) { + NSLog(@"Handle Devices Updated!"); + if (self.eventSink) { + postEvent( self.eventSink, @{@"event" : @"onDeviceChange"}); + } + }]; 
+#endif + return self; +} + +- (void)detachFromEngineForRegistrar:(NSObject*)registrar { + for (RTCPeerConnection* peerConnection in _peerConnections.allValues) { + for (RTCDataChannel* dataChannel in peerConnection.dataChannels) { + dataChannel.eventSink = nil; + } + peerConnection.eventSink = nil; + } + _eventSink = nil; +} + +#pragma mark - FlutterStreamHandler methods + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} + +- (void)didSessionRouteChange:(NSNotification*)notification { +#if TARGET_OS_IPHONE + NSDictionary* interuptionDict = notification.userInfo; + NSInteger routeChangeReason = + [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; + if (self.eventSink && + (routeChangeReason == AVAudioSessionRouteChangeReasonNewDeviceAvailable || + routeChangeReason == AVAudioSessionRouteChangeReasonOldDeviceUnavailable || + routeChangeReason == AVAudioSessionRouteChangeReasonCategoryChange || + routeChangeReason == AVAudioSessionRouteChangeReasonOverride)) { + postEvent(self.eventSink, @{@"event" : @"onDeviceChange"}); + } +#endif +} + +- (void)initialize:(NSArray*)networkIgnoreMask +bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { + // RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); + if (!_peerConnectionFactory) { + VideoDecoderFactory* decoderFactory = [[VideoDecoderFactory alloc] init]; + VideoEncoderFactory* encoderFactory = [[VideoEncoderFactory alloc] init]; + + VideoEncoderFactorySimulcast* simulcastFactory = + [[VideoEncoderFactorySimulcast alloc] initWithPrimary:encoderFactory fallback:encoderFactory]; + + _peerConnectionFactory = + 
[[RTCPeerConnectionFactory alloc] initWithBypassVoiceProcessing:bypassVoiceProcessing + encoderFactory:simulcastFactory + decoderFactory:decoderFactory + audioProcessingModule:_audioManager.audioProcessingModule]; + + RTCPeerConnectionFactoryOptions *options = [[RTCPeerConnectionFactoryOptions alloc] init]; + for (NSString* adapter in networkIgnoreMask) + { + if ([@"adapterTypeEthernet" isEqualToString:adapter]) { + options.ignoreEthernetNetworkAdapter = YES; + } else if ([@"adapterTypeWifi" isEqualToString:adapter]) { + options.ignoreWiFiNetworkAdapter = YES; + } else if ([@"adapterTypeCellular" isEqualToString:adapter]) { + options.ignoreCellularNetworkAdapter = YES; + } else if ([@"adapterTypeVpn" isEqualToString:adapter]) { + options.ignoreVPNNetworkAdapter = YES; + } else if ([@"adapterTypeLoopback" isEqualToString:adapter]) { + options.ignoreLoopbackNetworkAdapter = YES; + } else if ([@"adapterTypeAny" isEqualToString:adapter]) { + options.ignoreEthernetNetworkAdapter = YES; + options.ignoreWiFiNetworkAdapter = YES; + options.ignoreCellularNetworkAdapter = YES; + options.ignoreVPNNetworkAdapter = YES; + options.ignoreLoopbackNetworkAdapter = YES; + } + } + + [_peerConnectionFactory setOptions: options]; + } +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + if ([@"initialize" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* options = argsMap[@"options"]; + BOOL enableBypassVoiceProcessing = NO; + if(options[@"bypassVoiceProcessing"] != nil){ + enableBypassVoiceProcessing = ((NSNumber*)options[@"bypassVoiceProcessing"]).boolValue; + } + NSArray* networkIgnoreMask = [NSArray new]; + if (options[@"networkIgnoreMask"] != nil) { + networkIgnoreMask = ((NSArray*)options[@"networkIgnoreMask"]); + } + [self initialize:networkIgnoreMask bypassVoiceProcessing:enableBypassVoiceProcessing]; + result(@""); + } else if ([@"createPeerConnection" isEqualToString:call.method]) { + NSDictionary* 
argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + NSDictionary* constraints = argsMap[@"constraints"]; + + RTCPeerConnection* peerConnection = [self.peerConnectionFactory + peerConnectionWithConfiguration:[self RTCConfiguration:configuration] + constraints:[self parseMediaConstraints:constraints] + delegate:self]; + + peerConnection.remoteStreams = [NSMutableDictionary new]; + peerConnection.remoteTracks = [NSMutableDictionary new]; + peerConnection.dataChannels = [NSMutableDictionary new]; + + NSString* peerConnectionId = [[NSUUID UUID] UUIDString]; + peerConnection.flutterId = peerConnectionId; + + /*Create Event Channel.*/ + peerConnection.eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectionEvent%@", + peerConnectionId] + binaryMessenger:_messenger]; + [peerConnection.eventChannel setStreamHandler:peerConnection]; + + self.peerConnections[peerConnectionId] = peerConnection; + result(@{@"peerConnectionId" : peerConnectionId}); + } else if ([@"getUserMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getUserMedia:constraints result:result]; + } else if ([@"getDisplayMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getDisplayMedia:constraints result:result]; + } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { + [self createLocalMediaStream:result]; + } else if ([@"getSources" isEqualToString:call.method]) { + [self getSources:result]; + } else if ([@"selectAudioInput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId = argsMap[@"deviceId"]; + [self selectAudioInput:deviceId result:result]; + } else if ([@"selectAudioOutput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId 
= argsMap[@"deviceId"]; + [self selectAudioOutput:deviceId result:result]; + } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + [self mediaStreamGetTracks:streamId result:result]; + } else if ([@"createOffer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createAnswer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionCreateAnswer:constraints peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream* stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection && stream) { + [peerConnection addStream:stream]; + result(@""); + } else { + result([FlutterError + 
errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString + stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"removeStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream* stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection && stream) { + [peerConnection removeStream:stream]; + result(nil); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString + stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"captureFrame" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"setLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary* descriptionMap = argsMap[@"description"]; + NSString* sdp = 
descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType + sdp:sdp]; + if (peerConnection) { + [self peerConnectionSetLocalDescription:description + peerConnection:peerConnection + result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary* descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType + sdp:sdp]; + + if (peerConnection) { + [self peerConnectionSetRemoteDescription:description + peerConnection:peerConnection + result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"sendDtmf" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* tone = argsMap[@"tone"]; + int duration = ((NSNumber*)argsMap[@"duration"]).intValue; + int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; + + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCRtpSender* audioSender = nil; + for (RTCRtpSender* rtpSender in peerConnection.senders) { + if ([[[rtpSender track] kind] isEqualToString:@"audio"]) { + 
audioSender = rtpSender; + } + } + if (audioSender) { + NSOperationQueue* queue = [[NSOperationQueue alloc] init]; + [queue addOperationWithBlock:^{ + double durationMs = duration / 1000.0; + double interToneGapMs = interToneGap / 1000.0; + [audioSender.dtmfSender insertDtmf:(NSString*)tone + duration:(NSTimeInterval)durationMs + interToneGap:(NSTimeInterval)interToneGapMs]; + NSLog(@"DTMF Tone played "); + }]; + } + + result(@{@"result" : @"success"}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addCandidate" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* candMap = argsMap[@"candidate"]; + NSString* sdp = candMap[@"candidate"]; + id sdpMLineIndexValue = candMap[@"sdpMLineIndex"]; + int sdpMLineIndex = 0; + if (![sdpMLineIndexValue isKindOfClass:[NSNull class]]) { + sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; + } + NSString* sdpMid = candMap[@"sdpMid"]; + + RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp + sdpMLineIndex:sdpMLineIndex + sdpMid:sdpMid]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection) { + [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getStats" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + id trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + if (trackId != nil 
&& trackId != [NSNull null]) { + return [self peerConnectionGetStatsForTrackId:trackId + peerConnection:peerConnection + result:result]; + } else { + return [self peerConnectionGetStats:peerConnection result:result]; + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createDataChannel" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* label = argsMap[@"label"]; + NSDictionary* dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; + [self createDataChannel:peerConnectionId + label:label + config:[self RTCDataChannelConfiguration:dataChannelDict] + messenger:_messenger + result:result]; + } else if ([@"dataChannelSend" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + NSString* type = argsMap[@"type"]; + id data = argsMap[@"data"]; + + [self dataChannelSend:peerConnectionId dataChannelId:dataChannelId data:data type:type]; + result(nil); + } else if ([@"dataChannelGetBufferedAmount" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + + [self dataChannelGetBufferedAmount:peerConnectionId dataChannelId:dataChannelId result:result]; + } + else if ([@"dataChannelClose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + [self dataChannelClose:peerConnectionId dataChannelId:dataChannelId]; + result(nil); + } else if ([@"streamDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = 
call.arguments; + NSString* streamId = argsMap[@"streamId"]; + RTCMediaStream* stream = self.localStreams[streamId]; + BOOL shouldCallResult = YES; + if (stream) { + for (RTCVideoTrack* track in stream.videoTracks) { + [_localTracks removeObjectForKey:track.trackId]; + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:videoTrack.trackId]; + if(renderer != nil) { + renderer.videoTrack = nil; + } + CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[videoTrack.trackId]; + if (stopHandler) { + shouldCallResult = NO; + stopHandler(^{ + NSLog(@"video capturer stopped, trackID = %@", videoTrack.trackId); + self.videoCapturer = nil; + result(nil); + }); + [self.videoCapturerStopHandlers removeObjectForKey:videoTrack.trackId]; + } + } + for (RTCAudioTrack* track in stream.audioTracks) { + [_localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:streamId]; + [self deactiveRtcAudioSession]; + } + if (shouldCallResult) { + // do not call if will be called in stopCapturer above. 
+ result(nil); + } + } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* enabled = argsMap[@"enabled"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil) { + track.isEnabled = enabled.boolValue; + } + result(nil); + } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + + RTCMediaStream* stream = self.localStreams[streamId]; + if (stream) { + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (track != nil) { + if ([track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + [stream addAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + [stream addVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" + message:nil + details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" + message:nil + details:nil]); + } + result(nil); + } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStream* stream = self.localStreams[streamId]; + if (stream) { + id track = self.localTracks[trackId]; + if (track != nil) { + if ([track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; + [stream removeAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = 
((LocalVideoTrack*)track).videoTrack; + [stream removeVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" + message:nil + details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" + message:nil + details:nil]); + } + result(nil); + } else if ([@"trackDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + BOOL audioTrack = NO; + for (NSString* streamId in self.localStreams) { + RTCMediaStream* stream = [self.localStreams objectForKey:streamId]; + for (RTCAudioTrack* track in stream.audioTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream removeAudioTrack:track]; + audioTrack = YES; + } + } + for (RTCVideoTrack* track in stream.videoTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream removeVideoTrack:track]; + CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[track.trackId]; + if (stopHandler) { + stopHandler(^{ + NSLog(@"video capturer stopped, trackID = %@", track.trackId); + }); + [self.videoCapturerStopHandlers removeObjectForKey:track.trackId]; + } + } + } + } + [_localTracks removeObjectForKey:trackId]; + if (audioTrack) { + [self ensureAudioSession]; + } + FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:trackId]; + if(renderer != nil) { + renderer.videoTrack = nil; + } + result(nil); + } else if ([@"restartIce" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (!peerConnection) { + result([FlutterError errorWithCode:@"restartIce: peerConnection is nil" + message:nil + details:nil]); + } else { + [peerConnection restartIce]; + result(nil); + } + } else if ([@"peerConnectionClose" isEqualToString:call.method] || + [@"peerConnectionDispose" 
isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [peerConnection close]; + [self.peerConnections removeObjectForKey:peerConnectionId]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary* dataChannels = peerConnection.dataChannels; + for (NSString* dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. + } + [dataChannels removeAllObjects]; + } + [self deactiveRtcAudioSession]; + result(nil); + } else if ([@"createVideoRenderer" isEqualToString:call.method]) { + FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures + messenger:_messenger]; + self.renders[@(render.textureId)] = render; + result(@{@"textureId" : @(render.textureId)}); + } else if ([@"videoRendererDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer* render = self.renders[textureId]; + if(render != nil) { + render.videoTrack = nil; + [render dispose]; + [self.renders removeObjectForKey:textureId]; + } + result(nil); + } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer* render = self.renders[textureId]; + NSString* streamId = argsMap[@"streamId"]; + NSString* ownerTag = argsMap[@"ownerTag"]; + NSString* trackId = argsMap[@"trackId"]; + if (!render) { + result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" + 
message:nil + details:nil]); + return; + } + RTCMediaStream* stream = nil; + RTCVideoTrack* videoTrack = nil; + if ([ownerTag isEqualToString:@"local"]) { + stream = _localStreams[streamId]; + } + if (!stream) { + stream = [self streamForId:streamId peerConnectionId:ownerTag]; + } + if (stream) { + NSArray* videoTracks = stream ? stream.videoTracks : nil; + videoTrack = videoTracks && videoTracks.count ? videoTracks[0] : nil; + for (RTCVideoTrack* track in videoTracks) { + if ([track.trackId isEqualToString:trackId]) { + videoTrack = track; + } + } + if (!videoTrack) { + NSLog(@"Not found video track for RTCMediaStream: %@", streamId); + } + } + [self rendererSetSrcObject:render stream:videoTrack]; + result(nil); + } +#if TARGET_OS_IPHONE + else if ([@"videoPlatformViewRendererSetSrcObject" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* viewId = argsMap[@"viewId"]; + FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId]; + NSString* streamId = argsMap[@"streamId"]; + NSString* ownerTag = argsMap[@"ownerTag"]; + NSString* trackId = argsMap[@"trackId"]; + if (!render) { + result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" + message:nil + details:nil]); + return; + } + RTCMediaStream* stream = nil; + RTCVideoTrack* videoTrack = nil; + if ([ownerTag isEqualToString:@"local"]) { + stream = _localStreams[streamId]; + } + if (!stream) { + stream = [self streamForId:streamId peerConnectionId:ownerTag]; + } + if (stream) { + NSArray* videoTracks = stream ? stream.videoTracks : nil; + videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; + for (RTCVideoTrack* track in videoTracks) { + if ([track.trackId isEqualToString:trackId]) { + videoTrack = track; + } + } + if (!videoTrack) { + NSLog(@"Not found video track for RTCMediaStream: %@", streamId); + } + } + render.videoTrack = videoTrack; + result(nil); + } else if ([@"videoPlatformViewRendererDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* viewId = argsMap[@"viewId"]; + FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId]; + if(render != nil) { + render.videoTrack = nil; + [_platformViewFactory.renders removeObjectForKey:viewId]; + } + result(nil); + } +#endif + else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [self mediaStreamTrackHasTorch:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + BOOL torch = [argsMap[@"torch"] boolValue]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " 
+ stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetZoom" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + double zoomLevel = [argsMap[@"zoomLevel"] doubleValue]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [self mediaStreamTrackSetZoom:videoTrack zoomLevel:zoomLevel result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* focusMode = argsMap[@"focusMode"]; + id track = self.localTracks[trackId]; + if (track != nil && focusMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusMode:videoTrack focusMode:focusMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusPoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* focusPoint = argsMap[@"focusPoint"]; + id track = self.localTracks[trackId]; + if (track != nil && focusPoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = 
(RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusPoint:videoTrack focusPoint:focusPoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposureMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* exposureMode = argsMap[@"exposureMode"]; + id track = self.localTracks[trackId]; + if (track != nil && exposureMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposureMode:videoTrack exposureMode:exposureMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposurePoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* exposurePoint = argsMap[@"exposurePoint"]; + id track = self.localTracks[trackId]; + if (track != nil && exposurePoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposurePoint:videoTrack exposurePoint:exposurePoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if 
([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSwitchCamera:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"setVolume" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* volume = argsMap[@"volume"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + RTCAudioSource* audioSource = audioTrack.source; + audioSource.volume = [volume doubleValue]; + } + result(nil); + } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* mute = argsMap[@"mute"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; + audioTrack.isEnabled = !mute.boolValue; + } + result(nil); + } +#if TARGET_OS_IPHONE + else if ([@"enableSpeakerphone" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* enable = argsMap[@"enable"]; + _speakerOn = enable.boolValue; + _speakerOnButPreferBluetooth = NO; + [AudioUtils setSpeakerphoneOn:_speakerOn]; + postEvent(self.eventSink, @{@"event" : 
@"onDeviceChange"}); + result(nil); + } + else if ([@"ensureAudioSession" isEqualToString:call.method]) { + [self ensureAudioSession]; + result(nil); + } + else if ([@"enableSpeakerphoneButPreferBluetooth" isEqualToString:call.method]) { + _speakerOn = YES; + _speakerOnButPreferBluetooth = YES; + [AudioUtils setSpeakerphoneOnButPreferBluetooth]; + result(nil); + } + else if([@"setAppleAudioConfiguration" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + [AudioUtils setAppleAudioConfiguration:configuration]; + result(nil); + } +#endif + else if ([@"getLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCSessionDescription* sdp = peerConnection.localDescription; + if (nil == sdp) { + result(nil); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCSessionDescription* sdp = peerConnection.remoteDescription; + if (nil == sdp) { + result(nil); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not 
found!"] + details:nil]); + } + } else if ([@"setConfiguration" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* configuration = argsMap[@"configuration"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] + peerConnection:peerConnection]; + result(nil); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection addTrack:track streamIds:streamIds]; + if (sender == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection.addTrack failed!"] + details:nil]); + return; + } + + result([self rtpSenderToMap:sender]); + } else if ([@"removeTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* 
peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + result(@{@"result" : @([peerConnection removeTrack:sender])}); + } else if ([@"addTransceiver" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* transceiverInit = argsMap[@"transceiverInit"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* mediaType = argsMap[@"mediaType"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transceiver = nil; + BOOL hasAudio = NO; + if (trackId != nil) { + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit* init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverWithTrack:track init:init]; + } else { + transceiver = [peerConnection addTransceiverWithTrack:track]; + } + if ([track.kind isEqualToString:@"audio"]) { + hasAudio = YES; + } + } else if (mediaType != nil) { + RTCRtpMediaType rtpMediaType = [self stringToRtpMediaType:mediaType]; + if 
(transceiverInit != nil) { + RTCRtpTransceiverInit* init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverOfType:(rtpMediaType) init:init]; + } else { + transceiver = [peerConnection addTransceiverOfType:rtpMediaType]; + } + if (rtpMediaType == RTCRtpMediaTypeAudio) { + hasAudio = YES; + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: Incomplete parameters!"] + details:nil]); + return; + } + + if (transceiver == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: can't addTransceiver!"] + details:nil]); + return; + } + + result([self transceiverToMap:transceiver]); + } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* direction = argsMap[@"direction"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + [transcevier setDirection:[self stringToTransceiverDirection:direction] error:nil]; + result(nil); + } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method] || + [@"rtpTransceiverGetDirection" isEqualToString:call.method]) { + NSDictionary* argsMap = 
call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + + if ([@"rtpTransceiverGetDirection" isEqualToString:call.method]) { + result(@{@"result" : [self transceiverDirectionString:transcevier.direction]}); + } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]) { + RTCRtpTransceiverDirection directionOut = transcevier.direction; + if ([transcevier currentDirection:&directionOut]) { + result(@{@"result" : [self transceiverDirectionString:directionOut]}); + } else { + result(nil); + } + } + } else if ([@"rtpTransceiverStop" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString 
stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + [transcevier stopInternal]; + result(nil); + } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSDictionary* parameters = argsMap[@"parameters"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setParameters:[self updateRtpParameters:sender.parameters with:parameters]]; + + result(@{@"result" : @(YES)}); + } else if ([@"rtpSenderReplaceTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack* 
track = nil; + if ([trackId length] > 0) { + track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack* track = nil; + if ([trackId length] > 0) { + track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetStreams" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + 
errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setStreamIds:streamIds]; + result(nil); + } else if ([@"getSenders" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* senders = [NSMutableArray array]; + for (RTCRtpSender* sender in peerConnection.senders) { + [senders addObject:[self rtpSenderToMap:sender]]; + } + + result(@{@"senders" : senders}); + } else if ([@"getReceivers" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* receivers = [NSMutableArray array]; + for (RTCRtpReceiver* receiver in peerConnection.receivers) { + [receivers addObject:[self receiverToMap:receiver]]; + } + + result(@{@"receivers" : receivers}); + } else if ([@"getTransceivers" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* 
peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* transceivers = [NSMutableArray array]; + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + [transceivers addObject:[self transceiverToMap:transceiver]]; + } + + result(@{@"transceivers" : transceivers}); + } else if ([@"getDesktopSources" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self getDesktopSources:argsMap result:result]; + } else if ([@"updateDesktopSources" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self updateDesktopSources:argsMap result:result]; + } else if ([@"getDesktopSourceThumbnail" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self getDesktopSourceThumbnail:argsMap result:result]; + } else if ([@"setCodecPreferences" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self transceiverSetCodecPreferences:argsMap result:result]; + } else if ([@"getRtpReceiverCapabilities" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self peerConnectionGetRtpReceiverCapabilities:argsMap result:result]; + } else if ([@"getRtpSenderCapabilities" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self peerConnectionGetRtpSenderCapabilities:argsMap result:result]; + } else if ([@"getSignalingState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self 
stringForSignalingState:peerConnection.signalingState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getIceGatheringState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForICEGatheringState:peerConnection.iceGatheringState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getIceConnectionState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForICEConnectionState:peerConnection.iceConnectionState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getConnectionState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForPeerConnectionState:peerConnection.connectionState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } +#if TARGET_OS_IOS + } else if 
([@"startRecordToFile" isEqualToString:call.method]){ + + NSDictionary* argsMap = call.arguments; + NSNumber* recorderId = argsMap[@"recorderId"]; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"videoTrackId"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* audioTrackId = [self audioTrackIdForVideoTrackId:trackId]; + + RTCMediaStreamTrack *track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + RTCMediaStreamTrack *audioTrack = [self trackForId:audioTrackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + NSURL* pathUrl = [NSURL fileURLWithPath:path]; + self.recorders[recorderId] = [[FlutterRTCMediaRecorder alloc] + initWithVideoTrack:(RTCVideoTrack *)track + audioTrack:(RTCAudioTrack *)audioTrack + outputFile:pathUrl + ]; + } + result(nil); + } else if ([@"stopRecordToFile" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* recorderId = argsMap[@"recorderId"]; + FlutterRTCMediaRecorder* recorder = self.recorders[recorderId]; + if (recorder != nil) { + [recorder stop:result]; + [self.recorders removeObjectForKey:recorderId]; + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@ failed",call.method] + message:[NSString stringWithFormat:@"Error: recorder with id %@ not found!",recorderId] + details:nil]); + } +#endif + } else { + [self handleFrameCryptorMethodCall:call result:result]; + } +} + +- (void)dealloc { + [_localTracks removeAllObjects]; + _localTracks = nil; + [_localStreams removeAllObjects]; + _localStreams = nil; + + for (NSString* peerConnectionId in _peerConnections) { + RTCPeerConnection* peerConnection = _peerConnections[peerConnectionId]; + peerConnection.delegate = nil; + [peerConnection close]; + } + [_peerConnections removeAllObjects]; + _peerConnectionFactory = nil; +} + +- (BOOL)hasLocalAudioTrack { + for (id key in _localTracks.allKeys) { + id track = [_localTracks 
objectForKey:key]; + if (track != nil && [track isKindOfClass:[LocalAudioTrack class]]) { + return YES; + } + } + return NO; +} + +- (void)ensureAudioSession { +#if TARGET_OS_IPHONE + [AudioUtils ensureAudioSessionWithRecording:[self hasLocalAudioTrack]]; +#endif +} + +- (void)deactiveRtcAudioSession { +#if TARGET_OS_IPHONE + if (![self hasLocalAudioTrack] && self.peerConnections.count == 0) { + [AudioUtils deactiveRtcAudioSession]; + } +#endif +} + +- (void)mediaStreamGetTracks:(NSString*)streamId result:(FlutterResult)result { + RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; + if (stream) { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + NSString* trackId = track.trackId; + [self.localTracks setObject:[[LocalAudioTrack alloc] initWithTrack:(RTCAudioTrack *)track] forKey:trackId]; + [audioTracks addObject:@{ + @"enabled" : @(track.isEnabled), + @"id" : trackId, + @"kind" : track.kind, + @"label" : trackId, + @"readyState" : @"live", + @"remote" : @(NO) + }]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + NSString* trackId = track.trackId; + [_localTracks setObject:[[LocalVideoTrack alloc] initWithTrack:(RTCVideoTrack *)track] + forKey:trackId]; + [videoTracks addObject:@{ + @"enabled" : @(track.isEnabled), + @"id" : trackId, + @"kind" : track.kind, + @"label" : trackId, + @"readyState" : @"live", + @"remote" : @(NO) + }]; + } + + result(@{@"audioTracks" : audioTracks, @"videoTracks" : videoTracks}); + } else { + result(nil); + } +} + +- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString*)peerConnectionId { + RTCMediaStream* stream = nil; + if (peerConnectionId.length > 0) { + RTCPeerConnection* peerConnection = [_peerConnections objectForKey:peerConnectionId]; + stream = peerConnection.remoteStreams[streamId]; + } else { + for (RTCPeerConnection* peerConnection in 
_peerConnections.allValues) { + stream = peerConnection.remoteStreams[streamId]; + if (stream) { + break; + } + } + } + if (!stream) { + stream = _localStreams[streamId]; + } + return stream; +} + +- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId { + RTCMediaStreamTrack *mediaStreamTrack = nil; + for (NSString* currentId in _peerConnections.allKeys) { + RTCPeerConnection* peerConnection = _peerConnections[currentId]; + mediaStreamTrack = peerConnection.remoteTracks[trackId]; + if (!mediaStreamTrack) { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if (transceiver.receiver.track != nil && + [transceiver.receiver.track.trackId isEqual:trackId]) { + mediaStreamTrack = transceiver.receiver.track; + break; + } + } + } + if (mediaStreamTrack) { + break; + } + } + + return mediaStreamTrack; +} + +- (NSString *)audioTrackIdForVideoTrackId:(NSString *)videoTrackId { + NSString *audioTrackId = nil; + + // Iterate through all peerConnections + for (NSString *peerConnectionId in self.peerConnections) { + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + // Iterate through the receivers to find the video track + for (RTCRtpReceiver *receiver in peerConnection.receivers) { + RTCMediaStreamTrack *track = [receiver valueForKey:@"track"]; + if ([track.kind isEqualToString:@"video"] && [track.trackId isEqualToString:videoTrackId]) { + // Found the video track, now look for the audio track in the same peerConnection + for (RTCRtpReceiver *audioReceiver in peerConnection.receivers) { + RTCMediaStreamTrack *audioTrack = [audioReceiver valueForKey:@"track"]; + if ([audioTrack.kind isEqualToString:@"audio"]) { + audioTrackId = audioTrack.trackId; + break; + } + } + break; + } + } + + // If the audioTrackId is found, break out of the loop + if (audioTrackId != nil) { + break; + } + } + + return audioTrackId; +} + +- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId 
peerConnectionId:(NSString*)peerConnectionId { + id track = _localTracks[trackId]; + RTCMediaStreamTrack *mediaStreamTrack = nil; + if (!track) { + for (NSString* currentId in _peerConnections.allKeys) { + if (peerConnectionId && [currentId isEqualToString:peerConnectionId] == false) { + continue; + } + RTCPeerConnection* peerConnection = _peerConnections[currentId]; + mediaStreamTrack = peerConnection.remoteTracks[trackId]; + if (!mediaStreamTrack) { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if (transceiver.receiver.track != nil && + [transceiver.receiver.track.trackId isEqual:trackId]) { + mediaStreamTrack = transceiver.receiver.track; + break; + } + } + } + if (mediaStreamTrack) { + break; + } + } + } else { + mediaStreamTrack = [track track]; + } + return mediaStreamTrack; +} + +- (RTCIceServer*)RTCIceServer:(id)json { + if (!json) { + NSLog(@"a valid iceServer value"); + return nil; + } + + if (![json isKindOfClass:[NSDictionary class]]) { + NSLog(@"must be an object"); + return nil; + } + + NSArray* urls; + if ([json[@"url"] isKindOfClass:[NSString class]]) { + // TODO: 'url' is non-standard + urls = @[ json[@"url"] ]; + } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { + urls = @[ json[@"urls"] ]; + } else { + urls = (NSArray*)json[@"urls"]; + } + + if (json[@"username"] != nil || json[@"credential"] != nil) { + return [[RTCIceServer alloc] initWithURLStrings:urls + username:json[@"username"] + credential:json[@"credential"]]; + } + + return [[RTCIceServer alloc] initWithURLStrings:urls]; +} + +- (nonnull RTCConfiguration*)RTCConfiguration:(id)json { + RTCConfiguration* config = [[RTCConfiguration alloc] init]; + + if (!json) { + return config; + } + + if (![json isKindOfClass:[NSDictionary class]]) { + NSLog(@"must be an object"); + return config; + } + + if (json[@"audioJitterBufferMaxPackets"] != nil && + [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { + config.audioJitterBufferMaxPackets 
= [json[@"audioJitterBufferMaxPackets"] intValue]; + } + + if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { + NSString* bundlePolicy = json[@"bundlePolicy"]; + if ([bundlePolicy isEqualToString:@"balanced"]) { + config.bundlePolicy = RTCBundlePolicyBalanced; + } else if ([bundlePolicy isEqualToString:@"max-compat"]) { + config.bundlePolicy = RTCBundlePolicyMaxCompat; + } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { + config.bundlePolicy = RTCBundlePolicyMaxBundle; + } + } + + if (json[@"iceBackupCandidatePairPingInterval"] != nil && + [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { + config.iceBackupCandidatePairPingInterval = + [json[@"iceBackupCandidatePairPingInterval"] intValue]; + } + + if (json[@"iceConnectionReceivingTimeout"] != nil && + [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { + config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; + } + + if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { + NSMutableArray* iceServers = [NSMutableArray new]; + for (id server in json[@"iceServers"]) { + RTCIceServer* convert = [self RTCIceServer:server]; + if (convert != nil) { + [iceServers addObject:convert]; + } + } + config.iceServers = iceServers; + } + + if (json[@"iceTransportPolicy"] != nil && + [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { + NSString* iceTransportPolicy = json[@"iceTransportPolicy"]; + if ([iceTransportPolicy isEqualToString:@"all"]) { + config.iceTransportPolicy = RTCIceTransportPolicyAll; + } else if ([iceTransportPolicy isEqualToString:@"none"]) { + config.iceTransportPolicy = RTCIceTransportPolicyNone; + } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { + config.iceTransportPolicy = RTCIceTransportPolicyNoHost; + } else if ([iceTransportPolicy isEqualToString:@"relay"]) { + config.iceTransportPolicy = 
RTCIceTransportPolicyRelay; + } + } + + if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { + NSString* rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; + if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; + } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; + } + } + + if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { + NSString* sdpSemantics = json[@"sdpSemantics"]; + if ([sdpSemantics isEqualToString:@"plan-b"]) { + config.sdpSemantics = RTCSdpSemanticsPlanB; + } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { + config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; + } + } + + if (json[@"maxIPv6Networks"] != nil && [json[@"maxIPv6Networks"] isKindOfClass:[NSNumber class]]) { + NSNumber* maxIPv6Networks = json[@"maxIPv6Networks"]; + config.maxIPv6Networks = [maxIPv6Networks intValue]; + } + + // === below is private api in webrtc === + if (json[@"tcpCandidatePolicy"] != nil && + [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { + NSString* tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; + if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; + } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; + } + } + + // candidateNetworkPolicy (private api) + if (json[@"candidateNetworkPolicy"] != nil && + [json[@"candidateNetworkPolicy"] isKindOfClass:[NSString class]]) { + NSString* candidateNetworkPolicy = json[@"candidateNetworkPolicy"]; + if ([candidateNetworkPolicy isEqualToString:@"all"]) { + config.candidateNetworkPolicy = RTCCandidateNetworkPolicyAll; + } else if ([candidateNetworkPolicy isEqualToString:@"low_cost"]) { + config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost; + } + } + + // KeyType (private api) + if 
(json[@"keyType"] != nil && [json[@"keyType"] isKindOfClass:[NSString class]]) { + NSString* keyType = json[@"keyType"]; + if ([keyType isEqualToString:@"RSA"]) { + config.keyType = RTCEncryptionKeyTypeRSA; + } else if ([keyType isEqualToString:@"ECDSA"]) { + config.keyType = RTCEncryptionKeyTypeECDSA; + } + } + + // continualGatheringPolicy (private api) + if (json[@"continualGatheringPolicy"] != nil && + [json[@"continualGatheringPolicy"] isKindOfClass:[NSString class]]) { + NSString* continualGatheringPolicy = json[@"continualGatheringPolicy"]; + if ([continualGatheringPolicy isEqualToString:@"gather_once"]) { + config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherOnce; + } else if ([continualGatheringPolicy isEqualToString:@"gather_continually"]) { + config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually; + } + } + + // audioJitterBufferMaxPackets (private api) + if (json[@"audioJitterBufferMaxPackets"] != nil && + [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { + NSNumber* audioJitterBufferMaxPackets = json[@"audioJitterBufferMaxPackets"]; + config.audioJitterBufferMaxPackets = [audioJitterBufferMaxPackets intValue]; + } + + // iceConnectionReceivingTimeout (private api) + if (json[@"iceConnectionReceivingTimeout"] != nil && + [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { + NSNumber* iceConnectionReceivingTimeout = json[@"iceConnectionReceivingTimeout"]; + config.iceConnectionReceivingTimeout = [iceConnectionReceivingTimeout intValue]; + } + + // iceBackupCandidatePairPingInterval (private api) + if (json[@"iceBackupCandidatePairPingInterval"] != nil && + [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { + NSNumber* iceBackupCandidatePairPingInterval = json[@"iceBackupCandidatePairPingInterval"]; + config.iceBackupCandidatePairPingInterval = [iceBackupCandidatePairPingInterval intValue]; + } + + // audioJitterBufferFastAccelerate (private api) + 
if (json[@"audioJitterBufferFastAccelerate"] != nil && + [json[@"audioJitterBufferFastAccelerate"] isKindOfClass:[NSNumber class]]) { + NSNumber* audioJitterBufferFastAccelerate = json[@"audioJitterBufferFastAccelerate"]; + config.audioJitterBufferFastAccelerate = [audioJitterBufferFastAccelerate boolValue]; + } + + // pruneTurnPorts (private api) + if (json[@"pruneTurnPorts"] != nil && [json[@"pruneTurnPorts"] isKindOfClass:[NSNumber class]]) { + NSNumber* pruneTurnPorts = json[@"pruneTurnPorts"]; + config.shouldPruneTurnPorts = [pruneTurnPorts boolValue]; + } + + // presumeWritableWhenFullyRelayed (private api) + if (json[@"presumeWritableWhenFullyRelayed"] != nil && + [json[@"presumeWritableWhenFullyRelayed"] isKindOfClass:[NSNumber class]]) { + NSNumber* presumeWritableWhenFullyRelayed = json[@"presumeWritableWhenFullyRelayed"]; + config.shouldPresumeWritableWhenFullyRelayed = [presumeWritableWhenFullyRelayed boolValue]; + } + + // cryptoOptions (private api) + if (json[@"cryptoOptions"] != nil && + [json[@"cryptoOptions"] isKindOfClass:[NSDictionary class]]) { + id options = json[@"cryptoOptions"]; + BOOL srtpEnableGcmCryptoSuites = NO; + BOOL sframeRequireFrameEncryption = NO; + BOOL srtpEnableEncryptedRtpHeaderExtensions = NO; + BOOL srtpEnableAes128Sha1_32CryptoCipher = NO; + + if (options[@"enableGcmCryptoSuites"] != nil && + [options[@"enableGcmCryptoSuites"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"enableGcmCryptoSuites"]; + srtpEnableGcmCryptoSuites = [value boolValue]; + } + + if (options[@"requireFrameEncryption"] != nil && + [options[@"requireFrameEncryption"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"requireFrameEncryption"]; + sframeRequireFrameEncryption = [value boolValue]; + } + + if (options[@"enableEncryptedRtpHeaderExtensions"] != nil && + [options[@"enableEncryptedRtpHeaderExtensions"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"enableEncryptedRtpHeaderExtensions"]; + 
srtpEnableEncryptedRtpHeaderExtensions = [value boolValue]; + } + + if (options[@"enableAes128Sha1_32CryptoCipher"] != nil && + [options[@"enableAes128Sha1_32CryptoCipher"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"enableAes128Sha1_32CryptoCipher"]; + srtpEnableAes128Sha1_32CryptoCipher = [value boolValue]; + } + + config.cryptoOptions = [[RTCCryptoOptions alloc] + initWithSrtpEnableGcmCryptoSuites:srtpEnableGcmCryptoSuites + srtpEnableAes128Sha1_32CryptoCipher:srtpEnableAes128Sha1_32CryptoCipher + srtpEnableEncryptedRtpHeaderExtensions:srtpEnableEncryptedRtpHeaderExtensions + sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption]; + } + + return config; +} + +- (RTCDataChannelConfiguration*)RTCDataChannelConfiguration:(id)json { + if (!json) { + return nil; + } + if ([json isKindOfClass:[NSDictionary class]]) { + RTCDataChannelConfiguration* init = [RTCDataChannelConfiguration new]; + + if (json[@"id"]) { + [init setChannelId:(int)[json[@"id"] integerValue]]; + } + if (json[@"ordered"]) { + init.isOrdered = [json[@"ordered"] boolValue]; + } + if (json[@"maxRetransmits"]) { + init.maxRetransmits = [json[@"maxRetransmits"] intValue]; + } + if (json[@"negotiated"]) { + init.isNegotiated = [json[@"negotiated"] boolValue]; + } + if (json[@"protocol"]) { + init.protocol = json[@"protocol"]; + } + return init; + } + return nil; +} + +- (CGRect)parseRect:(NSDictionary*)rect { + return CGRectMake( + [[rect valueForKey:@"left"] doubleValue], [[rect valueForKey:@"top"] doubleValue], + [[rect valueForKey:@"width"] doubleValue], [[rect valueForKey:@"height"] doubleValue]); +} + +- (NSDictionary*)dtmfSenderToMap:(id)dtmf Id:(NSString*)Id { + return @{ + @"dtmfSenderId" : Id, + @"interToneGap" : @(dtmf.interToneGap / 1000.0), + @"duration" : @(dtmf.duration / 1000.0), + }; +} + +- (NSDictionary*)rtpParametersToMap:(RTCRtpParameters*)parameters { + NSDictionary* rtcp = @{ + @"cname" : parameters.rtcp.cname, + @"reducedSize" : 
@(parameters.rtcp.isReducedSize), + }; + + NSMutableArray* headerExtensions = [NSMutableArray array]; + for (RTCRtpHeaderExtension* headerExtension in parameters.headerExtensions) { + [headerExtensions addObject:@{ + @"uri" : headerExtension.uri, + @"encrypted" : @(headerExtension.encrypted), + @"id" : @(headerExtension.id), + }]; + } + + NSMutableArray* encodings = [NSMutableArray array]; + for (RTCRtpEncodingParameters* encoding in parameters.encodings) { + // non-nil values + NSMutableDictionary* obj = [@{@"active" : @(encoding.isActive)} mutableCopy]; + // optional values + if (encoding.rid != nil) + [obj setObject:encoding.rid forKey:@"rid"]; + if (encoding.minBitrateBps != nil) + [obj setObject:encoding.minBitrateBps forKey:@"minBitrate"]; + if (encoding.maxBitrateBps != nil) + [obj setObject:encoding.maxBitrateBps forKey:@"maxBitrate"]; + if (encoding.maxFramerate != nil) + [obj setObject:encoding.maxFramerate forKey:@"maxFramerate"]; + if (encoding.numTemporalLayers != nil) + [obj setObject:encoding.numTemporalLayers forKey:@"numTemporalLayers"]; + if (encoding.scaleResolutionDownBy != nil) + [obj setObject:encoding.scaleResolutionDownBy forKey:@"scaleResolutionDownBy"]; + if (encoding.ssrc != nil) + [obj setObject:encoding.ssrc forKey:@"ssrc"]; + + [encodings addObject:obj]; + } + + NSMutableArray* codecs = [NSMutableArray array]; + for (RTCRtpCodecParameters* codec in parameters.codecs) { + [codecs addObject:@{ + @"name" : codec.name, + @"payloadType" : @(codec.payloadType), + @"clockRate" : codec.clockRate, + @"numChannels" : codec.numChannels ? 
codec.numChannels : @(1), + @"parameters" : codec.parameters, + @"kind" : codec.kind + }]; + } + + NSString *degradationPreference = @"balanced"; + if(parameters.degradationPreference != nil) { + if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceMaintainFramerate ) { + degradationPreference = @"maintain-framerate"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceMaintainResolution) { + degradationPreference = @"maintain-resolution"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceBalanced) { + degradationPreference = @"balanced"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceDisabled) { + degradationPreference = @"disabled"; + } + } + + return @{ + @"transactionId" : parameters.transactionId, + @"rtcp" : rtcp, + @"headerExtensions" : headerExtensions, + @"encodings" : encodings, + @"codecs" : codecs, + @"degradationPreference" : degradationPreference, + }; +} + +- (NSString*)streamTrackStateToString:(RTCMediaStreamTrackState)state { + switch (state) { + case RTCMediaStreamTrackStateLive: + return @"live"; + case RTCMediaStreamTrackStateEnded: + return @"ended"; + default: + break; + } + return @""; +} + +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream*)stream ownerTag:(NSString*)ownerTag { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + [videoTracks addObject:[self mediaTrackToMap:track]]; + } + + return @{ + @"streamId" : stream.streamId, + @"ownerTag" : ownerTag, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks, + + }; +} + +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track { + if (track == nil) + return @{}; + NSDictionary* params = @{ + @"enabled" : 
@(track.isEnabled), + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"readyState" : [self streamTrackStateToString:track.readyState], + @"remote" : @(YES) + }; + return params; +} + +- (NSDictionary*)rtpSenderToMap:(RTCRtpSender*)sender { + NSDictionary* params = @{ + @"senderId" : sender.senderId, + @"ownsTrack" : @(YES), + @"rtpParameters" : [self rtpParametersToMap:sender.parameters], + @"track" : [self mediaTrackToMap:sender.track], + @"dtmfSender" : [self dtmfSenderToMap:sender.dtmfSender Id:sender.senderId] + }; + return params; +} + +- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver { + NSDictionary* params = @{ + @"receiverId" : receiver.receiverId, + @"rtpParameters" : [self rtpParametersToMap:receiver.parameters], + @"track" : [self mediaTrackToMap:receiver.track], + }; + return params; +} + +- (RTCRtpTransceiver*)getRtpTransceiverById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + NSString *mid = transceiver.mid ? 
transceiver.mid : @""; + if ([mid isEqualToString:Id]) { + return transceiver; + } + } + return nil; +} + +- (RTCRtpSender*)getRtpSenderById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpSender* sender in peerConnection.senders) { + if ([sender.senderId isEqualToString:Id]) { + return sender; + } + } + return nil; +} + +- (RTCRtpReceiver*)getRtpReceiverById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpReceiver* receiver in peerConnection.receivers) { + if ([receiver.receiverId isEqualToString:Id]) { + return receiver; + } + } + return nil; +} + +- (RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { + RTCRtpEncodingParameters* encoding = [[RTCRtpEncodingParameters alloc] init]; + encoding.isActive = YES; + encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; + encoding.numTemporalLayers = [NSNumber numberWithInt:1]; +#if TARGET_OS_IPHONE + encoding.networkPriority = RTCPriorityLow; + encoding.bitratePriority = 1.0; +#endif + [encoding setRid:map[@"rid"]]; + + if (map[@"active"] != nil) { + [encoding setIsActive:((NSNumber*)map[@"active"]).boolValue]; + } + + if (map[@"minBitrate"] != nil) { + [encoding setMinBitrateBps:(NSNumber*)map[@"minBitrate"]]; + } + + if (map[@"maxBitrate"] != nil) { + [encoding setMaxBitrateBps:(NSNumber*)map[@"maxBitrate"]]; + } + + if (map[@"maxFramerate"] != nil) { + [encoding setMaxFramerate:(NSNumber*)map[@"maxFramerate"]]; + } + + if (map[@"numTemporalLayers"] != nil) { + [encoding setNumTemporalLayers:(NSNumber*)map[@"numTemporalLayers"]]; + } + + if (map[@"scaleResolutionDownBy"] != nil) { + [encoding setScaleResolutionDownBy:(NSNumber*)map[@"scaleResolutionDownBy"]]; + } + + if (map[@"scalabilityMode"] != nil) { + [encoding setScalabilityMode:(NSString*)map[@"scalabilityMode"]]; + } + + return encoding; +} + +- (RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { + NSArray* streamIds = map[@"streamIds"]; + NSArray* encodingsParams = map[@"sendEncodings"]; 
+ NSString* direction = map[@"direction"]; + + RTCRtpTransceiverInit* init = [[RTCRtpTransceiverInit alloc] init]; + + if (direction != nil) { + init.direction = [self stringToTransceiverDirection:direction]; + } + + if (streamIds != nil) { + init.streamIds = streamIds; + } + + if (encodingsParams != nil) { + NSMutableArray* sendEncodings = [[NSMutableArray alloc] init]; + for (NSDictionary* map in encodingsParams) { + [sendEncodings addObject:[self mapToEncoding:map]]; + } + [init setSendEncodings:sendEncodings]; + } + return init; +} + +- (RTCRtpMediaType)stringToRtpMediaType:(NSString*)type { + if ([type isEqualToString:@"audio"]) { + return RTCRtpMediaTypeAudio; + } else if ([type isEqualToString:@"video"]) { + return RTCRtpMediaTypeVideo; + } else if ([type isEqualToString:@"data"]) { + return RTCRtpMediaTypeData; + } + return RTCRtpMediaTypeAudio; +} + +- (RTCRtpTransceiverDirection)stringToTransceiverDirection:(NSString*)type { + if ([type isEqualToString:@"sendrecv"]) { + return RTCRtpTransceiverDirectionSendRecv; + } else if ([type isEqualToString:@"sendonly"]) { + return RTCRtpTransceiverDirectionSendOnly; + } else if ([type isEqualToString:@"recvonly"]) { + return RTCRtpTransceiverDirectionRecvOnly; + } else if ([type isEqualToString:@"inactive"]) { + return RTCRtpTransceiverDirectionInactive; + } + return RTCRtpTransceiverDirectionInactive; +} + +- (RTCRtpParameters*)updateRtpParameters:(RTCRtpParameters*)parameters + with:(NSDictionary*)newParameters { + // current encodings + NSArray* currentEncodings = parameters.encodings; + // new encodings + NSArray* newEncodings = [newParameters objectForKey:@"encodings"]; + + NSString *degradationPreference = [newParameters objectForKey:@"degradationPreference"]; + + if( degradationPreference != nil) { + if( [degradationPreference isEqualToString:@"maintain-framerate"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceMaintainFramerate]; + } else if ([degradationPreference 
isEqualToString:@"maintain-resolution"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceMaintainResolution]; + } else if ([degradationPreference isEqualToString:@"balanced"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceBalanced]; + } else if ([degradationPreference isEqualToString:@"disabled"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceDisabled]; + } + } + + for (int i = 0; i < [newEncodings count]; i++) { + RTCRtpEncodingParameters* currentParams = nil; + NSDictionary* newParams = [newEncodings objectAtIndex:i]; + NSString* rid = [newParams objectForKey:@"rid"]; + + // update by matching RID + if ([rid isKindOfClass:[NSString class]] && [rid length] != 0) { + // try to find current encoding with same rid + NSUInteger result = + [currentEncodings indexOfObjectPassingTest:^BOOL(RTCRtpEncodingParameters* _Nonnull obj, + NSUInteger idx, BOOL* _Nonnull stop) { + // stop if found object with matching rid + return (*stop = ([rid isEqualToString:obj.rid])); + }]; + + if (result != NSNotFound) { + currentParams = [currentEncodings objectAtIndex:result]; + } + } + + // fall back to update by index + if (currentParams == nil && i < [currentEncodings count]) { + currentParams = [currentEncodings objectAtIndex:i]; + } + + if (currentParams != nil) { + // update values + NSNumber* active = [newParams objectForKey:@"active"]; + if (active != nil) + currentParams.isActive = [active boolValue]; + NSNumber* maxBitrate = [newParams objectForKey:@"maxBitrate"]; + if (maxBitrate != nil) + currentParams.maxBitrateBps = maxBitrate; + NSNumber* minBitrate = [newParams objectForKey:@"minBitrate"]; + if (minBitrate != nil) + currentParams.minBitrateBps = minBitrate; + NSNumber* maxFramerate = [newParams objectForKey:@"maxFramerate"]; + if (maxFramerate != nil) + currentParams.maxFramerate = maxFramerate; + NSNumber* numTemporalLayers = [newParams 
objectForKey:@"numTemporalLayers"]; + if (numTemporalLayers != nil) + currentParams.numTemporalLayers = numTemporalLayers; + NSNumber* scaleResolutionDownBy = [newParams objectForKey:@"scaleResolutionDownBy"]; + if (scaleResolutionDownBy != nil) + currentParams.scaleResolutionDownBy = scaleResolutionDownBy; + } + } + + return parameters; +} + +- (NSString*)transceiverDirectionString:(RTCRtpTransceiverDirection)direction { + switch (direction) { + case RTCRtpTransceiverDirectionSendRecv: + return @"sendrecv"; + case RTCRtpTransceiverDirectionSendOnly: + return @"sendonly"; + case RTCRtpTransceiverDirectionRecvOnly: + return @"recvonly"; + case RTCRtpTransceiverDirectionInactive: + return @"inactive"; + case RTCRtpTransceiverDirectionStopped: + return @"stopped"; + break; + } + return nil; +} + +- (NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { + NSString* mid = transceiver.mid ? transceiver.mid : @""; + NSDictionary* params = @{ + @"transceiverId" : mid, + @"mid" : mid, + @"direction" : [self transceiverDirectionString:transceiver.direction], + @"sender" : [self rtpSenderToMap:transceiver.sender], + @"receiver" : [self receiverToMap:transceiver.receiver] + }; + return params; +} + +- (FlutterRTCVideoRenderer *)findRendererByTrackId:(NSString *)trackId { + for (FlutterRTCVideoRenderer *renderer in self.renders.allValues) { + if (renderer.videoTrack != nil && [renderer.videoTrack.trackId isEqualToString:trackId]) { + return renderer; + } + } + return nil; +} +@end diff --git a/common/darwin/Classes/LocalAudioTrack.h b/common/darwin/Classes/LocalAudioTrack.h new file mode 100644 index 0000000000..7cd1861a06 --- /dev/null +++ b/common/darwin/Classes/LocalAudioTrack.h @@ -0,0 +1,19 @@ +#import +#import "AudioProcessingAdapter.h" +#import "LocalTrack.h" + +@interface LocalAudioTrack : NSObject + +- (_Nonnull instancetype)initWithTrack:(RTCAudioTrack* _Nonnull)track; + +@property(nonatomic, strong) RTCAudioTrack* _Nonnull audioTrack; + +- 
(void)addRenderer:(_Nonnull id)renderer; + +- (void)removeRenderer:(_Nonnull id)renderer; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +@end diff --git a/common/darwin/Classes/LocalAudioTrack.m b/common/darwin/Classes/LocalAudioTrack.m new file mode 100644 index 0000000000..a080d4f090 --- /dev/null +++ b/common/darwin/Classes/LocalAudioTrack.m @@ -0,0 +1,38 @@ +#import "LocalAudioTrack.h" +#import "AudioManager.h" + +@implementation LocalAudioTrack { + RTCAudioTrack* _track; +} + +@synthesize audioTrack = _track; + +- (instancetype)initWithTrack:(RTCAudioTrack*)track { + self = [super init]; + if (self) { + _track = track; + } + return self; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +- (void)addRenderer:(id)renderer { + [AudioManager.sharedInstance addLocalAudioRenderer:renderer]; +} + +- (void)removeRenderer:(id)renderer { + [AudioManager.sharedInstance removeLocalAudioRenderer:renderer]; +} + +- (void)addProcessing:(_Nonnull id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter addProcessing:processor]; +} + +- (void)removeProcessing:(_Nonnull id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter removeProcessing:processor]; +} + +@end diff --git a/common/darwin/Classes/LocalTrack.h b/common/darwin/Classes/LocalTrack.h new file mode 100644 index 0000000000..34f2e1e29e --- /dev/null +++ b/common/darwin/Classes/LocalTrack.h @@ -0,0 +1,7 @@ +#import + +@protocol LocalTrack + +- (RTCMediaStreamTrack*)track; + +@end diff --git a/common/darwin/Classes/LocalVideoTrack.h b/common/darwin/Classes/LocalVideoTrack.h new file mode 100644 index 0000000000..e28ee76248 --- /dev/null +++ b/common/darwin/Classes/LocalVideoTrack.h @@ -0,0 +1,24 @@ +#import +#import "LocalTrack.h" +#import "VideoProcessingAdapter.h" + +@interface LocalVideoTrack : NSObject + +- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track; + +- (_Nonnull 
instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track + videoProcessing:(VideoProcessingAdapter* _Nullable)processing; + +@property(nonatomic, strong) RTCVideoTrack* _Nonnull videoTrack; + +@property(nonatomic, strong) VideoProcessingAdapter* _Nonnull processing; + +- (void)addRenderer:(_Nonnull id)renderer; + +- (void)removeRenderer:(_Nonnull id)renderer; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +@end diff --git a/common/darwin/Classes/LocalVideoTrack.m b/common/darwin/Classes/LocalVideoTrack.m new file mode 100644 index 0000000000..02ca7c6c40 --- /dev/null +++ b/common/darwin/Classes/LocalVideoTrack.m @@ -0,0 +1,47 @@ +#import "LocalVideoTrack.h" + +@implementation LocalVideoTrack { + RTCVideoTrack* _track; + VideoProcessingAdapter* _processing; +} + +@synthesize videoTrack = _track; +@synthesize processing = _processing; + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + videoProcessing:(VideoProcessingAdapter*)processing { + self = [super init]; + if (self) { + _track = track; + _processing = processing; + } + return self; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track { + return [self initWithTrack:track videoProcessing:nil]; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +/** Register a renderer that will render all frames received on this track. */ +- (void)addRenderer:(id)renderer { + [_track addRenderer:renderer]; +} + +/** Deregister a renderer. 
*/ +- (void)removeRenderer:(id)renderer { + [_track removeRenderer:renderer]; +} + +- (void)addProcessing:(id)processor { + [_processing addProcessing:processor]; +} + +- (void)removeProcessing:(id)processor { + [_processing removeProcessing:processor]; +} + +@end diff --git a/common/darwin/Classes/RTCAudioSource+Private.h b/common/darwin/Classes/RTCAudioSource+Private.h new file mode 100644 index 0000000000..6e45d12fbf --- /dev/null +++ b/common/darwin/Classes/RTCAudioSource+Private.h @@ -0,0 +1,14 @@ +#ifdef __cplusplus +#import "WebRTC/RTCAudioSource.h" +#include "media_stream_interface.h" + +@interface RTCAudioSource () + +/** + * The AudioSourceInterface object passed to this RTCAudioSource during + * construction. + */ +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioSource; + +@end +#endif diff --git a/common/darwin/Classes/VideoProcessingAdapter.h b/common/darwin/Classes/VideoProcessingAdapter.h new file mode 100644 index 0000000000..c953316eec --- /dev/null +++ b/common/darwin/Classes/VideoProcessingAdapter.h @@ -0,0 +1,18 @@ +#import +#import + +@protocol ExternalVideoProcessingDelegate +- (RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)onFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)frame; +@end + +@interface VideoProcessingAdapter : NSObject + +- (_Nonnull instancetype)initWithRTCVideoSource:(RTCVideoSource* _Nonnull)source; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +- (RTCVideoSource* _Nonnull) source; + +@end diff --git a/common/darwin/Classes/VideoProcessingAdapter.m b/common/darwin/Classes/VideoProcessingAdapter.m new file mode 100644 index 0000000000..5b784b8111 --- /dev/null +++ b/common/darwin/Classes/VideoProcessingAdapter.m @@ -0,0 +1,55 @@ +#import "VideoProcessingAdapter.h" +#import + +@implementation VideoProcessingAdapter { + RTCVideoSource* _videoSource; + CGSize _frameSize; + NSArray>* _processors; + os_unfair_lock _lock; +} + +- 
(instancetype)initWithRTCVideoSource:(RTCVideoSource*)source { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _videoSource = source; + _processors = [NSArray> new]; + } + return self; +} + +- (RTCVideoSource* _Nonnull) source { + return _videoSource; +} + +- (void)addProcessing:(id)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors arrayByAddingObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]]; + os_unfair_lock_unlock(&_lock); +} + +- (void)setSize:(CGSize)size { + _frameSize = size; +} + +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + frame = [processor onFrame:frame]; + } + [_videoSource capturer:capturer didCaptureVideoFrame:frame]; + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/common/darwin/Classes/audio_sink_bridge.cpp b/common/darwin/Classes/audio_sink_bridge.cpp new file mode 100644 index 0000000000..16ce8fa841 --- /dev/null +++ b/common/darwin/Classes/audio_sink_bridge.cpp @@ -0,0 +1,27 @@ +#include "media_stream_interface.h" +#include "FlutterRTCAudioSink-Interface.h" + +class AudioSinkBridge : public webrtc::AudioTrackSinkInterface { +private: + void* sink; + +public: + AudioSinkBridge(void* sink1) { + sink = sink1; + } + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) override + { + RTCAudioSinkCallback(sink, + audio_data, + bits_per_sample, + sample_rate, + number_of_channels, + number_of_frames + ); + }; + int NumPreferredChannels() const override { return 1; } +}; diff --git 
a/common/darwin/Classes/media_stream_interface.h b/common/darwin/Classes/media_stream_interface.h new file mode 100644 index 0000000000..e25553f9fa --- /dev/null +++ b/common/darwin/Classes/media_stream_interface.h @@ -0,0 +1,199 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +/// Source https://webrtc.googlesource.com/src/+/master/api/media_stream_interface.h + +#ifdef __cplusplus +#ifndef API_MEDIA_STREAM_INTERFACE_H_ +#define API_MEDIA_STREAM_INTERFACE_H_ + +#include +#include +#include +#include +#include + +namespace webrtc { + + // Generic observer interface. + class ObserverInterface { + public: + virtual void OnChanged() = 0; + protected: + virtual ~ObserverInterface() {} + }; + class NotifierInterface { + public: + virtual void RegisterObserver(ObserverInterface* observer) = 0; + virtual void UnregisterObserver(ObserverInterface* observer) = 0; + virtual ~NotifierInterface() {} + }; + + enum class RefCountReleaseStatus { kDroppedLastRef, kOtherRefsRemained }; + // Interfaces where refcounting is part of the public api should + // inherit this abstract interface. The implementation of these + // methods is usually provided by the RefCountedObject template class, + // applied as a leaf in the inheritance tree. + class RefCountInterface { + public: + virtual void AddRef() const = 0; + virtual RefCountReleaseStatus Release() const = 0; + // Non-public destructor, because Release() has exclusive responsibility for + // destroying the object. + protected: + virtual ~RefCountInterface() {} + }; + + // Base class for sources. A MediaStreamTrack has an underlying source that + // provides media. 
A source can be shared by multiple tracks. + class MediaSourceInterface : public RefCountInterface, + public NotifierInterface { + public: + enum SourceState { kInitializing, kLive, kEnded, kMuted }; + virtual SourceState state() const = 0; + virtual bool remote() const = 0; + protected: + ~MediaSourceInterface() override = default; + }; + + // Interface for receiving audio data from a AudioTrack. + class AudioTrackSinkInterface { + public: + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) { + + }; + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + void* absolute_capture_timestamp_ms) { + // TODO(bugs.webrtc.org/10739): Deprecate the old OnData and make this one + // pure virtual. + return OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, + number_of_frames); + } + virtual int NumPreferredChannels() const { return -1; } + protected: + virtual ~AudioTrackSinkInterface() {} + }; + // AudioSourceInterface is a reference counted source used for AudioTracks. + // The same source can be used by multiple AudioTracks. + class AudioSourceInterface : public MediaSourceInterface { + public: + class AudioObserver { + public: + virtual void OnSetVolume(double volume) = 0; + protected: + virtual ~AudioObserver() {} + }; + // TODO(deadbeef): Makes all the interfaces pure virtual after they're + // implemented in chromium. + // Sets the volume of the source. |volume| is in the range of [0, 10]. + // TODO(tommi): This method should be on the track and ideally volume should + // be applied in the track in a way that does not affect clones of the track. + virtual void SetVolume(double volume) {} + // Registers/unregisters observers to the audio source. 
+ virtual void RegisterAudioObserver(AudioObserver* observer) {} + virtual void UnregisterAudioObserver(AudioObserver* observer) {} + // TODO(tommi): Make pure virtual. + virtual void AddSink(AudioTrackSinkInterface* sink) {} + virtual void RemoveSink(AudioTrackSinkInterface* sink) {} + // Returns options for the AudioSource. + // (for some of the settings this approach is broken, e.g. setting + // audio network adaptation on the source is the wrong layer of abstraction). +// virtual const AudioOptions options() const; + }; +} +namespace rtc { + + template + class scoped_refptr { + public: + typedef T element_type; + scoped_refptr() : ptr_(nullptr) {} + scoped_refptr(std::nullptr_t) : ptr_(nullptr) {} // NOLINT(runtime/explicit) + explicit scoped_refptr(T* p) : ptr_(p) { + if (ptr_) + ptr_->AddRef(); + } + scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) { + if (ptr_) + ptr_->AddRef(); + } + template + scoped_refptr(const scoped_refptr& r) : ptr_(r.get()) { + if (ptr_) + ptr_->AddRef(); + } + // Move constructors. + scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + template + scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + ~scoped_refptr() { + if (ptr_) + ptr_->Release(); + } + T* get() const { return ptr_; } + explicit operator bool() const { return ptr_ != nullptr; } + T& operator*() const { return *ptr_; } + T* operator->() const { return ptr_; } + // Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a + // null pointer, all without touching the reference count of the underlying + // pointed-to object. The object is still reference counted, and the caller of + // release() is now the proud owner of one reference, so it is responsible for + // calling Release() once on the object when no longer using it. 
+ T* release() { + T* retVal = ptr_; + ptr_ = nullptr; + return retVal; + } + scoped_refptr& operator=(T* p) { + // AddRef first so that self assignment should work + if (p) + p->AddRef(); + if (ptr_) + ptr_->Release(); + ptr_ = p; + return *this; + } + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.ptr_; + } + template + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.get(); + } + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + template + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + void swap(T** pp) noexcept { + T* p = ptr_; + ptr_ = *pp; + *pp = p; + } + void swap(scoped_refptr& r) noexcept { swap(&r.ptr_); } + protected: + T* ptr_; + }; +}; + +#endif // API_MEDIA_STREAM_INTERFACE_H_ +#endif // __cplusplus diff --git a/elinux/CMakeLists.txt b/elinux/CMakeLists.txt new file mode 100644 index 0000000000..6ae0ea6390 --- /dev/null +++ b/elinux/CMakeLists.txt @@ -0,0 +1,60 @@ +cmake_minimum_required(VERSION 3.15) +set(PROJECT_NAME "flutter_webrtc") +project(${PROJECT_NAME} LANGUAGES CXX) + +# This value is used when generating builds using this plugin, so it must +# not be changed +set(PLUGIN_NAME "flutter_webrtc_plugin") + +#add_definitions(-DLIB_WEBRTC_API_DLL) +add_definitions(-DRTC_DESKTOP_DEVICE) +add_definitions(-DFLUTTER_ELINUX) + +add_library(${PLUGIN_NAME} SHARED + "../third_party/uuidxx/uuidxx.cc" + "../common/cpp/src/flutter_data_channel.cc" + "../common/cpp/src/flutter_frame_cryptor.cc" + "../common/cpp/src/flutter_frame_capturer.cc" + "../common/cpp/src/flutter_media_stream.cc" + "../common/cpp/src/flutter_peerconnection.cc" + "../common/cpp/src/flutter_video_renderer.cc" + "../common/cpp/src/flutter_screen_capture.cc" + "../common/cpp/src/flutter_webrtc.cc" + "../common/cpp/src/flutter_webrtc_base.cc" + "../common/cpp/src/flutter_common.cc" + 
"flutter_webrtc_plugin.cc" +) + +include_directories( + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/uuidxx" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/svpng" +) + +apply_standard_settings(${PLUGIN_NAME}) +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) +target_include_directories(${PLUGIN_NAME} INTERFACE + "${CMAKE_CURRENT_SOURCE_DIR}" +) +target_link_libraries(${PLUGIN_NAME} PRIVATE + flutter + flutter_wrapper_plugin + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" +) + +# List of absolute paths to libraries that should be bundled with the plugin +set(flutter_webrtc_bundled_libraries + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" + PARENT_SCOPE +) + +# Add $ORIGIN to RPATH so that lib/libflutter_webrtc_plugin.so can find lib/libwebrtc.so at runtime +set_property( + TARGET ${PLUGIN_NAME} + PROPERTY BUILD_RPATH + "\$ORIGIN" +) \ No newline at end of file diff --git a/elinux/flutter_webrtc/flutter_web_r_t_c_plugin.h b/elinux/flutter_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..9889514be8 --- /dev/null +++ b/elinux/flutter_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,24 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default"))) +#else +#define FLUTTER_PLUGIN_EXPORT +#endif + + +#if defined(__cplusplus) +extern "C" { +#endif + +FLUTTER_PLUGIN_EXPORT void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar); + +#if defined(__cplusplus) +} // extern "C" +#endif + +#endif // 
PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ diff --git a/elinux/flutter_webrtc_plugin.cc b/elinux/flutter_webrtc_plugin.cc new file mode 100644 index 0000000000..4e8a36656c --- /dev/null +++ b/elinux/flutter_webrtc_plugin.cc @@ -0,0 +1,75 @@ +#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" + +#include "flutter_common.h" +#include "flutter_webrtc.h" + +const char* kChannelName = "FlutterWebRTC.Method"; + +//#if defined(_WINDOWS) + +namespace flutter_webrtc_plugin { + +// A webrtc plugin for windows/linux. +class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. + std::unique_ptr plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + TaskRunner* task_runner() { return nullptr; } + + private: + // Creates a plugin that communicates on the given channel. + FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()) { + webrtc_ = std::make_unique(this); + } + + // Called when a method is called on |channel_|; + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr result) { + // handle method call and forward to webrtc native sdk. 
+ auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr channel_; + std::unique_ptr webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; +}; + +} // namespace flutter_webrtc_plugin + +void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar) { + static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + plugin_registrar); +} diff --git a/example/.gitignore b/example/.gitignore index 78b3c2b336..f70e6e8ce0 100644 --- a/example/.gitignore +++ b/example/.gitignore @@ -29,9 +29,9 @@ .pub-cache/ .pub/ /build/ +.metadata # Web related -lib/generated_plugin_registrant.dart # Symbolication related app.*.symbols diff --git a/example/README.md b/example/README.md index 618b36b261..39eddc96a5 100644 --- a/example/README.md +++ b/example/README.md @@ -26,5 +26,15 @@ flutter run -d macos Web ```bash +dart compile js ../web/e2ee.worker.dart -o web/e2ee.worker.dart.js flutter run -d web ``` + +Windows + +```bash +flutter channel master +flutter create --platforms windows . +flutter run -d windows +``` + diff --git a/example/analysis_options.yaml b/example/analysis_options.yaml index 2d6b313d46..fea5e03d69 100644 --- a/example/analysis_options.yaml +++ b/example/analysis_options.yaml @@ -40,7 +40,3 @@ analyzer: # allow self-reference to deprecated members (we do this because otherwise we have # to annotate every member in every test, assert, etc, when we deprecate something) deprecated_member_use_from_same_package: ignore - # Ignore analyzer hints for updating pubspecs when using Future or - # Stream and not importing dart:async - # Please see https://github.com/flutter/flutter/pull/24528 for details. 
- sdk_version_async_exported_from_core: ignore diff --git a/example/android/.gitignore b/example/android/.gitignore new file mode 100644 index 0000000000..6f568019d3 --- /dev/null +++ b/example/android/.gitignore @@ -0,0 +1,13 @@ +gradle-wrapper.jar +/.gradle +/captures/ +/gradlew +/gradlew.bat +/local.properties +GeneratedPluginRegistrant.java + +# Remember to never publicly share your keystore. +# See https://flutter.dev/docs/deployment/android#reference-the-keystore-from-the-app +key.properties +**/*.keystore +**/*.jks diff --git a/example/android/app/build.gradle b/example/android/app/build.gradle new file mode 100644 index 0000000000..678af1927a --- /dev/null +++ b/example/android/app/build.gradle @@ -0,0 +1,44 @@ +plugins { + id "com.android.application" + id "kotlin-android" + // The Flutter Gradle Plugin must be applied after the Android and Kotlin Gradle plugins. + id "dev.flutter.flutter-gradle-plugin" +} + +android { + namespace = "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example" + compileSdk = flutter.compileSdkVersion + ndkVersion = flutter.ndkVersion + + compileOptions { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + } + + kotlinOptions { + jvmTarget = JavaVersion.VERSION_1_8 + } + + defaultConfig { + // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html). + applicationId = "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example" + // You can update the following values to match your application needs. + // For more information, see: https://flutter.dev/to/review-gradle-config. + minSdk = flutter.minSdkVersion + targetSdk = flutter.targetSdkVersion + versionCode = flutter.versionCode + versionName = flutter.versionName + } + + buildTypes { + release { + // TODO: Add your own signing config for the release build. + // Signing with the debug keys for now, so `flutter run --release` works. 
+ signingConfig = signingConfigs.debug + } + } +} + +flutter { + source = "../.." +} diff --git a/example/android/app/src/debug/AndroidManifest.xml b/example/android/app/src/debug/AndroidManifest.xml new file mode 100644 index 0000000000..399f6981d5 --- /dev/null +++ b/example/android/app/src/debug/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/example/android/app/src/main/AndroidManifest.xml b/example/android/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000000..30096cd127 --- /dev/null +++ b/example/android/app/src/main/AndroidManifest.xml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/android/app/src/main/java/com/cloudwebrtc/flutterflutterexample/flutter_webrtc_example/MainActivity.java b/example/android/app/src/main/java/com/cloudwebrtc/flutterflutterexample/flutter_webrtc_example/MainActivity.java new file mode 100644 index 0000000000..9ff32ddb60 --- /dev/null +++ b/example/android/app/src/main/java/com/cloudwebrtc/flutterflutterexample/flutter_webrtc_example/MainActivity.java @@ -0,0 +1,6 @@ +package com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example; + +import io.flutter.embedding.android.FlutterActivity; + +public class MainActivity extends FlutterActivity { +} diff --git a/example/android/app/src/main/res/drawable-v21/launch_background.xml b/example/android/app/src/main/res/drawable-v21/launch_background.xml new file mode 100644 index 0000000000..f74085f3f6 --- /dev/null +++ b/example/android/app/src/main/res/drawable-v21/launch_background.xml @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/example/android/app/src/main/res/drawable/launch_background.xml b/example/android/app/src/main/res/drawable/launch_background.xml new file mode 100644 index 0000000000..304732f884 --- /dev/null +++ b/example/android/app/src/main/res/drawable/launch_background.xml @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png 
b/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 0000000000..db77bb4b7b Binary files /dev/null and b/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 0000000000..17987b79bb Binary files /dev/null and b/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 0000000000..09d4391482 Binary files /dev/null and b/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 0000000000..d5f1c8d34e Binary files /dev/null and b/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 0000000000..4d6372eebd Binary files /dev/null and b/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/example/android/app/src/main/res/values-night/styles.xml b/example/android/app/src/main/res/values-night/styles.xml new file mode 100644 index 0000000000..06952be745 --- /dev/null +++ b/example/android/app/src/main/res/values-night/styles.xml @@ -0,0 +1,18 @@ + + + + + + + diff --git a/example/android/app/src/main/res/values/styles.xml b/example/android/app/src/main/res/values/styles.xml new file mode 100644 index 0000000000..cb1ef88056 --- /dev/null +++ b/example/android/app/src/main/res/values/styles.xml @@ -0,0 +1,18 @@ + + + + + + + diff --git a/example/android/app/src/profile/AndroidManifest.xml 
b/example/android/app/src/profile/AndroidManifest.xml new file mode 100644 index 0000000000..399f6981d5 --- /dev/null +++ b/example/android/app/src/profile/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/example/android/build.gradle b/example/android/build.gradle new file mode 100644 index 0000000000..d2ffbffa4c --- /dev/null +++ b/example/android/build.gradle @@ -0,0 +1,18 @@ +allprojects { + repositories { + google() + mavenCentral() + } +} + +rootProject.buildDir = "../build" +subprojects { + project.buildDir = "${rootProject.buildDir}/${project.name}" +} +subprojects { + project.evaluationDependsOn(":app") +} + +tasks.register("clean", Delete) { + delete rootProject.buildDir +} diff --git a/example/android/gradle.properties b/example/android/gradle.properties new file mode 100644 index 0000000000..94adc3a3f9 --- /dev/null +++ b/example/android/gradle.properties @@ -0,0 +1,3 @@ +org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true diff --git a/example/android/gradle/wrapper/gradle-wrapper.properties b/example/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000000..bc5bce7aa1 --- /dev/null +++ b/example/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Sat Nov 09 20:10:39 CST 2024 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/example/android/settings.gradle b/example/android/settings.gradle new file mode 100644 index 0000000000..4034dd79cc --- /dev/null +++ b/example/android/settings.gradle @@ -0,0 +1,25 @@ +pluginManagement { + def flutterSdkPath = { + def properties = new Properties() + file("local.properties").withInputStream { properties.load(it) } + def flutterSdkPath = properties.getProperty("flutter.sdk") + assert flutterSdkPath != null, "flutter.sdk not set in local.properties" + return 
flutterSdkPath + }() + + includeBuild("$flutterSdkPath/packages/flutter_tools/gradle") + + repositories { + google() + mavenCentral() + gradlePluginPortal() + } +} + +plugins { + id "dev.flutter.flutter-plugin-loader" version "1.0.0" + id "com.android.application" version "8.3.0" apply false + id "org.jetbrains.kotlin.android" version "1.7.10" apply false +} + +include ":app" \ No newline at end of file diff --git a/example/elinux/.gitignore b/example/elinux/.gitignore new file mode 100644 index 0000000000..229c109991 --- /dev/null +++ b/example/elinux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral/ diff --git a/example/elinux/CMakeLists.txt b/example/elinux/CMakeLists.txt new file mode 100644 index 0000000000..ed844fbcaf --- /dev/null +++ b/example/elinux/CMakeLists.txt @@ -0,0 +1,103 @@ +cmake_minimum_required(VERSION 3.15) +# stop cmake from taking make from CMAKE_SYSROOT +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +project(runner LANGUAGES CXX) + +set(BINARY_NAME "example") + +cmake_policy(SET CMP0063 NEW) + +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Basically we use this include when we got the following error: +# fatal error: 'bits/c++config.h' file not found +include_directories(SYSTEM ${FLUTTER_SYSTEM_INCLUDE_DIRECTORIES}) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) + +# Configure build options. +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") +endif() + +# Configure build option to target backend. 
+if (NOT FLUTTER_TARGET_BACKEND_TYPE) + set(FLUTTER_TARGET_BACKEND_TYPE "wayland" CACHE + STRING "Flutter target backend type" FORCE) + set_property(CACHE FLUTTER_TARGET_BACKEND_TYPE PROPERTY STRINGS + "wayland" "gbm" "eglstream" "x11") +endif() + +# Compilation settings that should be applied to most targets. +function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_17) + target_compile_options(${TARGET} PRIVATE -Wall -Werror) + target_compile_options(${TARGET} PRIVATE "$<$>:-O3>") + target_compile_definitions(${TARGET} PRIVATE "$<$>:NDEBUG>") +endfunction() + +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") + +# Flutter library and tool build rules. +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# Application build +add_subdirectory("runner") + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. 
+install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +install(FILES "${FLUTTER_EMBEDDER_LIBRARY}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +if(PLUGIN_BUNDLED_LIBRARIES) + install(FILES "${PLUGIN_BUNDLED_LIBRARIES}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() diff --git a/example/elinux/flutter/CMakeLists.txt b/example/elinux/flutter/CMakeLists.txt new file mode 100644 index 0000000000..f141a3c690 --- /dev/null +++ b/example/elinux/flutter/CMakeLists.txt @@ -0,0 +1,108 @@ +cmake_minimum_required(VERSION 3.15) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +set(WRAPPER_ROOT "${EPHEMERAL_DIR}/cpp_client_wrapper") + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. 
+function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_engine.so") +if(FLUTTER_TARGET_BACKEND_TYPE MATCHES "gbm") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_gbm.so") +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "eglstream") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_eglstream.so") +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "x11") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_x11.so") +else() + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_wayland.so") +endif() + +# Published to parent scope for install step. +set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_EMBEDDER_LIBRARY ${FLUTTER_EMBEDDER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/elinux/" PARENT_SCOPE) +set(AOT_LIBRARY "${EPHEMERAL_DIR}/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "flutter_export.h" + "flutter_plugin_registrar.h" + "flutter_messenger.h" + "flutter_texture_registrar.h" + "flutter_elinux.h" + "flutter_platform_views.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE "${FLUTTER_EMBEDDER_LIBRARY}") +add_dependencies(flutter flutter_assemble) + +# === Wrapper === +list(APPEND CPP_WRAPPER_SOURCES_CORE + "core_implementations.cc" + "standard_codec.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_CORE "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_PLUGIN + "plugin_registrar.cc" +) 
+list_prepend(CPP_WRAPPER_SOURCES_PLUGIN "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_APP + "flutter_engine.cc" + "flutter_view_controller.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_APP "${WRAPPER_ROOT}/") + +# Wrapper sources needed for a plugin. +add_library(flutter_wrapper_plugin STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} +) +apply_standard_settings(flutter_wrapper_plugin) +set_target_properties(flutter_wrapper_plugin PROPERTIES + POSITION_INDEPENDENT_CODE ON) +set_target_properties(flutter_wrapper_plugin PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_link_libraries(flutter_wrapper_plugin PUBLIC flutter) +target_include_directories(flutter_wrapper_plugin PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_plugin flutter_assemble) + +# Wrapper sources needed for the runner. +add_library(flutter_wrapper_app STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_APP} +) +apply_standard_settings(flutter_wrapper_app) +target_link_libraries(flutter_wrapper_app PUBLIC flutter) +target_include_directories(flutter_wrapper_app PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_app flutter_assemble) + +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + "${FLUTTER_EMBEDDER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} + ${CPP_WRAPPER_SOURCES_APP} +) diff --git a/example/elinux/flutter/generated_plugin_registrant.cc b/example/elinux/flutter/generated_plugin_registrant.cc new file mode 100644 index 0000000000..8b2a034590 --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.cc @@ -0,0 +1,14 @@ +// +// Generated file. Do not edit. 
+// + +// clang-format off + +#include "generated_plugin_registrant.h" + +#include + +void RegisterPlugins(flutter::PluginRegistry* registry) { + FlutterWebRTCPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("FlutterWebRTCPlugin")); +} diff --git a/example/elinux/flutter/generated_plugin_registrant.dart b/example/elinux/flutter/generated_plugin_registrant.dart new file mode 100644 index 0000000000..90bf21a80b --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.dart @@ -0,0 +1,10 @@ +// +// Generated file. Do not edit. +// + +// ignore_for_file: lines_longer_than_80_chars + + +// ignore: public_member_api_docs +void registerPlugins() { +} diff --git a/example/elinux/flutter/generated_plugin_registrant.h b/example/elinux/flutter/generated_plugin_registrant.h new file mode 100644 index 0000000000..a31c23cd89 --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.h @@ -0,0 +1,13 @@ +// +// Generated file. Do not edit. +// + +#ifndef GENERATED_PLUGIN_REGISTRANT_ +#define GENERATED_PLUGIN_REGISTRANT_ + +#include + +// Registers Flutter plugins. +void RegisterPlugins(flutter::PluginRegistry* registry); + +#endif // GENERATED_PLUGIN_REGISTRANT_ diff --git a/example/elinux/flutter/generated_plugins.cmake b/example/elinux/flutter/generated_plugins.cmake new file mode 100644 index 0000000000..00419e86ae --- /dev/null +++ b/example/elinux/flutter/generated_plugins.cmake @@ -0,0 +1,16 @@ +# +# Generated file, do not edit. 
+# + +list(APPEND FLUTTER_PLUGIN_LIST + flutter_webrtc +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/elinux plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) diff --git a/example/elinux/runner/CMakeLists.txt b/example/elinux/runner/CMakeLists.txt new file mode 100644 index 0000000000..d15d5ca317 --- /dev/null +++ b/example/elinux/runner/CMakeLists.txt @@ -0,0 +1,23 @@ +cmake_minimum_required(VERSION 3.15) +project(runner LANGUAGES CXX) + +if(FLUTTER_TARGET_BACKEND_TYPE MATCHES "gbm") + add_definitions(-DFLUTTER_TARGET_BACKEND_GBM) +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "eglstream") + add_definitions(-DFLUTTER_TARGET_BACKEND_EGLSTREAM) +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "x11") + add_definitions(-DFLUTTER_TARGET_BACKEND_X11) +else() + add_definitions(-DFLUTTER_TARGET_BACKEND_WAYLAND) +endif() + +add_executable(${BINARY_NAME} + "flutter_window.cc" + "main.cc" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" +) +apply_standard_settings(${BINARY_NAME}) +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE flutter flutter_wrapper_app) +target_include_directories(${BINARY_NAME} PRIVATE "${CMAKE_SOURCE_DIR}") +add_dependencies(${BINARY_NAME} flutter_assemble) diff --git a/example/elinux/runner/command_options.h b/example/elinux/runner/command_options.h new file mode 100644 index 0000000000..b0de93165c --- /dev/null +++ b/example/elinux/runner/command_options.h @@ -0,0 +1,402 @@ +// Copyright 2022 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef COMMAND_OPTIONS_ +#define COMMAND_OPTIONS_ + +#include +#include +#include +#include +#include +#include +#include + +namespace commandline { + +namespace { +constexpr char kOptionStyleNormal[] = "--"; +constexpr char kOptionStyleShort[] = "-"; +constexpr char kOptionValueForHelpMessage[] = "="; +} // namespace + +class Exception : public std::exception { + public: + Exception(const std::string& msg) : msg_(msg) {} + ~Exception() throw() {} + + const char* what() const throw() { return msg_.c_str(); } + + private: + std::string msg_; +}; + +class CommandOptions { + public: + CommandOptions() = default; + ~CommandOptions() = default; + + void AddWithoutValue(const std::string& name, + const std::string& short_name, + const std::string& description, + bool required) { + Add(name, short_name, description, "", + ReaderString(), required, false); + } + + void AddInt(const std::string& name, + const std::string& short_name, + const std::string& description, + const int& default_value, + bool required) { + Add(name, short_name, description, default_value, + ReaderInt(), required, true); + } + + void AddDouble(const std::string& name, + const std::string& short_name, + const std::string& description, + const double& default_value, + bool required) { + Add(name, short_name, description, default_value, + ReaderDouble(), required, true); + } + + void AddString(const std::string& name, + const std::string& short_name, + const std::string& description, + const std::string& default_value, + bool required) { + Add(name, short_name, description, default_value, + ReaderString(), required, true); + } + + template + void Add(const std::string& name, + const std::string& short_name, + const std::string& description, + const T default_value, + F reader = F(), + bool required = true, + bool required_value = true) { + if (options_.find(name) != options_.end()) { + std::cerr << "Already registered option: " << name << std::endl; + return; + } + + if 
 (lut_short_options_.find(short_name) != lut_short_options_.end()) { + std::cerr << short_name << " is already registered" << std::endl; + return; + } + lut_short_options_[short_name] = name; + + options_[name] = std::make_unique>( + name, short_name, description, default_value, reader, required, + required_value); + + // register to show help message. + registration_order_options_.push_back(options_[name].get()); + } + + bool Exist(const std::string& name) { + auto itr = options_.find(name); + return itr != options_.end() && itr->second->HasValue(); + } + + template + const T& GetValue(const std::string& name) { + auto itr = options_.find(name); + if (itr == options_.end()) { + throw Exception("Not found: " + name); + } + + auto* option_value = dynamic_cast*>(itr->second.get()); + if (!option_value) { + throw Exception("Type mismatch: " + name); + } + return option_value->GetValue(); + } + + bool Parse(int argc, const char* const* argv) { + if (argc < 1) { + errors_.push_back("No options"); + return false; + } + + command_name_ = argv[0]; + for (auto i = 1; i < argc; i++) { + const std::string arg(argv[i]); + + // normal options: e.g. --bundle=/data/sample/bundle --fullscreen + if (arg.length() > 2 && + arg.substr(0, 2).compare(kOptionStyleNormal) == 0) { + const size_t option_value_len = arg.find("=") != std::string::npos + ? 
 (arg.length() - arg.find("=")) + : 0; + const bool has_value = option_value_len != 0; + std::string option_name = + arg.substr(2, arg.length() - 2 - option_value_len); + + if (options_.find(option_name) == options_.end()) { + errors_.push_back("Not found option: " + option_name); + continue; + } + + if (!has_value && options_[option_name]->IsRequiredValue()) { + errors_.push_back(option_name + " requires an option value"); + continue; + } + + if (has_value && !options_[option_name]->IsRequiredValue()) { + errors_.push_back(option_name + " doesn't require an option value"); + continue; + } + + if (has_value) { + SetOptionValue(option_name, arg.substr(arg.find("=") + 1)); + } else { + SetOption(option_name); + } + } + // short options: e.g. -f /foo/file.txt -h 640 -abc + else if (arg.length() > 1 && + arg.substr(0, 1).compare(kOptionStyleShort) == 0) { + for (size_t j = 1; j < arg.length(); j++) { + const std::string option_name{argv[i][j]}; + + if (lut_short_options_.find(option_name) == + lut_short_options_.end()) { + errors_.push_back("Not found short option: " + option_name); + break; + } + + if (j == arg.length() - 1 && + options_[lut_short_options_[option_name]]->IsRequiredValue()) { + if (i == argc - 1) { + errors_.push_back("Invalid format option: " + option_name); + break; + } + SetOptionValue(lut_short_options_[option_name], argv[++i]); + } else { + SetOption(lut_short_options_[option_name]); + } + } + } else { + errors_.push_back("Invalid format option: " + arg); + } + } + + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (registration_order_options_[i]->IsRequired() && + !registration_order_options_[i]->HasValue()) { + errors_.push_back( + std::string(registration_order_options_[i]->GetName()) + + " option is mandatory."); + } + } + + return errors_.size() == 0; + } + + std::string GetError() { return errors_.size() > 0 ? 
errors_[0] : ""; } + + std::vector& GetErrors() { return errors_; } + + std::string ShowHelp() { + std::ostringstream ostream; + + ostream << "Usage: " << command_name_ << " "; + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (registration_order_options_[i]->IsRequired()) { + ostream << registration_order_options_[i]->GetHelpShortMessage() << " "; + } + } + ostream << std::endl; + + ostream << "Global options:" << std::endl; + size_t max_name_len = 0; + for (size_t i = 0; i < registration_order_options_.size(); i++) { + max_name_len = std::max( + max_name_len, registration_order_options_[i]->GetName().length()); + } + + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (!registration_order_options_[i]->GetShortName().empty()) { + ostream << kOptionStyleShort + << registration_order_options_[i]->GetShortName() << ", "; + } else { + ostream << std::string(4, ' '); + } + + size_t index_adjust = 0; + constexpr int kSpacerNum = 10; + auto need_value = registration_order_options_[i]->IsRequiredValue(); + ostream << kOptionStyleNormal + << registration_order_options_[i]->GetName(); + if (need_value) { + ostream << kOptionValueForHelpMessage; + index_adjust += std::string(kOptionValueForHelpMessage).length(); + } + ostream << std::string( + max_name_len + kSpacerNum - index_adjust - + registration_order_options_[i]->GetName().length(), + ' '); + ostream << registration_order_options_[i]->GetDescription() << std::endl; + } + + return ostream.str(); + } + + private: + struct ReaderInt { + int operator()(const std::string& value) { return std::stoi(value); } + }; + + struct ReaderString { + std::string operator()(const std::string& value) { return value; } + }; + + struct ReaderDouble { + double operator()(const std::string& value) { return std::stod(value); } + }; + + class Option { + public: + Option(const std::string& name, + const std::string& short_name, + const std::string& description, + bool required, + bool required_value) 
+ : name_(name), + short_name_(short_name), + description_(description), + is_required_(required), + is_required_value_(required_value), + value_set_(false){}; + virtual ~Option() = default; + + const std::string& GetName() const { return name_; }; + + const std::string& GetShortName() const { return short_name_; }; + + const std::string& GetDescription() const { return description_; }; + + const std::string GetHelpShortMessage() const { + std::string message = kOptionStyleNormal + name_; + if (is_required_value_) { + message += kOptionValueForHelpMessage; + } + return message; + } + + bool IsRequired() const { return is_required_; }; + + bool IsRequiredValue() const { return is_required_value_; }; + + void Set() { value_set_ = true; }; + + virtual bool SetValue(const std::string& value) = 0; + + virtual bool HasValue() const = 0; + + protected: + std::string name_; + std::string short_name_; + std::string description_; + bool is_required_; + bool is_required_value_; + bool value_set_; + }; + + template + class OptionValue : public Option { + public: + OptionValue(const std::string& name, + const std::string& short_name, + const std::string& description, + const T& default_value, + bool required, + bool required_value) + : Option(name, short_name, description, required, required_value), + default_value_(default_value), + value_(default_value){}; + virtual ~OptionValue() = default; + + bool SetValue(const std::string& value) { + value_ = Read(value); + value_set_ = true; + return true; + } + + bool HasValue() const { return value_set_; } + + const T& GetValue() const { return value_; } + + protected: + virtual T Read(const std::string& s) = 0; + + T default_value_; + T value_; + }; + + template + class OptionValueReader : public OptionValue { + public: + OptionValueReader(const std::string& name, + const std::string& short_name, + const std::string& description, + const T default_value, + F reader, + bool required, + bool required_value) + : OptionValue(name, + 
short_name, + description, + default_value, + required, + required_value), + reader_(reader) {} + ~OptionValueReader() = default; + + private: + T Read(const std::string& value) { return reader_(value); } + + F reader_; + }; + + bool SetOption(const std::string& name) { + auto itr = options_.find(name); + if (itr == options_.end()) { + errors_.push_back("Unknown option: " + name); + return false; + } + + itr->second->Set(); + return true; + } + + bool SetOptionValue(const std::string& name, const std::string& value) { + auto itr = options_.find(name); + if (itr == options_.end()) { + errors_.push_back("Unknown option: " + name); + return false; + } + + if (!itr->second->SetValue(value)) { + errors_.push_back("Invalid option value: " + name + " = " + value); + return false; + } + return true; + } + + std::string command_name_; + std::unordered_map> options_; + std::unordered_map lut_short_options_; + std::vector registration_order_options_; + std::vector errors_; +}; + +} // namespace commandline + +#endif // COMMAND_OPTIONS_ diff --git a/example/elinux/runner/flutter_embedder_options.h b/example/elinux/runner/flutter_embedder_options.h new file mode 100644 index 0000000000..41d0bd10a3 --- /dev/null +++ b/example/elinux/runner/flutter_embedder_options.h @@ -0,0 +1,203 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_EMBEDDER_OPTIONS_ +#define FLUTTER_EMBEDDER_OPTIONS_ + +#include + +#include + +#include "command_options.h" + +class FlutterEmbedderOptions { + public: + FlutterEmbedderOptions() { + options_.AddString("bundle", "b", "Path to Flutter project bundle", + "./bundle", true); + options_.AddWithoutValue("no-cursor", "n", "No mouse cursor/pointer", + false); + options_.AddInt("rotation", "r", + "Window rotation(degree) [0(default)|90|180|270]", 0, + false); + options_.AddDouble("text-scaling-factor", "x", "Text scaling factor", 1.0, + false); + options_.AddWithoutValue("enable-high-contrast", "i", + "Request that UI be rendered with darker colors.", + false); + options_.AddDouble("force-scale-factor", "s", + "Force a scale factor instead using default value", 1.0, + false); + options_.AddWithoutValue( + "async-vblank", "v", + "Don't sync to compositor redraw/vblank (eglSwapInterval 0)", false); + +#if defined(FLUTTER_TARGET_BACKEND_GBM) || \ + defined(FLUTTER_TARGET_BACKEND_EGLSTREAM) + // no more options. 
+#elif defined(FLUTTER_TARGET_BACKEND_X11) + options_.AddString("title", "t", "Window title", "Flutter", false); + options_.AddWithoutValue("fullscreen", "f", "Always full-screen display", + false); + options_.AddInt("width", "w", "Window width", 1280, false); + options_.AddInt("height", "h", "Window height", 720, false); +#else // FLUTTER_TARGET_BACKEND_WAYLAND + options_.AddString("title", "t", "Window title", "Flutter", false); + options_.AddString("app-id", "a", "XDG App ID", "dev.flutter.elinux", + false); + options_.AddWithoutValue("onscreen-keyboard", "k", + "Enable on-screen keyboard", false); + options_.AddWithoutValue("window-decoration", "d", + "Enable window decorations", false); + options_.AddWithoutValue("fullscreen", "f", "Always full-screen display", + false); + options_.AddInt("width", "w", "Window width", 1280, false); + options_.AddInt("height", "h", "Window height", 720, false); +#endif + } + ~FlutterEmbedderOptions() = default; + + bool Parse(int argc, char** argv) { + if (!options_.Parse(argc, argv)) { + std::cerr << options_.GetError() << std::endl; + std::cout << options_.ShowHelp(); + return false; + } + + bundle_path_ = options_.GetValue("bundle"); + use_mouse_cursor_ = !options_.Exist("no-cursor"); + if (options_.Exist("rotation")) { + switch (options_.GetValue("rotation")) { + case 90: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_90; + break; + case 180: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_180; + break; + case 270: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_270; + break; + default: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_0; + break; + } + } + + text_scale_factor_ = options_.GetValue("text-scaling-factor"); + enable_high_contrast_ = options_.Exist("enable-high-contrast"); + + if (options_.Exist("force-scale-factor")) { + is_force_scale_factor_ = true; + scale_factor_ = 
options_.GetValue("force-scale-factor"); + } else { + is_force_scale_factor_ = false; + scale_factor_ = 1.0; + } + + enable_vsync_ = !options_.Exist("async-vblank"); + +#if defined(FLUTTER_TARGET_BACKEND_GBM) || \ + defined(FLUTTER_TARGET_BACKEND_EGLSTREAM) + use_onscreen_keyboard_ = false; + use_window_decoration_ = false; + window_view_mode_ = flutter::FlutterViewController::ViewMode::kFullscreen; +#elif defined(FLUTTER_TARGET_BACKEND_X11) + use_onscreen_keyboard_ = false; + use_window_decoration_ = false; + window_title_ = options_.GetValue("title"); + window_view_mode_ = + options_.Exist("fullscreen") + ? flutter::FlutterViewController::ViewMode::kFullscreen + : flutter::FlutterViewController::ViewMode::kNormal; + window_width_ = options_.GetValue("width"); + window_height_ = options_.GetValue("height"); +#else // FLUTTER_TARGET_BACKEND_WAYLAND + window_title_ = options_.GetValue("title"); + window_app_id_ = options_.GetValue("app-id"); + use_onscreen_keyboard_ = options_.Exist("onscreen-keyboard"); + use_window_decoration_ = options_.Exist("window-decoration"); + window_view_mode_ = + options_.Exist("fullscreen") + ? 
flutter::FlutterViewController::ViewMode::kFullscreen + : flutter::FlutterViewController::ViewMode::kNormal; + window_width_ = options_.GetValue("width"); + window_height_ = options_.GetValue("height"); +#endif + + return true; + } + + std::string BundlePath() const { + return bundle_path_; + } + std::string WindowTitle() const { + return window_title_; + } + std::string WindowAppID() const { + return window_app_id_; + } + bool IsUseMouseCursor() const { + return use_mouse_cursor_; + } + bool IsUseOnscreenKeyboard() const { + return use_onscreen_keyboard_; + } + bool IsUseWindowDecoraation() const { + return use_window_decoration_; + } + flutter::FlutterViewController::ViewMode WindowViewMode() const { + return window_view_mode_; + } + int WindowWidth() const { + return window_width_; + } + int WindowHeight() const { + return window_height_; + } + flutter::FlutterViewController::ViewRotation WindowRotation() const { + return window_view_rotation_; + } + double TextScaleFactor() const { + return text_scale_factor_; + } + bool EnableHighContrast() const { + return enable_high_contrast_; + } + bool IsForceScaleFactor() const { + return is_force_scale_factor_; + } + double ScaleFactor() const { + return scale_factor_; + } + bool EnableVsync() const { + return enable_vsync_; + } + + private: + commandline::CommandOptions options_; + + std::string bundle_path_; + std::string window_title_; + std::string window_app_id_; + bool use_mouse_cursor_ = true; + bool use_onscreen_keyboard_ = false; + bool use_window_decoration_ = false; + flutter::FlutterViewController::ViewMode window_view_mode_ = + flutter::FlutterViewController::ViewMode::kNormal; + int window_width_ = 1280; + int window_height_ = 720; + flutter::FlutterViewController::ViewRotation window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_0; + bool is_force_scale_factor_; + double scale_factor_; + double text_scale_factor_; + bool enable_high_contrast_; + bool enable_vsync_; +}; + 
+#endif // FLUTTER_EMBEDDER_OPTIONS_ diff --git a/example/elinux/runner/flutter_window.cc b/example/elinux/runner/flutter_window.cc new file mode 100644 index 0000000000..0c5b6397d1 --- /dev/null +++ b/example/elinux/runner/flutter_window.cc @@ -0,0 +1,79 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "flutter_window.h" + +#include +#include +#include +#include + +#include "flutter/generated_plugin_registrant.h" + +FlutterWindow::FlutterWindow( + const flutter::FlutterViewController::ViewProperties view_properties, + const flutter::DartProject project) + : view_properties_(view_properties), project_(project) {} + +bool FlutterWindow::OnCreate() { + flutter_view_controller_ = std::make_unique( + view_properties_, project_); + + // Ensure that basic setup of the controller was successful. + if (!flutter_view_controller_->engine() || + !flutter_view_controller_->view()) { + return false; + } + + // Register Flutter plugins. + RegisterPlugins(flutter_view_controller_->engine()); + + return true; +} + +void FlutterWindow::OnDestroy() { + if (flutter_view_controller_) { + flutter_view_controller_ = nullptr; + } +} + +void FlutterWindow::Run() { + // Main loop. + auto next_flutter_event_time = + std::chrono::steady_clock::time_point::clock::now(); + while (flutter_view_controller_->view()->DispatchEvent()) { + // Wait until the next event. + { + auto wait_duration = + std::max(std::chrono::nanoseconds(0), + next_flutter_event_time - + std::chrono::steady_clock::time_point::clock::now()); + std::this_thread::sleep_for( + std::chrono::duration_cast(wait_duration)); + } + + // Processes any pending events in the Flutter engine, and returns the + // number of nanoseconds until the next scheduled event (or max, if none). 
+ auto wait_duration = flutter_view_controller_->engine()->ProcessMessages(); + { + auto next_event_time = std::chrono::steady_clock::time_point::max(); + if (wait_duration != std::chrono::nanoseconds::max()) { + next_event_time = + std::min(next_event_time, + std::chrono::steady_clock::time_point::clock::now() + + wait_duration); + } else { + // Wait for the next frame if no events. + auto frame_rate = flutter_view_controller_->view()->GetFrameRate(); + next_event_time = std::min( + next_event_time, + std::chrono::steady_clock::time_point::clock::now() + + std::chrono::milliseconds( + static_cast(std::trunc(1000000.0 / frame_rate)))); + } + next_flutter_event_time = + std::max(next_flutter_event_time, next_event_time); + } + } +} diff --git a/example/elinux/runner/flutter_window.h b/example/elinux/runner/flutter_window.h new file mode 100644 index 0000000000..20b9cb8882 --- /dev/null +++ b/example/elinux/runner/flutter_window.h @@ -0,0 +1,34 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_WINDOW_ +#define FLUTTER_WINDOW_ + +#include +#include + +#include + +class FlutterWindow { + public: + explicit FlutterWindow( + const flutter::FlutterViewController::ViewProperties view_properties, + const flutter::DartProject project); + ~FlutterWindow() = default; + + // Prevent copying. 
+ FlutterWindow(FlutterWindow const&) = delete; + FlutterWindow& operator=(FlutterWindow const&) = delete; + + bool OnCreate(); + void OnDestroy(); + void Run(); + + private: + flutter::FlutterViewController::ViewProperties view_properties_; + flutter::DartProject project_; + std::unique_ptr flutter_view_controller_; +}; + +#endif // FLUTTER_WINDOW_ diff --git a/example/elinux/runner/main.cc b/example/elinux/runner/main.cc new file mode 100644 index 0000000000..579daee650 --- /dev/null +++ b/example/elinux/runner/main.cc @@ -0,0 +1,53 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include +#include + +#include +#include +#include + +#include "flutter_embedder_options.h" +#include "flutter_window.h" + +int main(int argc, char** argv) { + FlutterEmbedderOptions options; + if (!options.Parse(argc, argv)) { + return 0; + } + + // Creates the Flutter project. + const auto bundle_path = options.BundlePath(); + const std::wstring fl_path(bundle_path.begin(), bundle_path.end()); + flutter::DartProject project(fl_path); + auto command_line_arguments = std::vector(); + project.set_dart_entrypoint_arguments(std::move(command_line_arguments)); + + flutter::FlutterViewController::ViewProperties view_properties = {}; + view_properties.width = options.WindowWidth(); + view_properties.height = options.WindowHeight(); + view_properties.view_mode = options.WindowViewMode(); + view_properties.view_rotation = options.WindowRotation(); + view_properties.title = options.WindowTitle(); + view_properties.app_id = options.WindowAppID(); + view_properties.use_mouse_cursor = options.IsUseMouseCursor(); + view_properties.use_onscreen_keyboard = options.IsUseOnscreenKeyboard(); + view_properties.use_window_decoration = options.IsUseWindowDecoraation(); + view_properties.text_scale_factor = options.TextScaleFactor(); + view_properties.enable_high_contrast = 
options.EnableHighContrast(); + view_properties.force_scale_factor = options.IsForceScaleFactor(); + view_properties.scale_factor = options.ScaleFactor(); + view_properties.enable_vsync = options.EnableVsync(); + + // The Flutter instance hosted by this window. + FlutterWindow window(view_properties, project); + if (!window.OnCreate()) { + return 0; + } + window.Run(); + window.OnDestroy(); + + return 0; +} diff --git a/example/ios/.gitignore b/example/ios/.gitignore new file mode 100644 index 0000000000..7a7f9873ad --- /dev/null +++ b/example/ios/.gitignore @@ -0,0 +1,34 @@ +**/dgph +*.mode1v3 +*.mode2v3 +*.moved-aside +*.pbxuser +*.perspectivev3 +**/*sync/ +.sconsign.dblite +.tags* +**/.vagrant/ +**/DerivedData/ +Icon? +**/Pods/ +**/.symlinks/ +profile +xcuserdata +**/.generated/ +Flutter/App.framework +Flutter/Flutter.framework +Flutter/Flutter.podspec +Flutter/Generated.xcconfig +Flutter/ephemeral/ +Flutter/app.flx +Flutter/app.zip +Flutter/flutter_assets/ +Flutter/flutter_export_environment.sh +ServiceDefinitions.json +Runner/GeneratedPluginRegistrant.* + +# Exceptions to above rules. +!default.mode1v3 +!default.mode2v3 +!default.pbxuser +!default.perspectivev3 diff --git a/example/ios/Flutter/AppFrameworkInfo.plist b/example/ios/Flutter/AppFrameworkInfo.plist new file mode 100644 index 0000000000..1dc6cf7652 --- /dev/null +++ b/example/ios/Flutter/AppFrameworkInfo.plist @@ -0,0 +1,26 @@ + + + + + CFBundleDevelopmentRegion + en + CFBundleExecutable + App + CFBundleIdentifier + io.flutter.flutter.app + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + App + CFBundlePackageType + FMWK + CFBundleShortVersionString + 1.0 + CFBundleSignature + ???? + CFBundleVersion + 1.0 + MinimumOSVersion + 13.0 + + diff --git a/example/ios/Flutter/Debug.xcconfig b/example/ios/Flutter/Debug.xcconfig new file mode 100644 index 0000000000..ec97fc6f30 --- /dev/null +++ b/example/ios/Flutter/Debug.xcconfig @@ -0,0 +1,2 @@ +#include? 
"Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" +#include "Generated.xcconfig" diff --git a/example/ios/Flutter/Release.xcconfig b/example/ios/Flutter/Release.xcconfig new file mode 100644 index 0000000000..c4855bfe20 --- /dev/null +++ b/example/ios/Flutter/Release.xcconfig @@ -0,0 +1,2 @@ +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" +#include "Generated.xcconfig" diff --git a/example/ios/Podfile b/example/ios/Podfile new file mode 100644 index 0000000000..ed16470330 --- /dev/null +++ b/example/ios/Podfile @@ -0,0 +1,41 @@ +# Uncomment this line to define a global platform for your project +platform :ios, '13.0' + +# CocoaPods analytics sends network stats synchronously affecting flutter build latency. +ENV['COCOAPODS_DISABLE_STATS'] = 'true' + +project 'Runner', { + 'Debug' => :debug, + 'Profile' => :release, + 'Release' => :release, +} + +def flutter_root + generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__) + unless File.exist?(generated_xcode_build_settings_path) + raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first" + end + + File.foreach(generated_xcode_build_settings_path) do |line| + matches = line.match(/FLUTTER_ROOT\=(.*)/) + return matches[1].strip if matches + end + raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get" +end + +require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root) + +flutter_ios_podfile_setup + +target 'Runner' do + flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__)) + target 'RunnerTests' do + inherit! 
:search_paths + end +end + +post_install do |installer| + installer.pods_project.targets.each do |target| + flutter_additional_ios_build_settings(target) + end +end diff --git a/example/ios/Runner.xcodeproj/project.pbxproj b/example/ios/Runner.xcodeproj/project.pbxproj new file mode 100644 index 0000000000..d974dc02cf --- /dev/null +++ b/example/ios/Runner.xcodeproj/project.pbxproj @@ -0,0 +1,608 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 54; + objects = { + +/* Begin PBXBuildFile section */ + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; + 331C80F4294D02FB00263BE5 /* RunnerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 331C80F3294D02FB00263BE5 /* RunnerTests.m */; }; + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; + 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; + 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + 331C80F5294D02FB00263BE5 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 97C146E61CF9000F007C117D /* Project object */; + proxyType = 1; + remoteGlobalIDString = 
97C146ED1CF9000F007C117D; + remoteInfo = Runner; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 9705A1C41CF9048500538489 /* Embed Frameworks */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + ); + name = "Embed Frameworks"; + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; + 331C80F1294D02FB00263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + 331C80F3294D02FB00263BE5 /* RunnerTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RunnerTests.m; sourceTree = ""; }; + 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; + 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; + 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; + 9740EEB21CF90195004384FC /* 
Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; + 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; + 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 97C146F21CF9000F007C117D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; + 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 331C80EE294D02FB00263BE5 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 97C146EB1CF9000F007C117D /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 331C80F2294D02FB00263BE5 /* RunnerTests */ = { + isa = PBXGroup; + children = ( + 331C80F3294D02FB00263BE5 /* 
RunnerTests.m */, + ); + path = RunnerTests; + sourceTree = ""; + }; + 9740EEB11CF90186004384FC /* Flutter */ = { + isa = PBXGroup; + children = ( + 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */, + 9740EEB21CF90195004384FC /* Debug.xcconfig */, + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, + 9740EEB31CF90195004384FC /* Generated.xcconfig */, + ); + name = Flutter; + sourceTree = ""; + }; + 97C146E51CF9000F007C117D = { + isa = PBXGroup; + children = ( + 9740EEB11CF90186004384FC /* Flutter */, + 97C146F01CF9000F007C117D /* Runner */, + 331C80F2294D02FB00263BE5 /* RunnerTests */, + 97C146EF1CF9000F007C117D /* Products */, + ); + sourceTree = ""; + }; + 97C146EF1CF9000F007C117D /* Products */ = { + isa = PBXGroup; + children = ( + 97C146EE1CF9000F007C117D /* Runner.app */, + 331C80F1294D02FB00263BE5 /* RunnerTests.xctest */, + ); + name = Products; + sourceTree = ""; + }; + 97C146F01CF9000F007C117D /* Runner */ = { + isa = PBXGroup; + children = ( + 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */, + 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */, + 97C146FA1CF9000F007C117D /* Main.storyboard */, + 97C146FD1CF9000F007C117D /* Assets.xcassets */, + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */, + 97C147021CF9000F007C117D /* Info.plist */, + 97C146F11CF9000F007C117D /* Supporting Files */, + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */, + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */, + ); + path = Runner; + sourceTree = ""; + }; + 97C146F11CF9000F007C117D /* Supporting Files */ = { + isa = PBXGroup; + children = ( + 97C146F21CF9000F007C117D /* main.m */, + ); + name = "Supporting Files"; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 331C80F0294D02FB00263BE5 /* RunnerTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = 331C80F7294D02FB00263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */; + buildPhases = ( + 331C80ED294D02FB00263BE5 /* Sources */, + 
331C80EE294D02FB00263BE5 /* Frameworks */, + 331C80EF294D02FB00263BE5 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + 331C80F6294D02FB00263BE5 /* PBXTargetDependency */, + ); + name = RunnerTests; + productName = RunnerTests; + productReference = 331C80F1294D02FB00263BE5 /* RunnerTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; + 97C146ED1CF9000F007C117D /* Runner */ = { + isa = PBXNativeTarget; + buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; + buildPhases = ( + 9740EEB61CF901F6004384FC /* Run Script */, + 97C146EA1CF9000F007C117D /* Sources */, + 97C146EB1CF9000F007C117D /* Frameworks */, + 97C146EC1CF9000F007C117D /* Resources */, + 9705A1C41CF9048500538489 /* Embed Frameworks */, + 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = Runner; + productName = Runner; + productReference = 97C146EE1CF9000F007C117D /* Runner.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 97C146E61CF9000F007C117D /* Project object */ = { + isa = PBXProject; + attributes = { + LastUpgradeCheck = 1300; + ORGANIZATIONNAME = ""; + TargetAttributes = { + 331C80F0294D02FB00263BE5 = { + CreatedOnToolsVersion = 14.0; + TestTargetID = 97C146ED1CF9000F007C117D; + }; + 97C146ED1CF9000F007C117D = { + CreatedOnToolsVersion = 7.3.1; + }; + }; + }; + buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 97C146E51CF9000F007C117D; + productRefGroup = 97C146EF1CF9000F007C117D /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 97C146ED1CF9000F007C117D /* Runner */, + 331C80F0294D02FB00263BE5 /* RunnerTests */, + ); + }; +/* End 
PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 331C80EF294D02FB00263BE5 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 97C146EC1CF9000F007C117D /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */, + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */, + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */, + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { + isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + "${TARGET_BUILD_DIR}/${INFOPLIST_PATH}", + ); + name = "Thin Binary"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; + }; + 9740EEB61CF901F6004384FC /* Run Script */ = { + isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "Run Script"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 331C80ED294D02FB00263BE5 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 331C80F4294D02FB00263BE5 /* RunnerTests.m in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 97C146EA1CF9000F007C117D /* Sources */ = { + isa = PBXSourcesBuildPhase; + 
buildActionMask = 2147483647; + files = ( + 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */, + 97C146F31CF9000F007C117D /* main.m in Sources */, + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + 331C80F6294D02FB00263BE5 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 97C146ED1CF9000F007C117D /* Runner */; + targetProxy = 331C80F5294D02FB00263BE5 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin PBXVariantGroup section */ + 97C146FA1CF9000F007C117D /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 97C146FB1CF9000F007C117D /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 97C147001CF9000F007C117D /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 249021D3217E4FDB00AE95B9 /* Profile */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + 
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SUPPORTED_PLATFORMS = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Profile; + }; + 249021D4217E4FDB00AE95B9 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + ENABLE_BITCODE = NO; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Profile; + }; + 331C80F8294D02FB00263BE5 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; + }; + name = Debug; + }; + 
331C80F9294D02FB00263BE5 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; + }; + name = Release; + }; + 331C80FA294D02FB00263BE5 /* Profile */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; + }; + name = Profile; + }; + 97C147031CF9000F007C117D /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + 
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 97C147041CF9000F007C117D /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + 
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SUPPORTED_PLATFORMS = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 97C147061CF9000F007C117D /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + ENABLE_BITCODE = NO; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Debug; + }; + 97C147071CF9000F007C117D /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + ENABLE_BITCODE = NO; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 331C80F7294D02FB00263BE5 /* Build configuration list for 
PBXNativeTarget "RunnerTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 331C80F8294D02FB00263BE5 /* Debug */, + 331C80F9294D02FB00263BE5 /* Release */, + 331C80FA294D02FB00263BE5 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 97C147031CF9000F007C117D /* Debug */, + 97C147041CF9000F007C117D /* Release */, + 249021D3217E4FDB00AE95B9 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 97C147061CF9000F007C117D /* Debug */, + 97C147071CF9000F007C117D /* Release */, + 249021D4217E4FDB00AE95B9 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 97C146E61CF9000F007C117D /* Project object */; +} diff --git a/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000000..919434a625 --- /dev/null +++ b/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000000..18d981003d --- /dev/null +++ b/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme 
b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme new file mode 100644 index 0000000000..f7213505ac --- /dev/null +++ b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -0,0 +1,98 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/ios/Runner.xcworkspace/contents.xcworkspacedata b/example/ios/Runner.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000000..1d526a16ed --- /dev/null +++ b/example/ios/Runner.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000000..18d981003d --- /dev/null +++ b/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/example/ios/Runner/AppDelegate.h b/example/ios/Runner/AppDelegate.h new file mode 100644 index 0000000000..36e21bbf9c --- /dev/null +++ b/example/ios/Runner/AppDelegate.h @@ -0,0 +1,6 @@ +#import +#import + +@interface AppDelegate : FlutterAppDelegate + +@end diff --git a/example/ios/Runner/AppDelegate.m b/example/ios/Runner/AppDelegate.m new file mode 100644 index 0000000000..70e83933db --- /dev/null +++ b/example/ios/Runner/AppDelegate.m @@ -0,0 +1,13 @@ +#import "AppDelegate.h" +#import "GeneratedPluginRegistrant.h" + +@implementation AppDelegate + +- (BOOL)application:(UIApplication *)application + didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { + [GeneratedPluginRegistrant registerWithRegistry:self]; + // Override point for customization after application launch. 
+ return [super application:application didFinishLaunchingWithOptions:launchOptions]; +} + +@end diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000000..d36b1fab2d --- /dev/null +++ b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,122 @@ +{ + "images" : [ + { + "size" : "20x20", + "idiom" : "iphone", + "filename" : "Icon-App-20x20@2x.png", + "scale" : "2x" + }, + { + "size" : "20x20", + "idiom" : "iphone", + "filename" : "Icon-App-20x20@3x.png", + "scale" : "3x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@1x.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@3x.png", + "scale" : "3x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "Icon-App-40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "Icon-App-40x40@3x.png", + "scale" : "3x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "Icon-App-60x60@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "Icon-App-60x60@3x.png", + "scale" : "3x" + }, + { + "size" : "20x20", + "idiom" : "ipad", + "filename" : "Icon-App-20x20@1x.png", + "scale" : "1x" + }, + { + "size" : "20x20", + "idiom" : "ipad", + "filename" : "Icon-App-20x20@2x.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "Icon-App-29x29@1x.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "Icon-App-29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "Icon-App-40x40@1x.png", + "scale" : "1x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : 
"Icon-App-40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "Icon-App-76x76@1x.png", + "scale" : "1x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "Icon-App-76x76@2x.png", + "scale" : "2x" + }, + { + "size" : "83.5x83.5", + "idiom" : "ipad", + "filename" : "Icon-App-83.5x83.5@2x.png", + "scale" : "2x" + }, + { + "size" : "1024x1024", + "idiom" : "ios-marketing", + "filename" : "Icon-App-1024x1024@1x.png", + "scale" : "1x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png new file mode 100644 index 0000000000..dc9ada4725 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png new file mode 100644 index 0000000000..7353c41ecf Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png new file mode 100644 index 0000000000..797d452e45 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png new file mode 100644 index 0000000000..6ed2d933e1 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png 
b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png new file mode 100644 index 0000000000..4cd7b0099c Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png new file mode 100644 index 0000000000..fe730945a0 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png new file mode 100644 index 0000000000..321773cd85 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png new file mode 100644 index 0000000000..797d452e45 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png new file mode 100644 index 0000000000..502f463a9b Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png new file mode 100644 index 0000000000..0ec3034392 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png 
b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png new file mode 100644 index 0000000000..0ec3034392 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png new file mode 100644 index 0000000000..e9f5fea27c Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png new file mode 100644 index 0000000000..84ac32ae7d Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png new file mode 100644 index 0000000000..8953cba090 Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png new file mode 100644 index 0000000000..0467bf12aa Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json new file mode 100644 index 0000000000..0bedcf2fd4 --- /dev/null +++ b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "idiom" : "universal", + "filename" : "LaunchImage.png", + "scale" : "1x" + }, + { + "idiom" : 
"universal", + "filename" : "LaunchImage@2x.png", + "scale" : "2x" + }, + { + "idiom" : "universal", + "filename" : "LaunchImage@3x.png", + "scale" : "3x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png new file mode 100644 index 0000000000..9da19eacad Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png differ diff --git a/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png new file mode 100644 index 0000000000..9da19eacad Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png new file mode 100644 index 0000000000..9da19eacad Binary files /dev/null and b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md new file mode 100644 index 0000000000..89c2725b70 --- /dev/null +++ b/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md @@ -0,0 +1,5 @@ +# Launch Screen Assets + +You can customize the launch screen with your own desired assets by replacing the image files in this directory. + +You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images. 
\ No newline at end of file diff --git a/example/ios/Runner/Base.lproj/LaunchScreen.storyboard b/example/ios/Runner/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 0000000000..f2e259c7c9 --- /dev/null +++ b/example/ios/Runner/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/ios/Runner/Base.lproj/Main.storyboard b/example/ios/Runner/Base.lproj/Main.storyboard new file mode 100644 index 0000000000..f3c28516fb --- /dev/null +++ b/example/ios/Runner/Base.lproj/Main.storyboard @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/ios/Runner/Info.plist b/example/ios/Runner/Info.plist new file mode 100644 index 0000000000..14b908826f --- /dev/null +++ b/example/ios/Runner/Info.plist @@ -0,0 +1,55 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleDisplayName + Flutter Webrtc Example + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + flutter_webrtc_example + CFBundlePackageType + APPL + CFBundleShortVersionString + $(FLUTTER_BUILD_NAME) + CFBundleSignature + ???? + CFBundleVersion + $(FLUTTER_BUILD_NUMBER) + LSRequiresIPhoneOS + + NSCameraUsageDescription + $(PRODUCT_NAME) Camera Usage! + NSMicrophoneUsageDescription + $(PRODUCT_NAME) Microphone Usage! 
+ UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UIViewControllerBasedStatusBarAppearance + + CADisableMinimumFrameDurationOnPhone + + UIApplicationSupportsIndirectInputEvents + + + diff --git a/example/ios/Runner/main.m b/example/ios/Runner/main.m new file mode 100644 index 0000000000..dff6597e45 --- /dev/null +++ b/example/ios/Runner/main.m @@ -0,0 +1,9 @@ +#import +#import +#import "AppDelegate.h" + +int main(int argc, char* argv[]) { + @autoreleasepool { + return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); + } +} diff --git a/example/ios/RunnerTests/RunnerTests.m b/example/ios/RunnerTests/RunnerTests.m new file mode 100644 index 0000000000..6d8b0bdeec --- /dev/null +++ b/example/ios/RunnerTests/RunnerTests.m @@ -0,0 +1,16 @@ +#import +#import +#import + +@interface RunnerTests : XCTestCase + +@end + +@implementation RunnerTests + +- (void)testExample { + // If you add code to the Runner application, consider adding tests here. + // See https://developer.apple.com/documentation/xctest for more information about using XCTest. 
+} + +@end diff --git a/example/lib/main.dart b/example/lib/main.dart index 29a2c353d0..64bfc07d0c 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -3,29 +3,48 @@ import 'dart:core'; import 'package:flutter/foundation.dart' show debugDefaultTargetPlatformOverride; import 'package:flutter/material.dart'; +import 'package:flutter_background/flutter_background.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:flutter_webrtc_example/src/capture_frame_sample.dart'; -import 'src/data_channel_sample.dart'; +import 'src/device_enumeration_sample.dart'; import 'src/get_display_media_sample.dart'; import 'src/get_user_media_sample.dart' - if (dart.library.html) 'src/get_user_media_sample_web.dart'; -import 'src/loopback_sample.dart'; + if (dart.library.js_interop) 'src/get_user_media_sample_web.dart'; +import 'src/loopback_data_channel_sample.dart'; +import 'src/loopback_sample_unified_tracks.dart'; import 'src/route_item.dart'; void main() { + WidgetsFlutterBinding.ensureInitialized(); if (WebRTC.platformIsDesktop) { debugDefaultTargetPlatformOverride = TargetPlatform.fuchsia; + } else if (WebRTC.platformIsAndroid) { + //startForegroundService(); } runApp(MyApp()); } +Future startForegroundService() async { + final androidConfig = FlutterBackgroundAndroidConfig( + notificationTitle: 'Title of the notification', + notificationText: 'Text of the notification', + notificationImportance: AndroidNotificationImportance.normal, + notificationIcon: AndroidResource( + name: 'background_icon', + defType: 'drawable'), // Default is ic_launcher from folder mipmap + ); + await FlutterBackground.initialize(androidConfig: androidConfig); + return FlutterBackground.enableBackgroundExecution(); +} + class MyApp extends StatefulWidget { @override _MyAppState createState() => _MyAppState(); } class _MyAppState extends State { - List items; + late List items; @override void initState() { @@ -47,6 +66,7 @@ class _MyAppState extends State { @override 
Widget build(BuildContext context) { return MaterialApp( + debugShowCheckedModeBanner: false, home: Scaffold( appBar: AppBar( title: Text('Flutter-WebRTC example'), @@ -71,6 +91,15 @@ class _MyAppState extends State { MaterialPageRoute( builder: (BuildContext context) => GetUserMediaSample())); }), + RouteItem( + title: 'Device Enumeration', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + DeviceEnumerationSample())); + }), RouteItem( title: 'GetDisplayMedia', push: (BuildContext context) { @@ -81,20 +110,30 @@ class _MyAppState extends State { GetDisplayMediaSample())); }), RouteItem( - title: 'LoopBack Sample', + title: 'LoopBack Sample (Unified Tracks)', push: (BuildContext context) { Navigator.push( context, MaterialPageRoute( - builder: (BuildContext context) => LoopBackSample())); + builder: (BuildContext context) => + LoopBackSampleUnifiedTracks())); + }), + RouteItem( + title: 'DataChannelLoopBackSample', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + DataChannelLoopBackSample())); }), RouteItem( - title: 'DataChannel', + title: 'Capture Frame', push: (BuildContext context) { Navigator.push( context, MaterialPageRoute( - builder: (BuildContext context) => DataChannelSample())); + builder: (BuildContext context) => CaptureFrameSample())); }), ]; } diff --git a/example/lib/src/capture_frame_sample.dart b/example/lib/src/capture_frame_sample.dart new file mode 100644 index 0000000000..56a3f77464 --- /dev/null +++ b/example/lib/src/capture_frame_sample.dart @@ -0,0 +1,57 @@ +import 'dart:typed_data'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class CaptureFrameSample extends StatefulWidget { + @override + State createState() => _CaptureFrameSample(); +} + +class _CaptureFrameSample extends State { + Uint8List? 
_data; + + void _captureFrame() async { + final stream = await navigator.mediaDevices.getUserMedia({ + 'audio': false, + 'video': true, + }); + + final track = stream.getVideoTracks().first; + final buffer = await track.captureFrame(); + + stream.getTracks().forEach((track) => track.stop()); + + setState(() { + _data = buffer.asUint8List(); + }); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Capture Frame'), + ), + floatingActionButton: FloatingActionButton( + onPressed: _captureFrame, + child: Icon(Icons.camera_alt_outlined), + ), + body: Builder(builder: (context) { + final data = _data; + + if (data == null) { + return Container(); + } + return Center( + child: Image.memory( + data, + fit: BoxFit.contain, + width: double.infinity, + height: double.infinity, + ), + ); + }), + ); + } +} diff --git a/example/lib/src/data_channel_sample.dart b/example/lib/src/data_channel_sample.dart deleted file mode 100644 index 79ebd3e666..0000000000 --- a/example/lib/src/data_channel_sample.dart +++ /dev/null @@ -1,175 +0,0 @@ -import 'dart:core'; -import 'dart:typed_data'; - -import 'package:flutter/material.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; - -class DataChannelSample extends StatefulWidget { - static String tag = 'data_channel_sample'; - - @override - _DataChannelSampleState createState() => _DataChannelSampleState(); -} - -class _DataChannelSampleState extends State { - RTCPeerConnection _peerConnection; - bool _inCalling = false; - - RTCDataChannelInit _dataChannelDict; - RTCDataChannel _dataChannel; - - String _sdp; - - @override - void initState() { - super.initState(); - } - - void _onSignalingState(RTCSignalingState state) { - print(state); - } - - void _onIceGatheringState(RTCIceGatheringState state) { - print(state); - } - - void _onIceConnectionState(RTCIceConnectionState state) { - print(state); - } - - void _onCandidate(RTCIceCandidate candidate) { - print('onCandidate: ' 
+ candidate.candidate); - _peerConnection.addCandidate(candidate); - setState(() { - _sdp += '\n'; - _sdp += candidate.candidate; - }); - } - - void _onRenegotiationNeeded() { - print('RenegotiationNeeded'); - } - - /// Send some sample messages and handle incoming messages. - void _onDataChannel(RTCDataChannel dataChannel) { - dataChannel.onMessage = (message) { - if (message.type == MessageType.text) { - print(message.text); - } else { - // do something with message.binary - } - }; - // or alternatively: - dataChannel.messageStream.listen((message) { - if (message.type == MessageType.text) { - print(message.text); - } else { - // do something with message.binary - } - }); - - dataChannel.send(RTCDataChannelMessage('Hello!')); - dataChannel.send(RTCDataChannelMessage.fromBinary(Uint8List(5))); - } - - // Platform messages are asynchronous, so we initialize in an async method. - void _makeCall() async { - var configuration = { - 'iceServers': [ - {'url': 'stun:stun.l.google.com:19302'}, - ] - }; - - final offerSdpConstraints = { - 'mandatory': { - 'OfferToReceiveAudio': false, - 'OfferToReceiveVideo': false, - }, - 'optional': [], - }; - - final loopbackConstraints = { - 'mandatory': {}, - 'optional': [ - {'DtlsSrtpKeyAgreement': true}, - ], - }; - - if (_peerConnection != null) return; - - try { - _peerConnection = - await createPeerConnection(configuration, loopbackConstraints); - - _peerConnection.onSignalingState = _onSignalingState; - _peerConnection.onIceGatheringState = _onIceGatheringState; - _peerConnection.onIceConnectionState = _onIceConnectionState; - _peerConnection.onIceCandidate = _onCandidate; - _peerConnection.onRenegotiationNeeded = _onRenegotiationNeeded; - - _dataChannelDict = RTCDataChannelInit(); - _dataChannelDict.id = 1; - _dataChannelDict.ordered = true; - _dataChannelDict.maxRetransmitTime = -1; - _dataChannelDict.maxRetransmits = -1; - _dataChannelDict.protocol = 'sctp'; - _dataChannelDict.negotiated = false; - - _dataChannel = await 
_peerConnection.createDataChannel( - 'dataChannel', _dataChannelDict); - _peerConnection.onDataChannel = _onDataChannel; - - var description = await _peerConnection.createOffer(offerSdpConstraints); - print(description.sdp); - await _peerConnection.setLocalDescription(description); - - _sdp = description.sdp; - //change for loopback. - //description.type = 'answer'; - //_peerConnection.setRemoteDescription(description); - } catch (e) { - print(e.toString()); - } - if (!mounted) return; - - setState(() { - _inCalling = true; - }); - } - - void _hangUp() async { - try { - await _dataChannel.close(); - await _peerConnection.close(); - _peerConnection = null; - } catch (e) { - print(e.toString()); - } - setState(() { - _inCalling = false; - }); - } - - @override - Widget build(BuildContext context) { - return Scaffold( - appBar: AppBar( - title: Text('Data Channel Test'), - ), - body: OrientationBuilder( - builder: (context, orientation) { - return Center( - child: Container( - child: _inCalling ? Text(_sdp) : Text('data channel test'), - ), - ); - }, - ), - floatingActionButton: FloatingActionButton( - onPressed: _inCalling ? _hangUp : _makeCall, - tooltip: _inCalling ? 'Hangup' : 'Call', - child: Icon(_inCalling ? 
Icons.call_end : Icons.phone), - ), - ); - } -} diff --git a/example/lib/src/device_enumeration_sample.dart b/example/lib/src/device_enumeration_sample.dart new file mode 100644 index 0000000000..4630001572 --- /dev/null +++ b/example/lib/src/device_enumeration_sample.dart @@ -0,0 +1,447 @@ +import 'dart:core'; +import 'package:collection/collection.dart'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:permission_handler/permission_handler.dart'; + +class VideoSize { + VideoSize(this.width, this.height); + + factory VideoSize.fromString(String size) { + final parts = size.split('x'); + return VideoSize(int.parse(parts[0]), int.parse(parts[1])); + } + final int width; + final int height; + + @override + String toString() { + return '$width x $height'; + } +} + +/* + * DeviceEnumerationSample + */ +class DeviceEnumerationSample extends StatefulWidget { + static String tag = 'DeviceEnumerationSample'; + + @override + _DeviceEnumerationSampleState createState() => + _DeviceEnumerationSampleState(); +} + +class _DeviceEnumerationSampleState extends State { + MediaStream? _localStream; + final RTCVideoRenderer _localRenderer = RTCVideoRenderer(); + final RTCVideoRenderer _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + + List _devices = []; + + List get audioInputs => + _devices.where((device) => device.kind == 'audioinput').toList(); + + List get audioOutputs => + _devices.where((device) => device.kind == 'audiooutput').toList(); + + List get videoInputs => + _devices.where((device) => device.kind == 'videoinput').toList(); + + String? _selectedVideoInputId; + String? _selectedAudioInputId; + + MediaDeviceInfo get selectedAudioInput => audioInputs.firstWhere( + (device) => device.deviceId == _selectedVideoInputId, + orElse: () => audioInputs.first); + + String? 
_selectedVideoFPS = '30'; + + VideoSize _selectedVideoSize = VideoSize(1280, 720); + + @override + void initState() { + super.initState(); + + initRenderers(); + loadDevices(); + navigator.mediaDevices.ondevicechange = (event) { + loadDevices(); + }; + } + + @override + void deactivate() { + super.deactivate(); + _stop(); + _localRenderer.dispose(); + _remoteRenderer.dispose(); + navigator.mediaDevices.ondevicechange = null; + } + + RTCPeerConnection? pc1; + RTCPeerConnection? pc2; + var senders = []; + + Future initPCs() async { + pc2 ??= await createPeerConnection({}); + pc1 ??= await createPeerConnection({}); + + pc2?.onTrack = (event) { + if (event.track.kind == 'video') { + _remoteRenderer.srcObject = event.streams[0]; + setState(() {}); + } + }; + + pc2?.onConnectionState = (state) { + print('connectionState $state'); + }; + + pc2?.onIceConnectionState = (state) { + print('iceConnectionState $state'); + }; + + await pc2?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, + init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + await pc2?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + + pc1!.onIceCandidate = (candidate) => pc2!.addCandidate(candidate); + pc2!.onIceCandidate = (candidate) => pc1!.addCandidate(candidate); + } + + Future _negotiate() async { + var offer = await pc1?.createOffer(); + await pc1?.setLocalDescription(offer!); + await pc2?.setRemoteDescription(offer!); + var answer = await pc2?.createAnswer(); + await pc2?.setLocalDescription(answer!); + await pc1?.setRemoteDescription(answer!); + } + + Future stopPCs() async { + await pc1?.close(); + await pc2?.close(); + pc1 = null; + pc2 = null; + } + + Future loadDevices() async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + //Ask for runtime permissions if necessary. 
+ var status = await Permission.bluetooth.request(); + if (status.isPermanentlyDenied) { + print('BLEpermdisabled'); + } + + status = await Permission.bluetoothConnect.request(); + if (status.isPermanentlyDenied) { + print('ConnectPermdisabled'); + } + } + final devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _devices = devices; + }); + } + + Future _selectVideoFps(String fps) async { + _selectedVideoFPS = fps; + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectVideoSize(String size) async { + _selectedVideoSize = VideoSize.fromString(size); + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectAudioInput(String? deviceId) async { + _selectedAudioInputId = deviceId; + if (!_inCalling) { + return; + } + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': { + if (_selectedAudioInputId != null && kIsWeb) + 'deviceId': _selectedAudioInputId, + if (_selectedAudioInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedAudioInputId} + ], + }, + 'video': false, + }); + + // replace track. + var newTrack = newLocalStream.getAudioTracks().first; + print('track.settings ' + newTrack.getSettings().toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'audio'); + await sender?.replaceTrack(newTrack); + } + + Future _selectAudioOutput(String? deviceId) async { + if (!_inCalling) { + return; + } + await _localRenderer.audioOutput(deviceId!); + } + + var _speakerphoneOn = false; + + Future _setSpeakerphoneOn() async { + _speakerphoneOn = !_speakerphoneOn; + await Helper.setSpeakerphoneOn(_speakerphoneOn); + setState(() {}); + } + + Future _selectVideoInput(String? deviceId) async { + _selectedVideoInputId = deviceId; + if (!_inCalling) { + return; + } + // 2) replace track. + // stop old track. 
+ _localRenderer.srcObject = null; + + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + await _localStream?.dispose(); + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': false, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + }, + }); + _localStream = newLocalStream; + _localRenderer.srcObject = _localStream; + // replace track. + var newTrack = _localStream?.getVideoTracks().first; + print('track.settings ' + newTrack!.getSettings().toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'video'); + var params = sender!.parameters; + print('params degradationPreference' + + params.degradationPreference.toString()); + params.degradationPreference = RTCDegradationPreference.MAINTAIN_RESOLUTION; + await sender.setParameters(params); + await sender.replaceTrack(newTrack); + } + + Future initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + Future _start() async { + try { + _localStream = await navigator.mediaDevices.getUserMedia({ + 'audio': true, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + }, + }); + _localRenderer.srcObject = _localStream; + _inCalling = true; + + await initPCs(); + + _localStream?.getTracks().forEach((track) async { + var rtpSender = await pc1?.addTrack(track, _localStream!); + print('track.settings ' + track.getSettings().toString()); + senders.add(rtpSender!); + }); + + 
await _negotiate(); + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + Future _stop() async { + try { + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + await _localStream?.dispose(); + _localStream = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + senders.clear(); + _inCalling = false; + await stopPCs(); + _speakerphoneOn = false; + await Helper.setSpeakerphoneOn(_speakerphoneOn); + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('DeviceEnumerationSample'), + actions: [ + PopupMenuButton( + onSelected: _selectAudioInput, + icon: Icon(Icons.settings_voice), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audioinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + if (!WebRTC.platformIsMobile) + PopupMenuButton( + onSelected: _selectAudioOutput, + icon: Icon(Icons.volume_down_alt), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + if (!kIsWeb && WebRTC.platformIsMobile) + IconButton( + disabledColor: Colors.grey, + onPressed: _setSpeakerphoneOn, + icon: Icon( + _speakerphoneOn ? 
Icons.speaker_phone : Icons.phone_android), + tooltip: 'Switch SpeakerPhone', + ), + PopupMenuButton( + onSelected: _selectVideoInput, + icon: Icon(Icons.switch_camera), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'videoinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + PopupMenuButton( + onSelected: _selectVideoFps, + icon: Icon(Icons.menu), + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + value: _selectedVideoFPS, + child: Text('Select FPS ($_selectedVideoFPS)'), + ), + PopupMenuDivider(), + ...['8', '15', '30', '60'] + .map((fps) => PopupMenuItem( + value: fps, + child: Text(fps), + )) + .toList() + ]; + }, + ), + PopupMenuButton( + onSelected: _selectVideoSize, + icon: Icon(Icons.screenshot_monitor), + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + value: _selectedVideoSize.toString(), + child: Text('Select Video Size ($_selectedVideoSize)'), + ), + PopupMenuDivider(), + ...['320x180', '640x360', '1280x720', '1920x1080'] + .map((fps) => PopupMenuItem( + value: fps, + child: Text(fps), + )) + .toList() + ]; + }, + ), + ], + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + width: MediaQuery.of(context).size.width, + color: Colors.white10, + child: Row( + children: [ + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer), + ), + ), + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_remoteRenderer), + ), + ), + ], + )), + ); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: () { + _inCalling ? _stop() : _start(); + }, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? 
Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/example/lib/src/get_display_media_sample.dart b/example/lib/src/get_display_media_sample.dart index 8ed60287ef..cb74238fb6 100644 --- a/example/lib/src/get_display_media_sample.dart +++ b/example/lib/src/get_display_media_sample.dart @@ -1,8 +1,10 @@ -import 'dart:async'; import 'dart:core'; +import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; +import 'package:flutter_background/flutter_background.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:flutter_webrtc_example/src/widgets/screen_select_dialog.dart'; /* * getDisplayMedia sample @@ -15,11 +17,10 @@ class GetDisplayMediaSample extends StatefulWidget { } class _GetDisplayMediaSampleState extends State { - MediaStream _localStream; - final _localRenderer = RTCVideoRenderer(); + MediaStream? _localStream; + final RTCVideoRenderer _localRenderer = RTCVideoRenderer(); bool _inCalling = false; - Timer _timer; - var _counter = 0; + DesktopCapturerSource? selected_source_; @override void initState() { @@ -31,9 +32,8 @@ class _GetDisplayMediaSampleState extends State { void deactivate() { super.deactivate(); if (_inCalling) { - _hangUp(); + _stop(); } - if (_timer != null) _timer.cancel(); _localRenderer.dispose(); } @@ -41,18 +41,73 @@ class _GetDisplayMediaSampleState extends State { await _localRenderer.initialize(); } - void handleTimer(Timer timer) async { - setState(() { - _counter++; - }); + Future selectScreenSourceDialog(BuildContext context) async { + if (WebRTC.platformIsDesktop) { + final source = await showDialog( + context: context, + builder: (context) => ScreenSelectDialog(), + ); + if (source != null) { + await _makeCall(source); + } + } else { + if (WebRTC.platformIsAndroid) { + // Android specific + Future requestBackgroundPermission([bool isRetry = false]) async { + // Required for android screenshare. 
+ try { + var hasPermissions = await FlutterBackground.hasPermissions; + if (!isRetry) { + const androidConfig = FlutterBackgroundAndroidConfig( + notificationTitle: 'Screen Sharing', + notificationText: 'LiveKit Example is sharing the screen.', + notificationImportance: AndroidNotificationImportance.normal, + notificationIcon: AndroidResource( + name: 'livekit_ic_launcher', defType: 'mipmap'), + ); + hasPermissions = await FlutterBackground.initialize( + androidConfig: androidConfig); + } + if (hasPermissions && + !FlutterBackground.isBackgroundExecutionEnabled) { + await FlutterBackground.enableBackgroundExecution(); + } + } catch (e) { + if (!isRetry) { + return await Future.delayed(const Duration(seconds: 1), + () => requestBackgroundPermission(true)); + } + print('could not publish video: $e'); + } + } + + await requestBackgroundPermission(); + } + await _makeCall(null); + } } // Platform messages are asynchronous, so we initialize in an async method. - Future _makeCall() async { - final mediaConstraints = {'audio': false, 'video': true}; + Future _makeCall(DesktopCapturerSource? source) async { + setState(() { + selected_source_ = source; + }); try { - var stream = await MediaDevices.getDisplayMedia(mediaConstraints); + var stream = + await navigator.mediaDevices.getDisplayMedia({ + 'video': selected_source_ == null + ? 
true + : { + 'deviceId': {'exact': selected_source_!.id}, + 'mandatory': {'frameRate': 30.0} + } + }); + stream.getVideoTracks()[0].onEnded = () { + print( + 'By adding a listener on onEnded you can: 1) catch stop video sharing on Web'); + }; + _localStream = stream; _localRenderer.srcObject = _localStream; } catch (e) { @@ -63,49 +118,59 @@ class _GetDisplayMediaSampleState extends State { setState(() { _inCalling = true; }); - - _timer = Timer.periodic(Duration(milliseconds: 100), handleTimer); } - void _hangUp() async { + Future _stop() async { try { - await _localStream.dispose(); + if (kIsWeb) { + _localStream?.getTracks().forEach((track) => track.stop()); + } + await _localStream?.dispose(); + _localStream = null; _localRenderer.srcObject = null; } catch (e) { print(e.toString()); } + } + + Future _hangUp() async { + await _stop(); setState(() { _inCalling = false; }); - _timer.cancel(); } @override Widget build(BuildContext context) { return Scaffold( appBar: AppBar( - title: Text('GetUserMedia API Test'), + title: Text('GetDisplayMedia source: ' + + (selected_source_ != null ? 
selected_source_!.name : '')), + actions: [], ), body: OrientationBuilder( builder: (context, orientation) { return Center( + child: Container( + width: MediaQuery.of(context).size.width, + color: Colors.white10, child: Stack(children: [ - Center( - child: Text('counter: ' + _counter.toString()), - ), - Container( - margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), - width: MediaQuery.of(context).size.width, - height: MediaQuery.of(context).size.height, - child: RTCVideoView(_localRenderer), - decoration: BoxDecoration(color: Colors.black54), - ) + if (_inCalling) + Container( + margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), + width: MediaQuery.of(context).size.width, + height: MediaQuery.of(context).size.height, + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer), + ) ]), - ); + )); }, ), floatingActionButton: FloatingActionButton( - onPressed: _inCalling ? _hangUp : _makeCall, + onPressed: () { + _inCalling ? _hangUp() : selectScreenSourceDialog(context); + }, tooltip: _inCalling ? 'Hangup' : 'Call', child: Icon(_inCalling ? Icons.call_end : Icons.phone), ), diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart index 25dd9fc5f9..9c2dea81d0 100644 --- a/example/lib/src/get_user_media_sample.dart +++ b/example/lib/src/get_user_media_sample.dart @@ -1,8 +1,11 @@ import 'dart:core'; import 'dart:io'; +import 'dart:math'; +import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:gallery_saver_plus/gallery_saver.dart'; import 'package:path_provider/path_provider.dart'; /* @@ -16,17 +19,26 @@ class GetUserMediaSample extends StatefulWidget { } class _GetUserMediaSampleState extends State { - MediaStream _localStream; + MediaStream? 
_localStream; final _localRenderer = RTCVideoRenderer(); bool _inCalling = false; bool _isTorchOn = false; - MediaRecorder _mediaRecorder; + bool _isFrontCamera = true; + MediaRecorder? _mediaRecorder; + String? _mediaRecorderFilePath; + bool get _isRec => _mediaRecorder != null; + List? _mediaDevicesList; + @override void initState() { super.initState(); initRenderers(); + navigator.mediaDevices.ondevicechange = (event) async { + print('++++++ ondevicechange ++++++'); + _mediaDevicesList = await navigator.mediaDevices.enumerateDevices(); + }; } @override @@ -36,6 +48,7 @@ class _GetUserMediaSampleState extends State { _hangUp(); } _localRenderer.dispose(); + navigator.mediaDevices.ondevicechange = null; } void initRenderers() async { @@ -49,8 +62,8 @@ class _GetUserMediaSampleState extends State { 'video': { 'mandatory': { 'minWidth': - '1280', // Provide your own width, height and frame rate here - 'minHeight': '720', + '640', // Provide your own width, height and frame rate here + 'minHeight': '480', 'minFrameRate': '30', }, 'facingMode': 'user', @@ -59,7 +72,8 @@ class _GetUserMediaSampleState extends State { }; try { - var stream = await MediaDevices.getUserMedia(mediaConstraints); + var stream = await navigator.mediaDevices.getUserMedia(mediaConstraints); + _mediaDevicesList = await navigator.mediaDevices.enumerateDevices(); _localStream = stream; _localRenderer.srcObject = _localStream; } catch (e) { @@ -74,45 +88,90 @@ class _GetUserMediaSampleState extends State { void _hangUp() async { try { - await _localStream.dispose(); + if (kIsWeb) { + _localStream?.getTracks().forEach((track) => track.stop()); + } + await _localStream?.dispose(); _localRenderer.srcObject = null; + setState(() { + _inCalling = false; + }); } catch (e) { print(e.toString()); } - setState(() { - _inCalling = false; - }); } void _startRecording() async { - if (Platform.isIOS) { - print('Recording is not available on iOS'); - return; - } + if (_localStream == null) throw 
Exception('Stream is not initialized'); // TODO(rostopira): request write storage permission - final storagePath = await getExternalStorageDirectory(); - final filePath = storagePath.path + '/webrtc_sample/test.mp4'; - _mediaRecorder = MediaRecorder(); + final timestamp = DateTime.now().millisecondsSinceEpoch; + + if (!(Platform.isAndroid || Platform.isIOS || Platform.isMacOS)) { + throw 'Unsupported platform'; + } + + final tempDir = await getTemporaryDirectory(); + if (!(await tempDir.exists())) { + await tempDir.create(recursive: true); + } + + _mediaRecorderFilePath = '${tempDir.path}/$timestamp.mp4'; + + if (_mediaRecorderFilePath == null) { + throw Exception('Can\'t find storagePath'); + } + + final file = File(_mediaRecorderFilePath!); + if (await file.exists()) { + await file.delete(); + } + _mediaRecorder = MediaRecorder(albumName: 'FlutterWebRTC'); setState(() {}); - await _localStream.getMediaTracks(); - final videoTrack = _localStream + + final videoTrack = _localStream! .getVideoTracks() .firstWhere((track) => track.kind == 'video'); - await _mediaRecorder.start( - filePath, + + await _mediaRecorder!.start( + _mediaRecorderFilePath!, videoTrack: videoTrack, + audioChannel: RecorderAudioChannel.OUTPUT, ); } void _stopRecording() async { + if (_mediaRecorderFilePath == null) { + return; + } + + // album name works only for android, for ios use gallerySaver await _mediaRecorder?.stop(); setState(() { _mediaRecorder = null; }); + + // this is only for ios, android already saves to albumName + await GallerySaver.saveVideo( + _mediaRecorderFilePath!, + albumName: 'FlutterWebRTC', + ); + + _mediaRecorderFilePath = null; + } + + void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) { + final point = Point( + details.localPosition.dx / constraints.maxWidth, + details.localPosition.dy / constraints.maxHeight, + ); + Helper.setFocusPoint(_localStream!.getVideoTracks().first, point); + 
Helper.setExposurePoint(_localStream!.getVideoTracks().first, point); } void _toggleTorch() async { - final videoTrack = _localStream + if (_localStream == null) throw Exception('Stream is not initialized'); + + final videoTrack = _localStream! .getVideoTracks() .firstWhere((track) => track.kind == 'video'); final has = await videoTrack.hasTorch(); @@ -126,27 +185,48 @@ class _GetUserMediaSampleState extends State { } } - void _toggleCamera() async { - final videoTrack = _localStream + void setZoom(double zoomLevel) async { + if (_localStream == null) throw Exception('Stream is not initialized'); + // await videoTrack.setZoom(zoomLevel); //Use it after published webrtc_interface 1.1.1 + + // before the release, use can just call native method directly. + final videoTrack = _localStream! .getVideoTracks() .firstWhere((track) => track.kind == 'video'); - await videoTrack.switchCamera(); + await Helper.setZoom(videoTrack, zoomLevel); + } + + void _switchCamera() async { + if (_localStream == null) throw Exception('Stream is not initialized'); + + final videoTrack = _localStream! + .getVideoTracks() + .firstWhere((track) => track.kind == 'video'); + await Helper.switchCamera(videoTrack); + setState(() { + _isFrontCamera = _isFrontCamera; + }); } void _captureFrame() async { - String filePath; - if (Platform.isAndroid) { - final storagePath = await getExternalStorageDirectory(); - filePath = storagePath.path + '/webrtc_sample/test.jpg'; - } else { - final storagePath = await getApplicationDocumentsDirectory(); - filePath = storagePath.path + '/test${DateTime.now()}.jpg'; - } + if (_localStream == null) throw Exception('Stream is not initialized'); - final videoTrack = _localStream + final videoTrack = _localStream! 
.getVideoTracks() .firstWhere((track) => track.kind == 'video'); - await videoTrack.captureFrame(filePath); + final frame = await videoTrack.captureFrame(); + await showDialog( + context: context, + builder: (context) => AlertDialog( + content: + Image.memory(frame.asUint8List(), height: 720, width: 1280), + actions: [ + TextButton( + onPressed: Navigator.of(context, rootNavigator: true).pop, + child: Text('OK'), + ) + ], + )); } @override @@ -162,7 +242,7 @@ class _GetUserMediaSampleState extends State { ), IconButton( icon: Icon(Icons.switch_video), - onPressed: _toggleCamera, + onPressed: _switchCamera, ), IconButton( icon: Icon(Icons.camera), @@ -172,20 +252,48 @@ class _GetUserMediaSampleState extends State { icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record), onPressed: _isRec ? _stopRecording : _startRecording, ), + PopupMenuButton( + onSelected: _selectAudioOutput, + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), ] : null, ), body: OrientationBuilder( builder: (context, orientation) { return Center( - child: Container( - margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), - width: MediaQuery.of(context).size.width, - height: MediaQuery.of(context).size.height, - child: RTCVideoView(_localRenderer, mirror: true), - decoration: BoxDecoration(color: Colors.black54), - ), - ); + child: Container( + margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), + width: MediaQuery.of(context).size.width, + height: MediaQuery.of(context).size.height, + decoration: BoxDecoration(color: Colors.black54), + child: LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) { + return GestureDetector( + onScaleStart: (details) {}, + onScaleUpdate: (details) { + if (details.scale != 1.0) { + setZoom(details.scale); 
+ } + }, + onTapDown: (TapDownDetails details) => + onViewFinderTap(details, constraints), + child: RTCVideoView(_localRenderer, mirror: false), + ); + }), + )); }, ), floatingActionButton: FloatingActionButton( @@ -195,4 +303,8 @@ class _GetUserMediaSampleState extends State { ), ); } + + void _selectAudioOutput(String deviceId) { + _localRenderer.audioOutput(deviceId); + } } diff --git a/example/lib/src/get_user_media_sample_web.dart b/example/lib/src/get_user_media_sample_web.dart index 9d1b50a976..7df81cafad 100644 --- a/example/lib/src/get_user_media_sample_web.dart +++ b/example/lib/src/get_user_media_sample_web.dart @@ -1,10 +1,12 @@ // ignore: uri_does_not_exist import 'dart:core'; -import 'dart:html' as html; +import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:web/web.dart' as web; + /* * getUserMedia sample */ @@ -16,20 +18,24 @@ class GetUserMediaSample extends StatefulWidget { } class _GetUserMediaSampleState extends State { - MediaStream _localStream; + MediaStream? _localStream; final _localRenderer = RTCVideoRenderer(); bool _inCalling = false; - MediaRecorder _mediaRecorder; + MediaRecorder? _mediaRecorder; + + List? _cameras; + bool get _isRec => _mediaRecorder != null; - List cameras; + List? 
cameras; @override void initState() { super.initState(); initRenderers(); - MediaDevices.getSources().then((md) { + + navigator.mediaDevices.enumerateDevices().then((md) { setState(() { - cameras = md.where((d) => d['kind'] == 'videoinput').toList(); + cameras = md.where((d) => d.kind == 'videoinput').toList(); }); }); } @@ -38,7 +44,7 @@ class _GetUserMediaSampleState extends State { void deactivate() { super.deactivate(); if (_inCalling) { - _hangUp(); + _stop(); } _localRenderer.dispose(); } @@ -62,7 +68,8 @@ class _GetUserMediaSampleState extends State { }; try { - var stream = await MediaDevices.getUserMedia(mediaConstraints); + var stream = await navigator.mediaDevices.getUserMedia(mediaConstraints); + _cameras = await Helper.cameras; _localStream = stream; _localRenderer.srcObject = _localStream; } catch (e) { @@ -75,22 +82,31 @@ class _GetUserMediaSampleState extends State { }); } - void _hangUp() async { + Future _stop() async { try { - await _localStream.dispose(); + if (kIsWeb) { + _localStream?.getTracks().forEach((track) => track.stop()); + } + await _localStream?.dispose(); + _localStream = null; _localRenderer.srcObject = null; } catch (e) { print(e.toString()); } + } + + void _hangUp() async { + await _stop(); setState(() { _inCalling = false; }); } void _startRecording() async { + if (_localStream == null) throw Exception('Can\'t record without a stream'); _mediaRecorder = MediaRecorder(); setState(() {}); - _mediaRecorder.startWeb(_localStream); + _mediaRecorder?.startWeb(_localStream!); } void _stopRecording() async { @@ -99,22 +115,25 @@ class _GetUserMediaSampleState extends State { _mediaRecorder = null; }); print(objectUrl); - html.window.open(objectUrl, '_blank'); + // ignore: unsafe_html + web.window.open(objectUrl, '_blank'); } void _captureFrame() async { - final videoTrack = _localStream + if (_localStream == null) throw Exception('Can\'t record without a stream'); + final videoTrack = _localStream! 
.getVideoTracks() .firstWhere((track) => track.kind == 'video'); final frame = await videoTrack.captureFrame(); await showDialog( context: context, builder: (context) => AlertDialog( - content: Image.network(frame, height: 720, width: 1280), + content: + Image.memory(frame.asUint8List(), height: 720, width: 1280), actions: [ - FlatButton( - child: Text('OK'), + TextButton( onPressed: Navigator.of(context, rootNavigator: true).pop, + child: Text('OK'), ) ], )); @@ -124,7 +143,7 @@ class _GetUserMediaSampleState extends State { Widget build(BuildContext context) { return Scaffold( appBar: AppBar( - title: Text('GetUserMedia API Test'), + title: Text('GetUserMedia API Test Web'), actions: _inCalling ? [ IconButton( @@ -135,6 +154,25 @@ class _GetUserMediaSampleState extends State { icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record), onPressed: _isRec ? _stopRecording : _startRecording, ), + PopupMenuButton( + onSelected: _switchCamera, + itemBuilder: (BuildContext context) { + if (_cameras != null) { + return _cameras!.map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } else { + return []; + } + }, + ), + // IconButton( + // icon: Icon(Icons.settings), + // onPressed: _switchCamera, + // ) ] : null, ), @@ -145,8 +183,8 @@ class _GetUserMediaSampleState extends State { margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), width: MediaQuery.of(context).size.width, height: MediaQuery.of(context).size.height, - child: RTCVideoView(_localRenderer, mirror: true), decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer, mirror: true), ), ); }, @@ -158,4 +196,12 @@ class _GetUserMediaSampleState extends State { ), ); } + + void _switchCamera(String deviceId) async { + if (_localStream == null) return; + + await Helper.switchCamera( + _localStream!.getVideoTracks()[0], deviceId, _localStream); + setState(() {}); + } } diff --git 
a/example/lib/src/loopback_data_channel_sample.dart b/example/lib/src/loopback_data_channel_sample.dart new file mode 100644 index 0000000000..ba02873324 --- /dev/null +++ b/example/lib/src/loopback_data_channel_sample.dart @@ -0,0 +1,158 @@ +import 'dart:async'; +import 'dart:core'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class DataChannelLoopBackSample extends StatefulWidget { + static String tag = 'data_channel_sample'; + + @override + _DataChannelLoopBackSampleState createState() => + _DataChannelLoopBackSampleState(); +} + +class _DataChannelLoopBackSampleState extends State { + RTCPeerConnection? _peerConnection1; + RTCPeerConnection? _peerConnection2; + RTCDataChannel? _dataChannel1; + RTCDataChannel? _dataChannel2; + String _dataChannel1Status = ''; + String _dataChannel2Status = ''; + + bool _inCalling = false; + + void _makeCall() async { + if (_peerConnection1 != null || _peerConnection2 != null) return; + + try { + _peerConnection1 = await createPeerConnection({'iceServers': []}); + _peerConnection2 = await createPeerConnection({'iceServers': []}); + + _peerConnection1!.onIceCandidate = (candidate) { + print('peerConnection1: onIceCandidate: ${candidate.candidate}'); + _peerConnection2!.addCandidate(candidate); + }; + + _peerConnection2!.onIceCandidate = (candidate) { + print('peerConnection2: onIceCandidate: ${candidate.candidate}'); + _peerConnection1!.addCandidate(candidate); + }; + + _dataChannel1 = await _peerConnection1!.createDataChannel( + 'peerConnection1-dc', RTCDataChannelInit()..id = 1); + + _peerConnection2!.onDataChannel = (channel) { + _dataChannel2 = channel; + _dataChannel2!.onDataChannelState = (state) { + setState(() { + _dataChannel2Status += '\ndataChannel2: state: ${state.toString()}'; + }); + }; + _dataChannel2!.onMessage = (data) async { + var bufferedAmount = await _dataChannel2!.getBufferedAmount(); + setState(() { + _dataChannel2Status += + '\ndataChannel2: Received 
message: ${data.text}, bufferedAmount: $bufferedAmount'; + }); + + await _dataChannel2!.send(RTCDataChannelMessage( + '(dataChannel2 ==> dataChannel1) Hello from dataChannel2 echo !!!')); + }; + }; + + _dataChannel1!.onDataChannelState = (state) { + setState(() { + _dataChannel1Status += '\ndataChannel1: state: ${state.toString()}'; + }); + if (state == RTCDataChannelState.RTCDataChannelOpen) { + _dataChannel1!.send(RTCDataChannelMessage( + '(dataChannel1 ==> dataChannel2) Hello from dataChannel1 !!!')); + } + }; + + _dataChannel1!.onMessage = (data) async { + var bufferedAmount = await _dataChannel2!.getBufferedAmount(); + _dataChannel1Status += + '\ndataChannel1: Received message: ${data.text}, bufferedAmount: $bufferedAmount'; + setState(() {}); + }; + + var offer = await _peerConnection1!.createOffer({}); + print('peerConnection1 offer: ${offer.sdp}'); + + await _peerConnection2!.setRemoteDescription(offer); + var answer = await _peerConnection2!.createAnswer(); + print('peerConnection2 answer: ${answer.sdp}'); + + await _peerConnection1!.setLocalDescription(offer); + await _peerConnection2!.setLocalDescription(answer); + + await _peerConnection1!.setRemoteDescription(answer); + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + void _hangUp() async { + try { + await _dataChannel1?.close(); + setState(() { + _dataChannel1Status += '\n _dataChannel1.close()'; + }); + await _dataChannel2?.close(); + await _peerConnection1?.close(); + await _peerConnection2?.close(); + _peerConnection1 = null; + _peerConnection2 = null; + } catch (e) { + print(e.toString()); + } + setState(() { + _inCalling = false; + }); + + Timer(const Duration(seconds: 1), () { + if (mounted) { + setState(() { + _dataChannel1Status = ''; + _dataChannel2Status = ''; + }); + } + }); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('Data Channel Test'), + ), + body: Center( 
+ child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Text('(caller)data channel 1:\n'), + Container( + child: Text(_dataChannel1Status), + ), + Text('\n\n(callee)data channel 2:\n'), + Container( + child: Text(_dataChannel2Status), + ), + ], + ), + ), + floatingActionButton: FloatingActionButton( + onPressed: _inCalling ? _hangUp : _makeCall, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart deleted file mode 100644 index ee60ab50f0..0000000000 --- a/example/lib/src/loopback_sample.dart +++ /dev/null @@ -1,230 +0,0 @@ -import 'dart:async'; -import 'dart:core'; - -import 'package:flutter/material.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; - -class LoopBackSample extends StatefulWidget { - static String tag = 'loopback_sample'; - - @override - _MyAppState createState() => _MyAppState(); -} - -class _MyAppState extends State { - MediaStream _localStream; - RTCPeerConnection _peerConnection; - final _localRenderer = RTCVideoRenderer(); - final _remoteRenderer = RTCVideoRenderer(); - bool _inCalling = false; - Timer _timer; - - @override - void initState() { - super.initState(); - initRenderers(); - } - - @override - void deactivate() { - super.deactivate(); - if (_inCalling) { - _hangUp(); - } - _localRenderer.dispose(); - _remoteRenderer.dispose(); - } - - void initRenderers() async { - await _localRenderer.initialize(); - await _remoteRenderer.initialize(); - } - - void handleStatsReport(Timer timer) async { - if (_peerConnection != null) { - var reports = await _peerConnection.getStats(); - reports.forEach((report) { - print('report => { '); - print(' id: ' + report.id + ','); - print(' type: ' + report.type + ','); - print(' timestamp: ${report.timestamp},'); - print(' values => {'); - report.values.forEach((key, value) { - print(' ' + key + ' : ' + value.toString() 
+ ', '); - }); - print(' }'); - print('}'); - }); - } - } - - void _onSignalingState(RTCSignalingState state) { - print(state); - } - - void _onIceGatheringState(RTCIceGatheringState state) { - print(state); - } - - void _onIceConnectionState(RTCIceConnectionState state) { - print(state); - } - - void _onAddStream(MediaStream stream) { - print('addStream: ' + stream.id); - _remoteRenderer.srcObject = stream; - } - - void _onRemoveStream(MediaStream stream) { - _remoteRenderer.srcObject = null; - } - - void _onCandidate(RTCIceCandidate candidate) { - print('onCandidate: ' + candidate.candidate); - _peerConnection.addCandidate(candidate); - } - - void _onRenegotiationNeeded() { - print('RenegotiationNeeded'); - } - - // Platform messages are asynchronous, so we initialize in an async method. - void _makeCall() async { - final mediaConstraints = { - 'audio': true, - 'video': { - 'mandatory': { - 'minWidth': - '1280', // Provide your own width, height and frame rate here - 'minHeight': '720', - 'minFrameRate': '30', - }, - 'facingMode': 'user', - 'optional': [], - } - }; - - var configuration = { - 'iceServers': [ - {'url': 'stun:stun.l.google.com:19302'}, - ] - }; - - final offerSdpConstraints = { - 'mandatory': { - 'OfferToReceiveAudio': true, - 'OfferToReceiveVideo': true, - }, - 'optional': [], - }; - - final loopbackConstraints = { - 'mandatory': {}, - 'optional': [ - {'DtlsSrtpKeyAgreement': false}, - ], - }; - - if (_peerConnection != null) return; - - try { - _localStream = await MediaDevices.getUserMedia(mediaConstraints); - _localRenderer.srcObject = _localStream; - _peerConnection = - await createPeerConnection(configuration, loopbackConstraints); - - _peerConnection.onSignalingState = _onSignalingState; - _peerConnection.onIceGatheringState = _onIceGatheringState; - _peerConnection.onIceConnectionState = _onIceConnectionState; - _peerConnection.onAddStream = _onAddStream; - _peerConnection.onRemoveStream = _onRemoveStream; - _peerConnection.onIceCandidate = 
_onCandidate; - _peerConnection.onRenegotiationNeeded = _onRenegotiationNeeded; - - await _peerConnection.addStream(_localStream); - var description = await _peerConnection.createOffer(offerSdpConstraints); - print(description.sdp); - await _peerConnection.setLocalDescription(description); - //change for loopback. - description.type = 'answer'; - await _peerConnection.setRemoteDescription(description); - } catch (e) { - print(e.toString()); - } - if (!mounted) return; - - _timer = Timer.periodic(Duration(seconds: 1), handleStatsReport); - - setState(() { - _inCalling = true; - }); - } - - void _hangUp() async { - try { - await _localStream.dispose(); - await _peerConnection.close(); - _peerConnection = null; - _localRenderer.srcObject = null; - _remoteRenderer.srcObject = null; - } catch (e) { - print(e.toString()); - } - setState(() { - _inCalling = false; - }); - _timer.cancel(); - } - - void _sendDtmf() async { - var dtmfSender = - _peerConnection.createDtmfSender(_localStream.getAudioTracks()[0]); - await dtmfSender.sendDtmf('123#'); - } - - @override - Widget build(BuildContext context) { - var widgets = [ - Expanded( - child: RTCVideoView(_localRenderer, mirror: true), - ), - Expanded( - child: RTCVideoView(_remoteRenderer), - ) - ]; - return Scaffold( - appBar: AppBar( - title: Text('LoopBack example'), - actions: _inCalling - ? [ - IconButton( - icon: Icon(Icons.keyboard), - onPressed: _sendDtmf, - ), - ] - : null, - ), - body: OrientationBuilder( - builder: (context, orientation) { - return Center( - child: Container( - decoration: BoxDecoration(color: Colors.black54), - child: orientation == Orientation.portrait - ? Column( - mainAxisAlignment: MainAxisAlignment.spaceEvenly, - children: widgets) - : Row( - mainAxisAlignment: MainAxisAlignment.spaceEvenly, - children: widgets), - ), - ); - }, - ), - floatingActionButton: FloatingActionButton( - onPressed: _inCalling ? _hangUp : _makeCall, - tooltip: _inCalling ? 
'Hangup' : 'Call', - child: Icon(_inCalling ? Icons.call_end : Icons.phone), - ), - ); - } -} diff --git a/example/lib/src/loopback_sample_unified_tracks.dart b/example/lib/src/loopback_sample_unified_tracks.dart new file mode 100644 index 0000000000..ba84cabbf2 --- /dev/null +++ b/example/lib/src/loopback_sample_unified_tracks.dart @@ -0,0 +1,903 @@ +import 'dart:async'; +import 'dart:core'; + +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class LoopBackSampleUnifiedTracks extends StatefulWidget { + static String tag = 'loopback_sample_unified_tracks'; + + @override + _MyAppState createState() => _MyAppState(); +} + +const List audioCodecList = [ + 'OPUS', + 'ISAC', + 'PCMA', + 'PCMU', + 'G729' +]; +const List videoCodecList = ['VP8', 'VP9', 'H264', 'AV1']; + +class _MyAppState extends State { + String audioDropdownValue = audioCodecList.first; + String videoDropdownValue = videoCodecList.first; + RTCRtpCapabilities? acaps; + RTCRtpCapabilities? vcaps; + MediaStream? _localStream; + RTCPeerConnection? _localPeerConnection; + RTCPeerConnection? _remotePeerConnection; + RTCRtpSender? _videoSender; + RTCRtpSender? _audioSender; + final _localRenderer = RTCVideoRenderer(); + final _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + bool _micOn = false; + bool _cameraOn = false; + bool _speakerOn = false; + bool _audioEncrypt = false; + bool _videoEncrypt = false; + bool _audioDecrypt = false; + bool _videoDecrypt = false; + List? _mediaDevicesList; + final FrameCryptorFactory _frameCyrptorFactory = frameCryptorFactory; + KeyProvider? _keySharedProvider; + final Map _frameCyrptors = {}; + Timer? 
_timer; + final _configuration = { + 'iceServers': [ + {'urls': 'stun:stun.l.google.com:19302'}, + ], + 'sdpSemantics': 'unified-plan', + 'encodedInsertableStreams': true, + }; + + final _constraints = { + 'mandatory': {}, + 'optional': [ + {'DtlsSrtpKeyAgreement': false}, + ], + }; + + final demoRatchetSalt = 'flutter-webrtc-ratchet-salt'; + + final aesKey = Uint8List.fromList([ + 200, + 244, + 58, + 72, + 214, + 245, + 86, + 82, + 192, + 127, + 23, + 153, + 167, + 172, + 122, + 234, + 140, + 70, + 175, + 74, + 61, + 11, + 134, + 58, + 185, + 102, + 172, + 17, + 11, + 6, + 119, + 253 + ]); + + @override + void initState() { + print('Init State'); + super.initState(); + + _refreshMediaDevices(); + navigator.mediaDevices.ondevicechange = (event) async { + print('++++++ ondevicechange ++++++'); + var devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _mediaDevicesList = devices; + }); + }; + } + + @override + void deactivate() { + super.deactivate(); + navigator.mediaDevices.ondevicechange = null; + _cleanUp(); + } + + Future _refreshMediaDevices() async { + var devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _mediaDevicesList = devices; + }); + } + + void _selectAudioOutput(String deviceId) async { + await _localRenderer.audioOutput(deviceId); + } + + void _selectAudioInput(String deviceId) async { + if (!WebRTC.platformIsWeb) { + await Helper.selectAudioInput(deviceId); + } + } + + void _cleanUp() async { + try { + await _localStream?.dispose(); + await _remotePeerConnection?.close(); + await _remotePeerConnection?.dispose(); + _remotePeerConnection = null; + await _localPeerConnection?.close(); + await _localPeerConnection?.dispose(); + _localPeerConnection = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + await _localRenderer.dispose(); + await _remoteRenderer.dispose(); + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + setState(() { + _inCalling = false; + 
_cameraOn = false; + _micOn = false; + }); + } + + void initRenderers() async { + print('Init Renderers'); + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + void initLocalConnection() async { + if (_localPeerConnection != null) return; + try { + var pc = await createPeerConnection(_configuration, _constraints); + + pc.onSignalingState = (state) async { + var state2 = await pc.getSignalingState(); + print('local pc: onSignalingState($state), state2($state2)'); + }; + + pc.onIceGatheringState = (state) async { + var state2 = await pc.getIceGatheringState(); + print('local pc: onIceGatheringState($state), state2($state2)'); + }; + pc.onIceConnectionState = (state) async { + var state2 = await pc.getIceConnectionState(); + print('local pc: onIceConnectionState($state), state2($state2)'); + }; + pc.onConnectionState = (state) async { + var state2 = await pc.getConnectionState(); + print('local pc: onConnectionState($state), state2($state2)'); + }; + + pc.onIceCandidate = _onLocalCandidate; + pc.onRenegotiationNeeded = _onLocalRenegotiationNeeded; + + _localPeerConnection = pc; + } catch (e) { + print(e.toString()); + } + } + + void _onLocalCandidate(RTCIceCandidate localCandidate) async { + print('onLocalCandidate: ${localCandidate.candidate}'); + try { + var candidate = RTCIceCandidate( + localCandidate.candidate!, + localCandidate.sdpMid!, + localCandidate.sdpMLineIndex!, + ); + await _remotePeerConnection!.addCandidate(candidate); + } catch (e) { + print( + 'Unable to add candidate ${localCandidate.candidate} to remote connection'); + } + } + + void _onRemoteCandidate(RTCIceCandidate remoteCandidate) async { + print('onRemoteCandidate: ${remoteCandidate.candidate}'); + try { + var candidate = RTCIceCandidate( + remoteCandidate.candidate!, + remoteCandidate.sdpMid!, + remoteCandidate.sdpMLineIndex!, + ); + await _localPeerConnection!.addCandidate(candidate); + } catch (e) { + print( + 'Unable to add candidate 
${remoteCandidate.candidate} to local connection'); + } + } + + void _onTrack(RTCTrackEvent event) async { + print('onTrack ${event.track.id}'); + + if (event.track.kind == 'video') { + setState(() { + _remoteRenderer.srcObject = event.streams[0]; + }); + } + } + + void _onLocalRenegotiationNeeded() { + print('LocalRenegotiationNeeded'); + } + + void _onRemoteRenegotiationNeeded() { + print('RemoteRenegotiationNeeded'); + } + + // Platform messages are asynchronous, so we initialize in an async method. + void _makeCall() async { + initRenderers(); + initLocalConnection(); + + var keyProviderOptions = KeyProviderOptions( + sharedKey: true, + ratchetSalt: Uint8List.fromList(demoRatchetSalt.codeUnits), + ratchetWindowSize: 16, + failureTolerance: -1, + ); + + _keySharedProvider ??= + await _frameCyrptorFactory.createDefaultKeyProvider(keyProviderOptions); + await _keySharedProvider?.setSharedKey(key: aesKey); + acaps = await getRtpSenderCapabilities('audio'); + print('sender audio capabilities: ${acaps!.toMap()}'); + + vcaps = await getRtpSenderCapabilities('video'); + print('sender video capabilities: ${vcaps!.toMap()}'); + + if (_remotePeerConnection != null) return; + + try { + var pc = await createPeerConnection(_configuration, _constraints); + + pc.onTrack = _onTrack; + + pc.onSignalingState = (state) async { + var state2 = await pc.getSignalingState(); + print('remote pc: onSignalingState($state), state2($state2)'); + }; + + pc.onIceGatheringState = (state) async { + var state2 = await pc.getIceGatheringState(); + print('remote pc: onIceGatheringState($state), state2($state2)'); + }; + pc.onIceConnectionState = (state) async { + var state2 = await pc.getIceConnectionState(); + print('remote pc: onIceConnectionState($state), state2($state2)'); + }; + pc.onConnectionState = (state) async { + var state2 = await pc.getConnectionState(); + print('remote pc: onConnectionState($state), state2($state2)'); + }; + + pc.onIceCandidate = _onRemoteCandidate; + 
pc.onRenegotiationNeeded = _onRemoteRenegotiationNeeded; + _remotePeerConnection = pc; + await _negotiate(); + } catch (e) { + print(e.toString()); + } + + if (!mounted) return; + setState(() { + _inCalling = true; + }); + } + + Future _negotiate() async { + final oaConstraints = { + 'mandatory': { + 'OfferToReceiveAudio': true, + 'OfferToReceiveVideo': true, + }, + 'optional': [], + }; + + if (_remotePeerConnection == null) return; + + var offer = await _localPeerConnection!.createOffer({}); + await _localPeerConnection!.setLocalDescription(offer); + var localDescription = await _localPeerConnection!.getLocalDescription(); + + await _remotePeerConnection!.setRemoteDescription(localDescription!); + var answer = await _remotePeerConnection!.createAnswer(oaConstraints); + await _remotePeerConnection!.setLocalDescription(answer); + var remoteDescription = await _remotePeerConnection!.getLocalDescription(); + + await _localPeerConnection!.setRemoteDescription(remoteDescription!); + } + + void _enableEncryption({bool video = false, bool enabled = true}) async { + var senders = await _localPeerConnection?.senders; + + var kind = video ? 'video' : 'audio'; + + senders?.forEach((element) async { + if (kind != element.track?.kind) return; + + var trackId = element.track?.id; + var id = kind + '_' + trackId! + '_sender'; + if (!_frameCyrptors.containsKey(id)) { + var frameCyrptor = + await _frameCyrptorFactory.createFrameCryptorForRtpSender( + participantId: id, + sender: element, + algorithm: Algorithm.kAesGcm, + keyProvider: _keySharedProvider!); + frameCyrptor.onFrameCryptorStateChanged = (participantId, state) => + print('EN onFrameCryptorStateChanged $participantId $state'); + _frameCyrptors[id] = frameCyrptor; + await frameCyrptor.setKeyIndex(0); + } + + var _frameCyrptor = _frameCyrptors[id]; + await _frameCyrptor?.setEnabled(enabled); + await _frameCyrptor?.updateCodec( + kind == 'video' ? 
videoDropdownValue : audioDropdownValue); + }); + } + + void _enableDecryption({bool video = false, bool enabled = true}) async { + var receivers = await _remotePeerConnection?.receivers; + var kind = video ? 'video' : 'audio'; + receivers?.forEach((element) async { + if (kind != element.track?.kind) return; + var trackId = element.track?.id; + var id = kind + '_' + trackId! + '_receiver'; + if (!_frameCyrptors.containsKey(id)) { + var frameCyrptor = + await _frameCyrptorFactory.createFrameCryptorForRtpReceiver( + participantId: id, + receiver: element, + algorithm: Algorithm.kAesGcm, + keyProvider: _keySharedProvider!); + frameCyrptor.onFrameCryptorStateChanged = (participantId, state) => + print('DE onFrameCryptorStateChanged $participantId $state'); + _frameCyrptors[id] = frameCyrptor; + await frameCyrptor.setKeyIndex(0); + } + + var _frameCyrptor = _frameCyrptors[id]; + await _frameCyrptor?.setEnabled(enabled); + await _frameCyrptor?.updateCodec( + kind == 'video' ? videoDropdownValue : audioDropdownValue); + }); + } + + void _hangUp() async { + try { + await _remotePeerConnection?.close(); + await _remotePeerConnection?.dispose(); + _remotePeerConnection = null; + _remoteRenderer.srcObject = null; + } catch (e) { + print(e.toString()); + } + setState(() { + _inCalling = false; + }); + } + + void _ratchetKey() async { + var newKey = await _keySharedProvider?.ratchetSharedKey(index: 0); + print('newKey $newKey'); + } + + Map _getMediaConstraints({audio = true, video = true}) { + return { + 'audio': audio ? true : false, + 'video': video + ? 
{ + 'mandatory': { + 'minWidth': '640', + 'minHeight': '480', + 'minFrameRate': '30', + }, + 'facingMode': 'user', + 'optional': [], + } + : false, + }; + } + + void _sendDtmf() async { + var dtmfSender = _audioSender?.dtmfSender; + await dtmfSender?.insertDTMF('123#'); + } + + void _startVideo() async { + var newStream = await navigator.mediaDevices + .getUserMedia(_getMediaConstraints(audio: false, video: true)); + if (_localStream != null) { + await _removeExistingVideoTrack(); + var tracks = newStream.getVideoTracks(); + for (var newTrack in tracks) { + await _localStream!.addTrack(newTrack); + } + } else { + _localStream = newStream; + } + + await _addOrReplaceVideoTracks(); + + var transceivers = await _localPeerConnection?.getTransceivers(); + transceivers?.forEach((transceiver) { + if (transceiver.sender.senderId != _videoSender?.senderId) return; + var codecs = vcaps?.codecs + ?.where((element) => element.mimeType + .toLowerCase() + .contains(videoDropdownValue.toLowerCase())) + .toList() ?? 
+ []; + transceiver.setCodecPreferences(codecs); + }); + await _negotiate(); + + setState(() { + _localRenderer.srcObject = _localStream; + _cameraOn = true; + }); + + _timer?.cancel(); + _timer = Timer.periodic(Duration(seconds: 1), (timer) async { + //handleStatsReport(timer); + }); + } + + void _stopVideo() async { + _frameCyrptors.removeWhere((key, value) { + if (key.startsWith('video')) { + value.dispose(); + return true; + } + return false; + }); + + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + + await _removeExistingVideoTrack(fromConnection: true); + await _negotiate(); + setState(() { + _localRenderer.srcObject = null; + // onMute/onEnded/onUnmute are not wired up so having to force this here + _remoteRenderer.srcObject = null; + _cameraOn = false; + }); + _timer?.cancel(); + _timer = null; + } + + void _startAudio() async { + var newStream = await navigator.mediaDevices + .getUserMedia(_getMediaConstraints(audio: true, video: false)); + + if (_localStream != null) { + await _removeExistingAudioTrack(); + for (var newTrack in newStream.getAudioTracks()) { + await _localStream!.addTrack(newTrack); + } + } else { + _localStream = newStream; + } + + await _addOrReplaceAudioTracks(); + var transceivers = await _localPeerConnection?.getTransceivers(); + transceivers?.forEach((transceiver) { + if (transceiver.sender.senderId != _audioSender?.senderId) return; + var codecs = acaps?.codecs + ?.where((element) => element.mimeType + .toLowerCase() + .contains(audioDropdownValue.toLowerCase())) + .toList() ?? 
+ []; + transceiver.setCodecPreferences(codecs); + }); + await _negotiate(); + setState(() { + _micOn = true; + }); + } + + void _stopAudio() async { + _frameCyrptors.removeWhere((key, value) { + if (key.startsWith('audio')) { + value.dispose(); + return true; + } + return false; + }); + await _removeExistingAudioTrack(fromConnection: true); + await _negotiate(); + setState(() { + _micOn = false; + }); + } + + void _switchSpeaker() async { + setState(() { + _speakerOn = !_speakerOn; + if (!WebRTC.platformIsWeb) { + Helper.setSpeakerphoneOn(_speakerOn); + } + }); + } + + void handleStatsReport(Timer timer) async { + if (_remotePeerConnection != null && _remoteRenderer.srcObject != null) { + var reports = await _remotePeerConnection + ?.getStats(_remoteRenderer.srcObject!.getVideoTracks().first); + reports?.forEach((report) { + print('report => { '); + print(' id: ' + report.id + ','); + print(' type: ' + report.type + ','); + print(' timestamp: ${report.timestamp},'); + print(' values => {'); + report.values.forEach((key, value) { + print(' ' + key + ' : ' + value.toString() + ', '); + }); + print(' }'); + print('}'); + }); + + /* + var senders = await _peerConnection.getSenders(); + var canInsertDTMF = await senders[0].dtmfSender.canInsertDtmf(); + print(canInsertDTMF); + await senders[0].dtmfSender.insertDTMF('1'); + var receivers = await _peerConnection.getReceivers(); + print(receivers[0].track.id); + var transceivers = await _peerConnection.getTransceivers(); + print(transceivers[0].sender.parameters); + print(transceivers[0].receiver.parameters); + */ + } + } + + Future _removeExistingVideoTrack({bool fromConnection = false}) async { + var tracks = _localStream!.getVideoTracks(); + for (var i = tracks.length - 1; i >= 0; i--) { + var track = tracks[i]; + if (fromConnection) { + await _connectionRemoveTrack(track); + } + try { + await _localStream!.removeTrack(track); + } catch (e) { + print(e.toString()); + } + await track.stop(); + } + } + + Future 
_removeExistingAudioTrack({bool fromConnection = false}) async { + var tracks = _localStream!.getAudioTracks(); + for (var i = tracks.length - 1; i >= 0; i--) { + var track = tracks[i]; + if (fromConnection) { + await _connectionRemoveTrack(track); + } + try { + await _localStream!.removeTrack(track); + } catch (e) { + print(e.toString()); + } + await track.stop(); + } + } + + Future _addOrReplaceVideoTracks() async { + for (var track in _localStream!.getVideoTracks()) { + await _connectionAddTrack(track, _localStream!); + } + } + + Future _addOrReplaceAudioTracks() async { + for (var track in _localStream!.getAudioTracks()) { + await _connectionAddTrack(track, _localStream!); + } + } + + Future _connectionAddTrack( + MediaStreamTrack track, MediaStream stream) async { + var sender = track.kind == 'video' ? _videoSender : _audioSender; + if (sender != null) { + print('Have a Sender of kind:${track.kind}'); + var trans = await _getSendersTransceiver(sender.senderId); + if (trans != null) { + print('Setting direction and replacing track with new track'); + await trans.setDirection(TransceiverDirection.SendOnly); + await trans.sender.replaceTrack(track); + } + } else { + if (track.kind == 'video') { + _videoSender = await _localPeerConnection!.addTrack(track, stream); + } else { + _audioSender = await _localPeerConnection!.addTrack(track, stream); + } + } + } + + Future _connectionRemoveTrack(MediaStreamTrack track) async { + var sender = track.kind == 'video' ? _videoSender : _audioSender; + if (sender != null) { + print('Have a Sender of kind:${track.kind}'); + var trans = await _getSendersTransceiver(sender.senderId); + if (trans != null) { + print('Setting direction and replacing track with null'); + await trans.setDirection(TransceiverDirection.Inactive); + await trans.sender.replaceTrack(null); + } + } + } + + Future _getSendersTransceiver(String senderId) async { + RTCRtpTransceiver? 
foundTrans; + var trans = await _localPeerConnection!.getTransceivers(); + for (var tran in trans) { + if (tran.sender.senderId == senderId) { + foundTrans = tran; + break; + } + } + return foundTrans; + } + + @override + Widget build(BuildContext context) { + var widgets = [ + Expanded( + child: Container( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + Row( + children: [ + Text('audio codec:'), + DropdownButton( + value: audioDropdownValue, + icon: const Icon( + Icons.arrow_drop_down, + color: Colors.blue, + ), + elevation: 16, + style: const TextStyle(color: Colors.blue), + underline: Container( + height: 2, + color: Colors.blueAccent, + ), + onChanged: (String? value) { + // This is called when the user selects an item. + setState(() { + audioDropdownValue = value!; + }); + }, + items: audioCodecList + .map>((String value) { + return DropdownMenuItem( + value: value, + child: Text(value), + ); + }).toList(), + ), + Text('video codec:'), + DropdownButton( + value: videoDropdownValue, + icon: const Icon( + Icons.arrow_drop_down, + color: Colors.blue, + ), + elevation: 16, + style: const TextStyle(color: Colors.blue), + underline: Container( + height: 2, + color: Colors.blueAccent, + ), + onChanged: (String? value) { + // This is called when the user selects an item. 
+ setState(() { + videoDropdownValue = value!; + }); + }, + items: videoCodecList + .map>((String value) { + return DropdownMenuItem( + value: value, + child: Text(value), + ); + }).toList(), + ), + TextButton(onPressed: _ratchetKey, child: Text('Ratchet Key')) + ], + ), + Row( + children: [ + Text('audio encrypt:'), + Switch( + value: _audioEncrypt, + onChanged: (value) { + setState(() { + _audioEncrypt = value; + _enableEncryption(video: false, enabled: _audioEncrypt); + }); + }), + Text('video encrypt:'), + Switch( + value: _videoEncrypt, + onChanged: (value) { + setState(() { + _videoEncrypt = value; + _enableEncryption(video: true, enabled: _videoEncrypt); + }); + }) + ], + ), + Expanded( + child: RTCVideoView(_localRenderer, mirror: true), + ), + ], + )), + ), + Expanded( + child: Container( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + Row( + children: [ + Text('audio decrypt:'), + Switch( + value: _audioDecrypt, + onChanged: (value) { + setState(() { + _audioDecrypt = value; + _enableDecryption(video: false, enabled: _audioDecrypt); + }); + }), + Text('video decrypt:'), + Switch( + value: _videoDecrypt, + onChanged: (value) { + setState(() { + _videoDecrypt = value; + _enableDecryption(video: true, enabled: _videoDecrypt); + }); + }) + ], + ), + Expanded( + child: RTCVideoView(_remoteRenderer), + ), + ], + )), + ) + ]; + return Scaffold( + appBar: AppBar( + title: Text('LoopBack Unified Tracks example'), + actions: [ + IconButton( + icon: Icon(Icons.keyboard), + onPressed: _sendDtmf, + ), + PopupMenuButton( + onSelected: _selectAudioInput, + icon: Icon(Icons.settings_voice), + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! 
+ .where((device) => device.kind == 'audioinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), + PopupMenuButton( + onSelected: _selectAudioOutput, + icon: Icon(Icons.volume_down_alt), + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), + ], + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Stack( + children: [ + Container( + decoration: BoxDecoration(color: Colors.black54), + child: orientation == Orientation.portrait + ? Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets) + : Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets), + ), + Align( + alignment: Alignment.bottomCenter, + child: OverflowBar( + children: [ + FloatingActionButton( + heroTag: null, + backgroundColor: + _micOn ? null : Theme.of(context).disabledColor, + tooltip: _micOn ? 'Stop mic' : 'Start mic', + onPressed: _micOn ? _stopAudio : _startAudio, + child: Icon(_micOn ? Icons.mic : Icons.mic_off)), + FloatingActionButton( + heroTag: null, + backgroundColor: + _speakerOn ? null : Theme.of(context).disabledColor, + tooltip: _speakerOn ? 'Stop speaker' : 'Start speaker', + onPressed: _switchSpeaker, + child: Icon(_speakerOn + ? Icons.speaker_phone + : Icons.phone_in_talk)), + FloatingActionButton( + heroTag: null, + backgroundColor: + _cameraOn ? null : Theme.of(context).disabledColor, + tooltip: _cameraOn ? 'Stop camera' : 'Start camera', + onPressed: _cameraOn ? _stopVideo : _startVideo, + child: + Icon(_cameraOn ? Icons.videocam : Icons.videocam_off), + ), + FloatingActionButton( + heroTag: null, + backgroundColor: + _inCalling ? 
null : Theme.of(context).disabledColor, + onPressed: _inCalling ? _hangUp : _makeCall, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ) + ], + ), + ), + ], + ); + }, + ), + ); + } +} diff --git a/example/lib/src/loopback_sample_with_get_stats.dart b/example/lib/src/loopback_sample_with_get_stats.dart new file mode 100644 index 0000000000..2e457a69d0 --- /dev/null +++ b/example/lib/src/loopback_sample_with_get_stats.dart @@ -0,0 +1,158 @@ +import 'dart:core'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class LoopBackSampleWithGetStats extends StatefulWidget { + static String tag = 'loopback_sample_with_get_stats'; + + @override + _MyAppState createState() => _MyAppState(); +} + +class _MyAppState extends State { + MediaStream? _localStream; + RTCPeerConnection? _senderPc, _receiverPc; + + final _localRenderer = RTCVideoRenderer(); + final _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + + @override + void initState() { + super.initState(); + initRenderers(); + } + + @override + void deactivate() { + super.deactivate(); + _disconnect(); + _localRenderer.dispose(); + _remoteRenderer.dispose(); + } + + void initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + // Platform messages are asynchronous, so we initialize in an async method. 
+ void _connect() async { + if (_inCalling) { + return; + } + + try { + _senderPc ??= + await createPeerConnection({'sdpSemantics': 'unified-plan'}); + + _receiverPc ??= + await createPeerConnection({'sdpSemantics': 'unified-plan'}); + + _senderPc!.onIceCandidate = (candidate) { + _receiverPc!.addCandidate(candidate); + }; + + _receiverPc!.onIceCandidate = (candidate) { + _senderPc!.addCandidate(candidate); + }; + + _receiverPc?.onAddTrack = (stream, track) { + _remoteRenderer.srcObject = stream; + }; + + // get user media stream + _localStream = await navigator.mediaDevices + .getUserMedia({'audio': true, 'video': true}); + _localRenderer.srcObject = _localStream; + + _localStream!.getTracks().forEach((track) { + _senderPc!.addTrack(track, _localStream!); + }); + + var offer = await _senderPc?.createOffer(); + + await _receiverPc?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + await _receiverPc?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + + await _senderPc?.setLocalDescription(offer!); + await _receiverPc?.setRemoteDescription(offer!); + var answer = await _receiverPc?.createAnswer({}); + await _receiverPc?.setLocalDescription(answer!); + await _senderPc?.setRemoteDescription(answer!); + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + void _disconnect() async { + if (!_inCalling) { + return; + } + try { + await _localStream?.dispose(); + await _senderPc?.close(); + _senderPc = null; + await _receiverPc?.close(); + _receiverPc = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + setState(() { + _inCalling = false; + }); + } + + @override + Widget build(BuildContext context) { + var widgets = [ + Expanded( + child: 
RTCVideoView(_localRenderer, mirror: true), + ), + Expanded( + child: RTCVideoView(_remoteRenderer), + ) + ]; + return Scaffold( + appBar: AppBar( + title: Text('LoopBack with getStats'), + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + decoration: BoxDecoration(color: Colors.black54), + child: orientation == Orientation.portrait + ? Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets) + : Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets), + ), + ); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: _inCalling ? _disconnect : _connect, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/example/lib/src/route_item.dart b/example/lib/src/route_item.dart index 26b6b0bf1a..fd35cb9a85 100644 --- a/example/lib/src/route_item.dart +++ b/example/lib/src/route_item.dart @@ -4,12 +4,12 @@ typedef RouteCallback = void Function(BuildContext context); class RouteItem { RouteItem({ - @required this.title, + required this.title, this.subtitle, this.push, }); final String title; - final String subtitle; - final RouteCallback push; + final String? subtitle; + final RouteCallback? 
push; } diff --git a/example/lib/src/step-by-step-tutorial.txt b/example/lib/src/step-by-step-tutorial.txt new file mode 100644 index 0000000000..da7349c527 --- /dev/null +++ b/example/lib/src/step-by-step-tutorial.txt @@ -0,0 +1,21 @@ +* get user media +* get display media audio/video, audio only, video only +* get sources/change audio input/output +* audio/video loopback simple +* getStats +* replace track in calling, turn on/off video or audio +* set set codec preferences +* simulcast sender +* send dtmf +* ice restart +* muiltiple tracks on one peerconnection + +data channel +* data channel loopback simple +* transfer a file/data through data channel + +Insertable Streams: +* frame crypto (e2ee) +* frame processing (e.g. face detection, object detection, etc) +* custom audio/video source from image, or file +* capture audioFrame/videoFrame to file or image \ No newline at end of file diff --git a/example/lib/src/utils.dart b/example/lib/src/utils.dart new file mode 100644 index 0000000000..5cb6027f92 --- /dev/null +++ b/example/lib/src/utils.dart @@ -0,0 +1,124 @@ +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; + +void setPreferredCodec(RTCSessionDescription description, + {String audio = 'opus', String video = 'vp8'}) { + var capSel = CodecCapabilitySelector(description.sdp!); + var acaps = capSel.getCapabilities('audio'); + if (acaps != null) { + acaps.codecs = acaps.codecs + .where((e) => (e['codec'] as String).toLowerCase() == audio) + .toList(); + acaps.setCodecPreferences('audio', acaps.codecs); + capSel.setCapabilities(acaps); + } + + var vcaps = capSel.getCapabilities('video'); + if (vcaps != null) { + vcaps.codecs = vcaps.codecs + .where((e) => (e['codec'] as String).toLowerCase() == video) + .toList(); + vcaps.setCodecPreferences('video', vcaps.codecs); + capSel.setCapabilities(vcaps); + } + description.sdp = capSel.sdp(); +} + +class CodecCapability { + CodecCapability( + 
this.kind, this.payloads, this.codecs, this.fmtp, this.rtcpFb) { + codecs.forEach((element) { + element['orign_payload'] = element['payload']; + }); + } + String kind; + List rtcpFb; + List fmtp; + List payloads; + List codecs; + bool setCodecPreferences(String kind, List? newCodecs) { + if (newCodecs == null) { + return false; + } + var newRtcpFb = []; + var newFmtp = []; + var newPayloads = []; + newCodecs.forEach((element) { + var orign_payload = element['orign_payload'] as int; + var payload = element['payload'] as int; + // change payload type + if (payload != orign_payload) { + newRtcpFb.addAll(rtcpFb.where((e) { + if (e['payload'] == orign_payload) { + e['payload'] = payload; + return true; + } + return false; + }).toList()); + newFmtp.addAll(fmtp.where((e) { + if (e['payload'] == orign_payload) { + e['payload'] = payload; + return true; + } + return false; + }).toList()); + if (payloads.contains('$orign_payload')) { + newPayloads.add('$payload'); + } + } else { + newRtcpFb.addAll(rtcpFb.where((e) => e['payload'] == payload).toList()); + newFmtp.addAll(fmtp.where((e) => e['payload'] == payload).toList()); + newPayloads.addAll(payloads.where((e) => e == '$payload').toList()); + } + }); + rtcpFb = newRtcpFb; + fmtp = newFmtp; + payloads = newPayloads; + codecs = newCodecs; + return true; + } +} + +class CodecCapabilitySelector { + CodecCapabilitySelector(String sdp) { + _sdp = sdp; + _session = sdp_transform.parse(_sdp); + } + late String _sdp; + late Map _session; + Map get session => _session; + String sdp() => sdp_transform.write(_session, null); + + CodecCapability? getCapabilities(String kind) { + var mline = _mline(kind); + if (mline == null) { + return null; + } + var rtcpFb = mline['rtcpFb'] ?? []; + var fmtp = mline['fmtp'] ?? []; + var payloads = (mline['payloads'] as String).split(' '); + var codecs = mline['rtp'] ?? []; + return CodecCapability(kind, payloads, codecs, fmtp, rtcpFb); + } + + bool setCapabilities(CodecCapability? 
caps) { + if (caps == null) { + return false; + } + var mline = _mline(caps.kind); + if (mline == null) { + return false; + } + mline['payloads'] = caps.payloads.join(' '); + mline['rtp'] = caps.codecs; + mline['fmtp'] = caps.fmtp; + mline['rtcpFb'] = caps.rtcpFb; + return true; + } + + Map? _mline(String kind) { + var mlist = _session['media'] as List; + return mlist.firstWhere((element) => element['type'] == kind, + orElse: () => null); + } +} diff --git a/example/lib/src/widgets/screen_select_dialog.dart b/example/lib/src/widgets/screen_select_dialog.dart new file mode 100644 index 0000000000..c6e809d88e --- /dev/null +++ b/example/lib/src/widgets/screen_select_dialog.dart @@ -0,0 +1,310 @@ +import 'dart:async'; +import 'dart:typed_data'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class ThumbnailWidget extends StatefulWidget { + const ThumbnailWidget( + {Key? key, + required this.source, + required this.selected, + required this.onTap}) + : super(key: key); + final DesktopCapturerSource source; + final bool selected; + final Function(DesktopCapturerSource) onTap; + + @override + _ThumbnailWidgetState createState() => _ThumbnailWidgetState(); +} + +class _ThumbnailWidgetState extends State { + final List _subscriptions = []; + Uint8List? _thumbnail; + @override + void initState() { + super.initState(); + _subscriptions.add(widget.source.onThumbnailChanged.stream.listen((event) { + setState(() { + _thumbnail = event; + }); + })); + _subscriptions.add(widget.source.onNameChanged.stream.listen((event) { + setState(() {}); + })); + } + + @override + void deactivate() { + _subscriptions.forEach((element) { + element.cancel(); + }); + super.deactivate(); + } + + @override + Widget build(BuildContext context) { + return Column( + children: [ + Expanded( + child: Container( + decoration: widget.selected + ? 
BoxDecoration( + border: Border.all(width: 2, color: Colors.blueAccent)) + : null, + child: InkWell( + onTap: () { + print('Selected source id => ${widget.source.id}'); + widget.onTap(widget.source); + }, + child: _thumbnail != null + ? Image.memory( + _thumbnail!, + gaplessPlayback: true, + alignment: Alignment.center, + ) + : Container(), + ), + )), + Text( + widget.source.name, + style: TextStyle( + fontSize: 12, + color: Colors.black87, + fontWeight: + widget.selected ? FontWeight.bold : FontWeight.normal), + ), + ], + ); + } +} + +// ignore: must_be_immutable +class ScreenSelectDialog extends Dialog { + ScreenSelectDialog() { + Future.delayed(Duration(milliseconds: 100), () { + _getSources(); + }); + _subscriptions.add(desktopCapturer.onAdded.stream.listen((source) { + _sources[source.id] = source; + _stateSetter?.call(() {}); + })); + + _subscriptions.add(desktopCapturer.onRemoved.stream.listen((source) { + _sources.remove(source.id); + _stateSetter?.call(() {}); + })); + + _subscriptions + .add(desktopCapturer.onThumbnailChanged.stream.listen((source) { + _stateSetter?.call(() {}); + })); + } + final Map _sources = {}; + SourceType _sourceType = SourceType.Screen; + DesktopCapturerSource? _selected_source; + final List> _subscriptions = []; + StateSetter? _stateSetter; + Timer? 
_timer; + + void _ok(context) async { + _timer?.cancel(); + _subscriptions.forEach((element) { + element.cancel(); + }); + Navigator.pop(context, _selected_source); + } + + void _cancel(context) async { + _timer?.cancel(); + _subscriptions.forEach((element) { + element.cancel(); + }); + Navigator.pop(context, null); + } + + Future _getSources() async { + try { + var sources = await desktopCapturer.getSources(types: [_sourceType]); + sources.forEach((element) { + print( + 'name: ${element.name}, id: ${element.id}, type: ${element.type}'); + }); + _timer?.cancel(); + _timer = Timer.periodic(Duration(seconds: 3), (timer) { + desktopCapturer.updateSources(types: [_sourceType]); + }); + _sources.clear(); + sources.forEach((element) { + _sources[element.id] = element; + }); + _stateSetter?.call(() {}); + return; + } catch (e) { + print(e.toString()); + } + } + + @override + Widget build(BuildContext context) { + return Material( + type: MaterialType.transparency, + child: Center( + child: Container( + width: 640, + height: 560, + color: Colors.white, + child: Column( + children: [ + Padding( + padding: EdgeInsets.all(10), + child: Stack( + children: [ + Align( + alignment: Alignment.topLeft, + child: Text( + 'Choose what to share', + style: TextStyle(fontSize: 16, color: Colors.black87), + ), + ), + Align( + alignment: Alignment.topRight, + child: InkWell( + child: Icon(Icons.close), + onTap: () => _cancel(context), + ), + ), + ], + ), + ), + Expanded( + flex: 1, + child: Container( + width: double.infinity, + padding: EdgeInsets.all(10), + child: StatefulBuilder( + builder: (context, setState) { + _stateSetter = setState; + return DefaultTabController( + length: 2, + child: Column( + children: [ + Container( + constraints: BoxConstraints.expand(height: 24), + child: TabBar( + onTap: (value) => + Future.delayed(Duration.zero, () { + _sourceType = value == 0 + ? 
SourceType.Screen + : SourceType.Window; + _getSources(); + }), + tabs: [ + Tab( + child: Text( + 'Entire Screen', + style: TextStyle(color: Colors.black54), + )), + Tab( + child: Text( + 'Window', + style: TextStyle(color: Colors.black54), + )), + ]), + ), + SizedBox( + height: 2, + ), + Expanded( + child: Container( + child: TabBarView(children: [ + Align( + alignment: Alignment.center, + child: Container( + child: GridView.count( + crossAxisSpacing: 8, + crossAxisCount: 2, + children: _sources.entries + .where((element) => + element.value.type == + SourceType.Screen) + .map((e) => ThumbnailWidget( + onTap: (source) { + setState(() { + _selected_source = source; + }); + }, + source: e.value, + selected: + _selected_source?.id == + e.value.id, + )) + .toList(), + ), + )), + Align( + alignment: Alignment.center, + child: Container( + child: GridView.count( + crossAxisSpacing: 8, + crossAxisCount: 3, + children: _sources.entries + .where((element) => + element.value.type == + SourceType.Window) + .map((e) => ThumbnailWidget( + onTap: (source) { + setState(() { + _selected_source = source; + }); + }, + source: e.value, + selected: + _selected_source?.id == + e.value.id, + )) + .toList(), + ), + )), + ]), + ), + ) + ], + ), + ); + }, + ), + ), + ), + Container( + width: double.infinity, + child: OverflowBar( + children: [ + MaterialButton( + child: Text( + 'Cancel', + style: TextStyle(color: Colors.black54), + ), + onPressed: () { + _cancel(context); + }, + ), + MaterialButton( + color: Theme.of(context).primaryColor, + child: Text( + 'Share', + ), + onPressed: () { + _ok(context); + }, + ), + ], + ), + ), + ], + ), + )), + ); + } +} diff --git a/example/linux/.gitignore b/example/linux/.gitignore new file mode 100644 index 0000000000..d3896c9844 --- /dev/null +++ b/example/linux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral diff --git a/example/linux/CMakeLists.txt b/example/linux/CMakeLists.txt new file mode 100644 index 0000000000..aec6e1080e --- /dev/null +++ 
b/example/linux/CMakeLists.txt @@ -0,0 +1,139 @@ +# Project-level configuration. +cmake_minimum_required(VERSION 3.10) +project(runner LANGUAGES CXX) + +# The name of the executable created for the application. Change this to change +# the on-disk name of your application. +set(BINARY_NAME "flutter_webrtc_example") +# The unique GTK application identifier for this application. See: +# https://wiki.gnome.org/HowDoI/ChooseApplicationID +set(APPLICATION_ID "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example") + +# Explicitly opt in to modern CMake behaviors to avoid warnings with recent +# versions of CMake. +cmake_policy(SET CMP0063 NEW) + +# Load bundled libraries from the lib/ directory relative to the binary. +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Root filesystem for cross-building. +if(FLUTTER_TARGET_PLATFORM_SYSROOT) + set(CMAKE_SYSROOT ${FLUTTER_TARGET_PLATFORM_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) + set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +endif() + +# Define build configuration options. +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") +endif() + +# Compilation settings that should be applied to most targets. +# +# Be cautious about adding new options here, as plugins use this function by +# default. In most cases, you should add new options to specific targets instead +# of modifying this function. 
+function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_14) + target_compile_options(${TARGET} PRIVATE -Wall -Werror) + target_compile_options(${TARGET} PRIVATE "$<$>:-O3>") + target_compile_definitions(${TARGET} PRIVATE "$<$>:NDEBUG>") +endfunction() + +# Flutter library and tool build rules. +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) + +add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}") + +# Define the application target. To change its name, change BINARY_NAME above, +# not the value here, or `flutter run` will no longer work. +# +# Any new source files that you add to the application should be added here. +add_executable(${BINARY_NAME} + "main.cc" + "my_application.cc" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" +) + +# Apply the standard set of build settings. This can be removed for applications +# that need different build settings. +apply_standard_settings(${BINARY_NAME}) + +# Add dependency libraries. Add any application-specific dependencies here. +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK) + +# Run the Flutter tool portions of the build. This must not be removed. +add_dependencies(${BINARY_NAME} flutter_assemble) + +# Only the install-generated bundle's copy of the executable will launch +# correctly, since the resources must in the right relative locations. To avoid +# people trying to run the unbundled copy, put it in a subdirectory instead of +# the default top-level location. +set_target_properties(${BINARY_NAME} + PROPERTIES + RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/intermediates_do_not_run" +) + + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. 
+include(flutter/generated_plugins.cmake) + + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. +install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +foreach(bundled_library ${PLUGIN_BUNDLED_LIBRARIES}) + install(FILES "${bundled_library}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endforeach(bundled_library) + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() diff --git a/example/linux/flutter/CMakeLists.txt b/example/linux/flutter/CMakeLists.txt new file mode 100644 index 0000000000..d5bd01648a --- /dev/null +++ b/example/linux/flutter/CMakeLists.txt @@ -0,0 +1,88 @@ +# This file controls Flutter-level build steps. It should not be edited. 
+cmake_minimum_required(VERSION 3.10) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +# TODO: Move the rest of this into files in ephemeral. See +# https://github.com/flutter/flutter/issues/57146. + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. +function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) +pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0) +pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0) + +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_linux_gtk.so") + +# Published to parent scope for install step. 
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) +set(AOT_LIBRARY "${PROJECT_DIR}/build/lib/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "fl_basic_message_channel.h" + "fl_binary_codec.h" + "fl_binary_messenger.h" + "fl_dart_project.h" + "fl_engine.h" + "fl_json_message_codec.h" + "fl_json_method_codec.h" + "fl_message_codec.h" + "fl_method_call.h" + "fl_method_channel.h" + "fl_method_codec.h" + "fl_method_response.h" + "fl_plugin_registrar.h" + "fl_plugin_registry.h" + "fl_standard_message_codec.h" + "fl_standard_method_codec.h" + "fl_string_codec.h" + "fl_value.h" + "fl_view.h" + "flutter_linux.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/flutter_linux/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE + PkgConfig::GTK + PkgConfig::GLIB + PkgConfig::GIO +) +add_dependencies(flutter flutter_assemble) + +# === Flutter tool backend === +# _phony_ is a non-existent file to force this command to run every time, +# since currently there's no way to get a full input/output list from the +# flutter tool. 
+add_custom_command( + OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS} + ${CMAKE_CURRENT_BINARY_DIR}/_phony_ + COMMAND ${CMAKE_COMMAND} -E env + ${FLUTTER_TOOL_ENVIRONMENT} + "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.sh" + ${FLUTTER_TARGET_PLATFORM} ${CMAKE_BUILD_TYPE} + VERBATIM +) +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} +) diff --git a/example/linux/flutter/generated_plugin_registrant.cc b/example/linux/flutter/generated_plugin_registrant.cc new file mode 100644 index 0000000000..3f48831149 --- /dev/null +++ b/example/linux/flutter/generated_plugin_registrant.cc @@ -0,0 +1,15 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#include "generated_plugin_registrant.h" + +#include + +void fl_register_plugins(FlPluginRegistry* registry) { + g_autoptr(FlPluginRegistrar) flutter_webrtc_registrar = + fl_plugin_registry_get_registrar_for_plugin(registry, "FlutterWebRTCPlugin"); + flutter_web_r_t_c_plugin_register_with_registrar(flutter_webrtc_registrar); +} diff --git a/example/linux/flutter/generated_plugin_registrant.h b/example/linux/flutter/generated_plugin_registrant.h new file mode 100644 index 0000000000..e0f0a47bc0 --- /dev/null +++ b/example/linux/flutter/generated_plugin_registrant.h @@ -0,0 +1,15 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#ifndef GENERATED_PLUGIN_REGISTRANT_ +#define GENERATED_PLUGIN_REGISTRANT_ + +#include + +// Registers Flutter plugins. +void fl_register_plugins(FlPluginRegistry* registry); + +#endif // GENERATED_PLUGIN_REGISTRANT_ diff --git a/example/linux/flutter/generated_plugins.cmake b/example/linux/flutter/generated_plugins.cmake new file mode 100644 index 0000000000..57172770e6 --- /dev/null +++ b/example/linux/flutter/generated_plugins.cmake @@ -0,0 +1,24 @@ +# +# Generated file, do not edit. 
+# + +list(APPEND FLUTTER_PLUGIN_LIST + flutter_webrtc +) + +list(APPEND FLUTTER_FFI_PLUGIN_LIST +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/linux plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) + +foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/linux plugins/${ffi_plugin}) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries}) +endforeach(ffi_plugin) diff --git a/example/linux/main.cc b/example/linux/main.cc new file mode 100644 index 0000000000..e7c5c54370 --- /dev/null +++ b/example/linux/main.cc @@ -0,0 +1,6 @@ +#include "my_application.h" + +int main(int argc, char** argv) { + g_autoptr(MyApplication) app = my_application_new(); + return g_application_run(G_APPLICATION(app), argc, argv); +} diff --git a/example/linux/my_application.cc b/example/linux/my_application.cc new file mode 100644 index 0000000000..0d05258c03 --- /dev/null +++ b/example/linux/my_application.cc @@ -0,0 +1,104 @@ +#include "my_application.h" + +#include +#ifdef GDK_WINDOWING_X11 +#include +#endif + +#include "flutter/generated_plugin_registrant.h" + +struct _MyApplication { + GtkApplication parent_instance; + char** dart_entrypoint_arguments; +}; + +G_DEFINE_TYPE(MyApplication, my_application, GTK_TYPE_APPLICATION) + +// Implements GApplication::activate. +static void my_application_activate(GApplication* application) { + MyApplication* self = MY_APPLICATION(application); + GtkWindow* window = + GTK_WINDOW(gtk_application_window_new(GTK_APPLICATION(application))); + + // Use a header bar when running in GNOME as this is the common style used + // by applications and is the setup most users will be using (e.g. Ubuntu + // desktop). 
+ // If running on X and not using GNOME then just use a traditional title bar + // in case the window manager does more exotic layout, e.g. tiling. + // If running on Wayland assume the header bar will work (may need changing + // if future cases occur). + gboolean use_header_bar = TRUE; +#ifdef GDK_WINDOWING_X11 + GdkScreen* screen = gtk_window_get_screen(window); + if (GDK_IS_X11_SCREEN(screen)) { + const gchar* wm_name = gdk_x11_screen_get_window_manager_name(screen); + if (g_strcmp0(wm_name, "GNOME Shell") != 0) { + use_header_bar = FALSE; + } + } +#endif + if (use_header_bar) { + GtkHeaderBar* header_bar = GTK_HEADER_BAR(gtk_header_bar_new()); + gtk_widget_show(GTK_WIDGET(header_bar)); + gtk_header_bar_set_title(header_bar, "flutter_webrtc_example"); + gtk_header_bar_set_show_close_button(header_bar, TRUE); + gtk_window_set_titlebar(window, GTK_WIDGET(header_bar)); + } else { + gtk_window_set_title(window, "flutter_webrtc_example"); + } + + gtk_window_set_default_size(window, 1280, 720); + gtk_widget_show(GTK_WIDGET(window)); + + g_autoptr(FlDartProject) project = fl_dart_project_new(); + fl_dart_project_set_dart_entrypoint_arguments(project, self->dart_entrypoint_arguments); + + FlView* view = fl_view_new(project); + gtk_widget_show(GTK_WIDGET(view)); + gtk_container_add(GTK_CONTAINER(window), GTK_WIDGET(view)); + + fl_register_plugins(FL_PLUGIN_REGISTRY(view)); + + gtk_widget_grab_focus(GTK_WIDGET(view)); +} + +// Implements GApplication::local_command_line. +static gboolean my_application_local_command_line(GApplication* application, gchar*** arguments, int* exit_status) { + MyApplication* self = MY_APPLICATION(application); + // Strip out the first argument as it is the binary name. 
+ self->dart_entrypoint_arguments = g_strdupv(*arguments + 1); + + g_autoptr(GError) error = nullptr; + if (!g_application_register(application, nullptr, &error)) { + g_warning("Failed to register: %s", error->message); + *exit_status = 1; + return TRUE; + } + + g_application_activate(application); + *exit_status = 0; + + return TRUE; +} + +// Implements GObject::dispose. +static void my_application_dispose(GObject* object) { + MyApplication* self = MY_APPLICATION(object); + g_clear_pointer(&self->dart_entrypoint_arguments, g_strfreev); + G_OBJECT_CLASS(my_application_parent_class)->dispose(object); +} + +static void my_application_class_init(MyApplicationClass* klass) { + G_APPLICATION_CLASS(klass)->activate = my_application_activate; + G_APPLICATION_CLASS(klass)->local_command_line = my_application_local_command_line; + G_OBJECT_CLASS(klass)->dispose = my_application_dispose; +} + +static void my_application_init(MyApplication* self) {} + +MyApplication* my_application_new() { + return MY_APPLICATION(g_object_new(my_application_get_type(), + "application-id", APPLICATION_ID, + "flags", G_APPLICATION_NON_UNIQUE, + nullptr)); +} diff --git a/example/linux/my_application.h b/example/linux/my_application.h new file mode 100644 index 0000000000..72271d5e41 --- /dev/null +++ b/example/linux/my_application.h @@ -0,0 +1,18 @@ +#ifndef FLUTTER_MY_APPLICATION_H_ +#define FLUTTER_MY_APPLICATION_H_ + +#include + +G_DECLARE_FINAL_TYPE(MyApplication, my_application, MY, APPLICATION, + GtkApplication) + +/** + * my_application_new: + * + * Creates a new Flutter-based application. + * + * Returns: a new #MyApplication. 
+ */ +MyApplication* my_application_new(); + +#endif // FLUTTER_MY_APPLICATION_H_ diff --git a/example/macos/.gitignore b/example/macos/.gitignore new file mode 100644 index 0000000000..746adbb6b9 --- /dev/null +++ b/example/macos/.gitignore @@ -0,0 +1,7 @@ +# Flutter-related +**/Flutter/ephemeral/ +**/Pods/ + +# Xcode-related +**/dgph +**/xcuserdata/ diff --git a/example/macos/Flutter/Flutter-Debug.xcconfig b/example/macos/Flutter/Flutter-Debug.xcconfig new file mode 100644 index 0000000000..4b81f9b2d2 --- /dev/null +++ b/example/macos/Flutter/Flutter-Debug.xcconfig @@ -0,0 +1,2 @@ +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" +#include "ephemeral/Flutter-Generated.xcconfig" diff --git a/example/macos/Flutter/Flutter-Release.xcconfig b/example/macos/Flutter/Flutter-Release.xcconfig new file mode 100644 index 0000000000..5caa9d1579 --- /dev/null +++ b/example/macos/Flutter/Flutter-Release.xcconfig @@ -0,0 +1,2 @@ +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" +#include "ephemeral/Flutter-Generated.xcconfig" diff --git a/example/macos/Flutter/GeneratedPluginRegistrant.swift b/example/macos/Flutter/GeneratedPluginRegistrant.swift new file mode 100644 index 0000000000..194710fc5a --- /dev/null +++ b/example/macos/Flutter/GeneratedPluginRegistrant.swift @@ -0,0 +1,14 @@ +// +// Generated file. Do not edit. 
+// + +import FlutterMacOS +import Foundation + +import flutter_webrtc +import path_provider_foundation + +func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) { + FlutterWebRTCPlugin.register(with: registry.registrar(forPlugin: "FlutterWebRTCPlugin")) + PathProviderPlugin.register(with: registry.registrar(forPlugin: "PathProviderPlugin")) +} diff --git a/example/macos/Podfile b/example/macos/Podfile new file mode 100644 index 0000000000..b52666a103 --- /dev/null +++ b/example/macos/Podfile @@ -0,0 +1,43 @@ +platform :osx, '10.15' + +# CocoaPods analytics sends network stats synchronously affecting flutter build latency. +ENV['COCOAPODS_DISABLE_STATS'] = 'true' + +project 'Runner', { + 'Debug' => :debug, + 'Profile' => :release, + 'Release' => :release, +} + +def flutter_root + generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'ephemeral', 'Flutter-Generated.xcconfig'), __FILE__) + unless File.exist?(generated_xcode_build_settings_path) + raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure \"flutter pub get\" is executed first" + end + + File.foreach(generated_xcode_build_settings_path) do |line| + matches = line.match(/FLUTTER_ROOT\=(.*)/) + return matches[1].strip if matches + end + raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Flutter-Generated.xcconfig, then run \"flutter pub get\"" +end + +require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root) + +flutter_macos_podfile_setup + +target 'Runner' do + use_frameworks! + use_modular_headers! + + flutter_install_all_macos_pods File.dirname(File.realpath(__FILE__)) + target 'RunnerTests' do + inherit! 
:search_paths + end +end + +post_install do |installer| + installer.pods_project.targets.each do |target| + flutter_additional_macos_build_settings(target) + end +end diff --git a/example/macos/Runner.xcodeproj/project.pbxproj b/example/macos/Runner.xcodeproj/project.pbxproj new file mode 100644 index 0000000000..e2c46d9f3f --- /dev/null +++ b/example/macos/Runner.xcodeproj/project.pbxproj @@ -0,0 +1,698 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 54; + objects = { + +/* Begin PBXAggregateTarget section */ + 33CC111A2044C6BA0003C045 /* Flutter Assemble */ = { + isa = PBXAggregateTarget; + buildConfigurationList = 33CC111B2044C6BA0003C045 /* Build configuration list for PBXAggregateTarget "Flutter Assemble" */; + buildPhases = ( + 33CC111E2044C6BF0003C045 /* ShellScript */, + ); + dependencies = ( + ); + name = "Flutter Assemble"; + productName = FLX; + }; +/* End PBXAggregateTarget section */ + +/* Begin PBXBuildFile section */ + 331C80D8294CF71000263BE5 /* RunnerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 331C80D7294CF71000263BE5 /* RunnerTests.swift */; }; + 335BBD1B22A9A15E00E9071D /* GeneratedPluginRegistrant.swift in Sources */ = {isa = PBXBuildFile; fileRef = 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */; }; + 33CC10F12044A3C60003C045 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 33CC10F02044A3C60003C045 /* AppDelegate.swift */; }; + 33CC10F32044A3C60003C045 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 33CC10F22044A3C60003C045 /* Assets.xcassets */; }; + 33CC10F62044A3C60003C045 /* MainMenu.xib in Resources */ = {isa = PBXBuildFile; fileRef = 33CC10F42044A3C60003C045 /* MainMenu.xib */; }; + 33CC11132044BFA00003C045 /* MainFlutterWindow.swift in Sources */ = {isa = PBXBuildFile; fileRef = 33CC11122044BFA00003C045 /* MainFlutterWindow.swift */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + 331C80D9294CF71000263BE5 
/* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 33CC10E52044A3C60003C045 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 33CC10EC2044A3C60003C045; + remoteInfo = Runner; + }; + 33CC111F2044C79F0003C045 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 33CC10E52044A3C60003C045 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 33CC111A2044C6BA0003C045; + remoteInfo = FLX; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 33CC110E2044A8840003C045 /* Bundle Framework */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + ); + name = "Bundle Framework"; + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 331C80D5294CF71000263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + 331C80D7294CF71000263BE5 /* RunnerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunnerTests.swift; sourceTree = ""; }; + 333000ED22D3DE5D00554162 /* Warnings.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Warnings.xcconfig; sourceTree = ""; }; + 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GeneratedPluginRegistrant.swift; sourceTree = ""; }; + 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = flutter_webrtc_example.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 33CC10F02044A3C60003C045 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = 
""; }; + 33CC10F22044A3C60003C045 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Assets.xcassets; path = Runner/Assets.xcassets; sourceTree = ""; }; + 33CC10F52044A3C60003C045 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/MainMenu.xib; sourceTree = ""; }; + 33CC10F72044A3C60003C045 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; name = Info.plist; path = Runner/Info.plist; sourceTree = ""; }; + 33CC11122044BFA00003C045 /* MainFlutterWindow.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MainFlutterWindow.swift; sourceTree = ""; }; + 33CEB47222A05771004F2AC0 /* Flutter-Debug.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = "Flutter-Debug.xcconfig"; sourceTree = ""; }; + 33CEB47422A05771004F2AC0 /* Flutter-Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = "Flutter-Release.xcconfig"; sourceTree = ""; }; + 33CEB47722A0578A004F2AC0 /* Flutter-Generated.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = "Flutter-Generated.xcconfig"; path = "ephemeral/Flutter-Generated.xcconfig"; sourceTree = ""; }; + 33E51913231747F40026EE4D /* DebugProfile.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = DebugProfile.entitlements; sourceTree = ""; }; + 33E51914231749380026EE4D /* Release.entitlements */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.entitlements; path = Release.entitlements; sourceTree = ""; }; + 33E5194F232828860026EE4D /* AppInfo.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = AppInfo.xcconfig; sourceTree = ""; }; + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Release.xcconfig; sourceTree = ""; }; + 9740EEB21CF90195004384FC /* 
Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = Debug.xcconfig; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 331C80D2294CF70F00263BE5 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 33CC10EA2044A3C60003C045 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 331C80D6294CF71000263BE5 /* RunnerTests */ = { + isa = PBXGroup; + children = ( + 331C80D7294CF71000263BE5 /* RunnerTests.swift */, + ); + path = RunnerTests; + sourceTree = ""; + }; + 33BA886A226E78AF003329D5 /* Configs */ = { + isa = PBXGroup; + children = ( + 33E5194F232828860026EE4D /* AppInfo.xcconfig */, + 9740EEB21CF90195004384FC /* Debug.xcconfig */, + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, + 333000ED22D3DE5D00554162 /* Warnings.xcconfig */, + ); + path = Configs; + sourceTree = ""; + }; + 33CC10E42044A3C60003C045 = { + isa = PBXGroup; + children = ( + 33FAB671232836740065AC1E /* Runner */, + 33CEB47122A05771004F2AC0 /* Flutter */, + 331C80D6294CF71000263BE5 /* RunnerTests */, + 33CC10EE2044A3C60003C045 /* Products */, + D73912EC22F37F3D000D13A0 /* Frameworks */, + ); + sourceTree = ""; + }; + 33CC10EE2044A3C60003C045 /* Products */ = { + isa = PBXGroup; + children = ( + 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */, + 331C80D5294CF71000263BE5 /* RunnerTests.xctest */, + ); + name = Products; + sourceTree = ""; + }; + 33CC11242044D66E0003C045 /* Resources */ = { + isa = PBXGroup; + children = ( + 33CC10F22044A3C60003C045 /* Assets.xcassets */, + 33CC10F42044A3C60003C045 /* MainMenu.xib */, + 33CC10F72044A3C60003C045 /* Info.plist */, + ); + name = Resources; + path = ..; + sourceTree = ""; + 
}; + 33CEB47122A05771004F2AC0 /* Flutter */ = { + isa = PBXGroup; + children = ( + 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */, + 33CEB47222A05771004F2AC0 /* Flutter-Debug.xcconfig */, + 33CEB47422A05771004F2AC0 /* Flutter-Release.xcconfig */, + 33CEB47722A0578A004F2AC0 /* Flutter-Generated.xcconfig */, + ); + path = Flutter; + sourceTree = ""; + }; + 33FAB671232836740065AC1E /* Runner */ = { + isa = PBXGroup; + children = ( + 33CC10F02044A3C60003C045 /* AppDelegate.swift */, + 33CC11122044BFA00003C045 /* MainFlutterWindow.swift */, + 33E51913231747F40026EE4D /* DebugProfile.entitlements */, + 33E51914231749380026EE4D /* Release.entitlements */, + 33CC11242044D66E0003C045 /* Resources */, + 33BA886A226E78AF003329D5 /* Configs */, + ); + path = Runner; + sourceTree = ""; + }; + D73912EC22F37F3D000D13A0 /* Frameworks */ = { + isa = PBXGroup; + children = ( + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 331C80D4294CF70F00263BE5 /* RunnerTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = 331C80DE294CF71000263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */; + buildPhases = ( + 331C80D1294CF70F00263BE5 /* Sources */, + 331C80D2294CF70F00263BE5 /* Frameworks */, + 331C80D3294CF70F00263BE5 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + 331C80DA294CF71000263BE5 /* PBXTargetDependency */, + ); + name = RunnerTests; + productName = RunnerTests; + productReference = 331C80D5294CF71000263BE5 /* RunnerTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; + 33CC10EC2044A3C60003C045 /* Runner */ = { + isa = PBXNativeTarget; + buildConfigurationList = 33CC10FB2044A3C60003C045 /* Build configuration list for PBXNativeTarget "Runner" */; + buildPhases = ( + 33CC10E92044A3C60003C045 /* Sources */, + 33CC10EA2044A3C60003C045 /* Frameworks */, + 33CC10EB2044A3C60003C045 /* Resources */, + 33CC110E2044A8840003C045 /* 
Bundle Framework */, + 3399D490228B24CF009A79C7 /* ShellScript */, + ); + buildRules = ( + ); + dependencies = ( + 33CC11202044C79F0003C045 /* PBXTargetDependency */, + ); + name = Runner; + productName = Runner; + productReference = 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 33CC10E52044A3C60003C045 /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 0920; + LastUpgradeCheck = 1300; + ORGANIZATIONNAME = ""; + TargetAttributes = { + 331C80D4294CF70F00263BE5 = { + CreatedOnToolsVersion = 14.0; + TestTargetID = 33CC10EC2044A3C60003C045; + }; + 33CC10EC2044A3C60003C045 = { + CreatedOnToolsVersion = 9.2; + LastSwiftMigration = 1100; + ProvisioningStyle = Automatic; + SystemCapabilities = { + com.apple.Sandbox = { + enabled = 1; + }; + }; + }; + 33CC111A2044C6BA0003C045 = { + CreatedOnToolsVersion = 9.2; + ProvisioningStyle = Manual; + }; + }; + }; + buildConfigurationList = 33CC10E82044A3C60003C045 /* Build configuration list for PBXProject "Runner" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 33CC10E42044A3C60003C045; + productRefGroup = 33CC10EE2044A3C60003C045 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 33CC10EC2044A3C60003C045 /* Runner */, + 331C80D4294CF70F00263BE5 /* RunnerTests */, + 33CC111A2044C6BA0003C045 /* Flutter Assemble */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 331C80D3294CF70F00263BE5 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 33CC10EB2044A3C60003C045 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 33CC10F32044A3C60003C045 /* 
Assets.xcassets in Resources */, + 33CC10F62044A3C60003C045 /* MainMenu.xib in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 3399D490228B24CF009A79C7 /* ShellScript */ = { + isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + ); + outputFileListPaths = ( + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "echo \"$PRODUCT_NAME.app\" > \"$PROJECT_DIR\"/Flutter/ephemeral/.app_filename && \"$FLUTTER_ROOT\"/packages/flutter_tools/bin/macos_assemble.sh embed\n"; + }; + 33CC111E2044C6BF0003C045 /* ShellScript */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + Flutter/ephemeral/FlutterInputs.xcfilelist, + ); + inputPaths = ( + Flutter/ephemeral/tripwire, + ); + outputFileListPaths = ( + Flutter/ephemeral/FlutterOutputs.xcfilelist, + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"$FLUTTER_ROOT\"/packages/flutter_tools/bin/macos_assemble.sh && touch Flutter/ephemeral/tripwire"; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 331C80D1294CF70F00263BE5 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 331C80D8294CF71000263BE5 /* RunnerTests.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 33CC10E92044A3C60003C045 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 33CC11132044BFA00003C045 /* MainFlutterWindow.swift in Sources */, + 33CC10F12044A3C60003C045 /* AppDelegate.swift in Sources */, + 335BBD1B22A9A15E00E9071D /* GeneratedPluginRegistrant.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End 
PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + 331C80DA294CF71000263BE5 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 33CC10EC2044A3C60003C045 /* Runner */; + targetProxy = 331C80D9294CF71000263BE5 /* PBXContainerItemProxy */; + }; + 33CC11202044C79F0003C045 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 33CC111A2044C6BA0003C045 /* Flutter Assemble */; + targetProxy = 33CC111F2044C79F0003C045 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin PBXVariantGroup section */ + 33CC10F42044A3C60003C045 /* MainMenu.xib */ = { + isa = PBXVariantGroup; + children = ( + 33CC10F52044A3C60003C045 /* Base */, + ); + name = MainMenu.xib; + path = Runner; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 331C80DB294CF71000263BE5 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; + }; + name = Debug; + }; + 331C80DC294CF71000263BE5 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; + }; + name = Release; + }; + 331C80DD294CF71000263BE5 /* Profile */ = { + isa = 
XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; + }; + name = Profile; + }; + 338D0CE9231458BD00FA5F75 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CODE_SIGN_IDENTITY = "-"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + MACOSX_DEPLOYMENT_TARGET = 
10.14; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = macosx; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + }; + name = Profile; + }; + 338D0CEA231458BD00FA5F75 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 33E5194F232828860026EE4D /* AppInfo.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_ENTITLEMENTS = Runner/DebugProfile.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + MACOSX_DEPLOYMENT_TARGET = 15.0; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_VERSION = 5.0; + }; + name = Profile; + }; + 338D0CEB231458BD00FA5F75 /* Profile */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Manual; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Profile; + }; + 33CC10F92044A3C60003C045 /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + 
CLANG_WARN_SUSPICIOUS_MOVE = YES; + CODE_SIGN_IDENTITY = "-"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + MACOSX_DEPLOYMENT_TARGET = 10.14; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = macosx; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 33CC10FA2044A3C60003C045 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CODE_SIGN_IDENTITY = "-"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + 
ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + MACOSX_DEPLOYMENT_TARGET = 10.14; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = macosx; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + }; + name = Release; + }; + 33CC10FC2044A3C60003C045 /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 33E5194F232828860026EE4D /* AppInfo.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_ENTITLEMENTS = Runner/DebugProfile.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + MACOSX_DEPLOYMENT_TARGET = 15.0; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; + }; + name = Debug; + }; + 33CC10FD2044A3C60003C045 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 33E5194F232828860026EE4D /* AppInfo.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_ENTITLEMENTS = Runner/Release.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + MACOSX_DEPLOYMENT_TARGET = 15.0; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_VERSION = 5.0; + }; + name = Release; + }; + 33CC111C2044C6BA0003C045 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Manual; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Debug; + }; + 33CC111D2044C6BA0003C045 /* Release */ = { 
+ isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 331C80DE294CF71000263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 331C80DB294CF71000263BE5 /* Debug */, + 331C80DC294CF71000263BE5 /* Release */, + 331C80DD294CF71000263BE5 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 33CC10E82044A3C60003C045 /* Build configuration list for PBXProject "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 33CC10F92044A3C60003C045 /* Debug */, + 33CC10FA2044A3C60003C045 /* Release */, + 338D0CE9231458BD00FA5F75 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 33CC10FB2044A3C60003C045 /* Build configuration list for PBXNativeTarget "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 33CC10FC2044A3C60003C045 /* Debug */, + 33CC10FD2044A3C60003C045 /* Release */, + 338D0CEA231458BD00FA5F75 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 33CC111B2044C6BA0003C045 /* Build configuration list for PBXAggregateTarget "Flutter Assemble" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 33CC111C2044C6BA0003C045 /* Debug */, + 33CC111D2044C6BA0003C045 /* Release */, + 338D0CEB231458BD00FA5F75 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 33CC10E52044A3C60003C045 /* Project object */; +} diff --git a/example/macos/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/example/macos/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 
0000000000..18d981003d --- /dev/null +++ b/example/macos/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme new file mode 100644 index 0000000000..a4df20256a --- /dev/null +++ b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -0,0 +1,98 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/macos/Runner.xcworkspace/contents.xcworkspacedata b/example/macos/Runner.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000000..1d526a16ed --- /dev/null +++ b/example/macos/Runner.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/example/macos/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/example/macos/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000000..18d981003d --- /dev/null +++ b/example/macos/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/example/macos/Runner/AppDelegate.swift b/example/macos/Runner/AppDelegate.swift new file mode 100644 index 0000000000..8e02df2888 --- /dev/null +++ b/example/macos/Runner/AppDelegate.swift @@ -0,0 +1,9 @@ +import Cocoa +import FlutterMacOS + +@main +class AppDelegate: FlutterAppDelegate { + override func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool { + return true + } +} diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000000..a2ec33f19f --- /dev/null +++ b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,68 @@ +{ + "images" : [ + { + "size" : 
"16x16", + "idiom" : "mac", + "filename" : "app_icon_16.png", + "scale" : "1x" + }, + { + "size" : "16x16", + "idiom" : "mac", + "filename" : "app_icon_32.png", + "scale" : "2x" + }, + { + "size" : "32x32", + "idiom" : "mac", + "filename" : "app_icon_32.png", + "scale" : "1x" + }, + { + "size" : "32x32", + "idiom" : "mac", + "filename" : "app_icon_64.png", + "scale" : "2x" + }, + { + "size" : "128x128", + "idiom" : "mac", + "filename" : "app_icon_128.png", + "scale" : "1x" + }, + { + "size" : "128x128", + "idiom" : "mac", + "filename" : "app_icon_256.png", + "scale" : "2x" + }, + { + "size" : "256x256", + "idiom" : "mac", + "filename" : "app_icon_256.png", + "scale" : "1x" + }, + { + "size" : "256x256", + "idiom" : "mac", + "filename" : "app_icon_512.png", + "scale" : "2x" + }, + { + "size" : "512x512", + "idiom" : "mac", + "filename" : "app_icon_512.png", + "scale" : "1x" + }, + { + "size" : "512x512", + "idiom" : "mac", + "filename" : "app_icon_1024.png", + "scale" : "2x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_1024.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_1024.png new file mode 100644 index 0000000000..82b6f9d9a3 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_1024.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_128.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_128.png new file mode 100644 index 0000000000..13b35eba55 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_128.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_16.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_16.png new file mode 100644 index 0000000000..0a3f5fa40f Binary files /dev/null and 
b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_16.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_256.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_256.png new file mode 100644 index 0000000000..bdb57226d5 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_256.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_32.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_32.png new file mode 100644 index 0000000000..f083318e09 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_32.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_512.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_512.png new file mode 100644 index 0000000000..326c0e72c9 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_512.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_64.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_64.png new file mode 100644 index 0000000000..2f1632cfdd Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_64.png differ diff --git a/example/macos/Runner/Base.lproj/MainMenu.xib b/example/macos/Runner/Base.lproj/MainMenu.xib new file mode 100644 index 0000000000..80e867a4e0 --- /dev/null +++ b/example/macos/Runner/Base.lproj/MainMenu.xib @@ -0,0 +1,343 @@ + + + + + + + + + + + + + + + + + + + + + + +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/macos/Runner/Configs/AppInfo.xcconfig b/example/macos/Runner/Configs/AppInfo.xcconfig new file mode 100644 index 0000000000..27d7cbf31a --- /dev/null +++ b/example/macos/Runner/Configs/AppInfo.xcconfig @@ -0,0 +1,14 @@ +// Application-level settings for the Runner target. +// +// This may be replaced with something auto-generated from metadata (e.g., pubspec.yaml) in the +// future. If not, the values below would default to using the project name when this becomes a +// 'flutter create' template. + +// The application's name. By default this is also the title of the Flutter window. +PRODUCT_NAME = flutter_webrtc_example + +// The application's bundle identifier +PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample + +// The copyright displayed in application information +PRODUCT_COPYRIGHT = Copyright © 2023 com.cloudwebrtc.flutter-flutter-example. All rights reserved. 
diff --git a/example/macos/Runner/Configs/Debug.xcconfig b/example/macos/Runner/Configs/Debug.xcconfig new file mode 100644 index 0000000000..36b0fd9464 --- /dev/null +++ b/example/macos/Runner/Configs/Debug.xcconfig @@ -0,0 +1,2 @@ +#include "../../Flutter/Flutter-Debug.xcconfig" +#include "Warnings.xcconfig" diff --git a/example/macos/Runner/Configs/Release.xcconfig b/example/macos/Runner/Configs/Release.xcconfig new file mode 100644 index 0000000000..dff4f49561 --- /dev/null +++ b/example/macos/Runner/Configs/Release.xcconfig @@ -0,0 +1,2 @@ +#include "../../Flutter/Flutter-Release.xcconfig" +#include "Warnings.xcconfig" diff --git a/example/macos/Runner/Configs/Warnings.xcconfig b/example/macos/Runner/Configs/Warnings.xcconfig new file mode 100644 index 0000000000..42bcbf4780 --- /dev/null +++ b/example/macos/Runner/Configs/Warnings.xcconfig @@ -0,0 +1,13 @@ +WARNING_CFLAGS = -Wall -Wconditional-uninitialized -Wnullable-to-nonnull-conversion -Wmissing-method-return-type -Woverlength-strings +GCC_WARN_UNDECLARED_SELECTOR = YES +CLANG_UNDEFINED_BEHAVIOR_SANITIZER_NULLABILITY = YES +CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE +CLANG_WARN__DUPLICATE_METHOD_MATCH = YES +CLANG_WARN_PRAGMA_PACK = YES +CLANG_WARN_STRICT_PROTOTYPES = YES +CLANG_WARN_COMMA = YES +GCC_WARN_STRICT_SELECTOR_MATCH = YES +CLANG_WARN_OBJC_REPEATED_USE_OF_WEAK = YES +CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES +GCC_WARN_SHADOW = YES +CLANG_WARN_UNREACHABLE_CODE = YES diff --git a/example/macos/Runner/DebugProfile.entitlements b/example/macos/Runner/DebugProfile.entitlements new file mode 100644 index 0000000000..ab97dbc1c3 --- /dev/null +++ b/example/macos/Runner/DebugProfile.entitlements @@ -0,0 +1,18 @@ + + + + + com.apple.security.app-sandbox + + com.apple.security.cs.allow-jit + + com.apple.security.network.server + + com.apple.security.device.camera + + com.apple.security.device.microphone + + com.apple.security.network.client + + + diff --git a/example/macos/Runner/Info.plist 
b/example/macos/Runner/Info.plist new file mode 100644 index 0000000000..f2c091fe03 --- /dev/null +++ b/example/macos/Runner/Info.plist @@ -0,0 +1,38 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIconFile + + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + APPL + NSCameraUsageDescription + $(PRODUCT_NAME) Camera Usage! + NSMicrophoneUsageDescription + $(PRODUCT_NAME) Microphone Usage! + NSCameraUseContinuityCameraDeviceType + $(PRODUCT_NAME) Continuity Camera Usage! + CFBundleShortVersionString + $(FLUTTER_BUILD_NAME) + CFBundleVersion + $(FLUTTER_BUILD_NUMBER) + LSMinimumSystemVersion + $(MACOSX_DEPLOYMENT_TARGET) + NSHumanReadableCopyright + $(PRODUCT_COPYRIGHT) + NSMainNibFile + MainMenu + NSPrincipalClass + NSApplication + + diff --git a/example/macos/Runner/MainFlutterWindow.swift b/example/macos/Runner/MainFlutterWindow.swift new file mode 100644 index 0000000000..3cc05eb234 --- /dev/null +++ b/example/macos/Runner/MainFlutterWindow.swift @@ -0,0 +1,15 @@ +import Cocoa +import FlutterMacOS + +class MainFlutterWindow: NSWindow { + override func awakeFromNib() { + let flutterViewController = FlutterViewController() + let windowFrame = self.frame + self.contentViewController = flutterViewController + self.setFrame(windowFrame, display: true) + + RegisterGeneratedPlugins(registry: flutterViewController) + + super.awakeFromNib() + } +} diff --git a/example/macos/Runner/Release.entitlements b/example/macos/Runner/Release.entitlements new file mode 100644 index 0000000000..9fb1f51c72 --- /dev/null +++ b/example/macos/Runner/Release.entitlements @@ -0,0 +1,16 @@ + + + + + com.apple.security.app-sandbox + + com.apple.security.device.camera + + com.apple.security.device.microphone + + com.apple.security.network.client + + com.apple.security.network.server + + + diff --git 
a/example/macos/RunnerTests/RunnerTests.swift b/example/macos/RunnerTests/RunnerTests.swift new file mode 100644 index 0000000000..5418c9f539 --- /dev/null +++ b/example/macos/RunnerTests/RunnerTests.swift @@ -0,0 +1,12 @@ +import FlutterMacOS +import Cocoa +import XCTest + +class RunnerTests: XCTestCase { + + func testExample() { + // If you add code to the Runner application, consider adding tests here. + // See https://developer.apple.com/documentation/xctest for more information about using XCTest. + } + +} diff --git a/example/pubspec.yaml b/example/pubspec.yaml index fa10d997e2..9c5d1492b5 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -1,33 +1,36 @@ name: flutter_webrtc_example description: Demonstrates how to use the webrtc plugin. version: 1.0.0 +publish_to: none +environment: + sdk: ">=3.3.0 <4.0.0" dependencies: - flutter: - sdk: flutter - # The following adds the Cupertino Icons font to your application. # Use with the CupertinoIcons class for iOS style icons. - cupertino_icons: ^1.0.0 - + cupertino_icons: ^1.0.2 + flutter: + sdk: flutter + flutter_background: ^1.0.0 flutter_webrtc: path: ../ - # Required for MediaRecorder example - path_provider: + gallery_saver_plus: 3.2.4 + path_provider: ^2.0.2 + permission_handler: ^11.3.1 + sdp_transform: ^0.3.2 dev_dependencies: flutter_test: sdk: flutter - pedantic: ^1.9.0 + pedantic: ^1.11.0 # For information on the generic Dart part of this file, see the # following page: https://www.dartlang.org/tools/pub/pubspec # The following section is specific to Flutter. flutter: - # The following line ensures that the Material Icons font is # included with your application, so that you can use the icons in # the material Icons class. 
@@ -61,5 +64,5 @@ flutter: # - asset: fonts/TrajanPro_Bold.ttf # weight: 700 # - # For details regarding fonts from package dependencies, + # For details regarding fonts from package dependencies, # see https://flutter.io/custom-fonts/#from-packages diff --git a/example/scripts/add-line.py b/example/scripts/add-line.py deleted file mode 100755 index b5a16e3e4b..0000000000 --- a/example/scripts/add-line.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/python -# -*- coding: UTF-8 -*- - -import sys -import getopt -import re - - -def findLine(pattern, fp): - line = fp.readline() - line_number = 1 - while line: - #print("Line {}: {}".format(line_number, line.strip())) - if pattern in line: - return line_number - line = fp.readline() - line_number += 1 - return -1 - -def insertBefore(filename, pattern, text): - with open(filename, 'r+') as fp: - line_number = findLine(pattern, fp) - if(line_number > 0): - print 'Insert', text,'to line', line_number - fp.seek(0) - lines = fp.readlines() - fp.seek(0) - lines.insert(line_number - 1, text + '\n') - fp.writelines(lines) - return - print 'pattern',text,'not found!' 
- -def replaceText(filename, pattern, text): - with open(filename, 'r') as fp: - lines = fp.read() - fp.close() - lines = (re.sub(pattern, text, lines)) - print 'Replace', pattern ,'to', text - fp = open(filename, 'w') - fp.write(lines) - fp.close() - -def main(argv): - inputfile = '' - string = '' - text = '' - replace = False - try: - opts, args = getopt.getopt(argv, "hi:s:t:r") - except getopt.GetoptError: - print 'add-line.py -i -s -t ' - sys.exit(2) - for opt, arg in opts: - if opt == '-h': - print 'add-line.py -i -s -t ' - sys.exit() - elif opt in ("-i"): - inputfile = arg - elif opt in ("-s"): - string = arg - elif opt in ("-t"): - text = arg - elif opt in ("-r"): - replace = True - if(replace): - replaceText(inputfile, string, text) - else: - insertBefore(inputfile, string, text) - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/example/scripts/project_tools.sh b/example/scripts/project_tools.sh deleted file mode 100755 index 355266f7e5..0000000000 --- a/example/scripts/project_tools.sh +++ /dev/null @@ -1,88 +0,0 @@ -#!/bin/bash - -FLUTTER_APP_FOLDER=$(cd `dirname $0`/../; pwd) -FLUTTER_APP_ORG=com.cloudwebrtc.flutter-flutter-example -FLUTTER_APP_PROJECT_NAME=flutter_webrtc_example -CMD=$1 - -function cleanup() { - echo "Cleanup project [$FLUTTER_APP_PROJECT_NAME] files ..." - cd $FLUTTER_APP_FOLDER - rm -rf android build *.iml ios pubspec.lock test .flutter-plugins .metadata .packages .idea macos web -} - -function create() { - cd $FLUTTER_APP_FOLDER - if [ ! -d "ios" ] && [ ! -d "android" ] && [ ! -d "macos" ]; then - echo "Create flutter project: name=$FLUTTER_APP_PROJECT_NAME, org=$FLUTTER_APP_ORG ..." - flutter config --enable-macos-desktop - flutter config --enable-web - flutter create --android-language java --androidx --ios-language objc --project-name $FLUTTER_APP_PROJECT_NAME --org $FLUTTER_APP_ORG . - add_permission_label - else - echo "Project [$FLUTTER_APP_PROJECT_NAME] already exists!" 
- fi -} - -function add_permission_label() { - cd $FLUTTER_APP_FOLDER/scripts - echo "" - echo "Add permission labels to iOS." - echo "" - python add-line.py -i ../ios/Runner/Info.plist -s 'UILaunchStoryboardName' -t ' NSCameraUsageDescription' - python add-line.py -i ../ios/Runner/Info.plist -s 'UILaunchStoryboardName' -t ' $(PRODUCT_NAME) Camera Usage!' - python add-line.py -i ../ios/Runner/Info.plist -s 'UILaunchStoryboardName' -t ' NSMicrophoneUsageDescription' - python add-line.py -i ../ios/Runner/Info.plist -s 'UILaunchStoryboardName' -t ' $(PRODUCT_NAME) Microphone Usage!' - python add-line.py -i ../ios/Podfile -s "# platform :ios, '9.0'" -t "platform :ios, '10.0'" -r - echo "" - echo "Add permission labels to AndroidManifest.xml." - echo "" - python add-line.py -i ../android/app/build.gradle -s 'minSdkVersion 16' -t 'minSdkVersion 18' -r - python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' - python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' - python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' - python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' - python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' - python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' - python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' - echo "" - echo "Add permission labels to macOS." - echo "" - python add-line.py -i ../macos/Runner/Info.plist -s 'CFBundleShortVersionString' -t ' NSCameraUsageDescription' - python add-line.py -i ../macos/Runner/Info.plist -s 'CFBundleShortVersionString' -t ' $(PRODUCT_NAME) Camera Usage!' - python add-line.py -i ../macos/Runner/Info.plist -s 'CFBundleShortVersionString' -t ' NSMicrophoneUsageDescription' - python add-line.py -i ../macos/Runner/Info.plist -s 'CFBundleShortVersionString' -t ' $(PRODUCT_NAME) Microphone Usage!' 
- - python add-line.py -i ../macos/Runner/DebugProfile.entitlements -s '' -t ' com.apple.security.device.camera' - python add-line.py -i ../macos/Runner/DebugProfile.entitlements -s '' -t ' ' - python add-line.py -i ../macos/Runner/DebugProfile.entitlements -s '' -t ' com.apple.security.device.microphone' - python add-line.py -i ../macos/Runner/DebugProfile.entitlements -s '' -t ' ' - python add-line.py -i ../macos/Runner/DebugProfile.entitlements -s '' -t ' com.apple.security.network.client' - python add-line.py -i ../macos/Runner/DebugProfile.entitlements -s '' -t ' ' - - python add-line.py -i ../macos/Runner/Release.entitlements -s '' -t ' com.apple.security.device.camera' - python add-line.py -i ../macos/Runner/Release.entitlements -s '' -t ' ' - python add-line.py -i ../macos/Runner/Release.entitlements -s '' -t ' com.apple.security.device.microphone' - python add-line.py -i ../macos/Runner/Release.entitlements -s '' -t ' ' - python add-line.py -i ../macos/Runner/Release.entitlements -s '' -t ' com.apple.security.network.client' - python add-line.py -i ../macos/Runner/Release.entitlements -s '' -t ' ' -} - -if [ "$CMD" == "create" ]; -then - create -fi - -if [ "$CMD" == "cleanup" ]; -then - cleanup -fi - -if [ "$CMD" == "add_permission" ]; -then - add_permission_label -fi - -if [ ! -n "$1" ] ;then - echo "Usage: ./project_tools.sh 'create' | 'cleanup'" -fi diff --git a/example/test/widget_test.dart b/example/test/widget_test.dart new file mode 100644 index 0000000000..ec7c1a67b5 --- /dev/null +++ b/example/test/widget_test.dart @@ -0,0 +1,30 @@ +// This is a basic Flutter widget test. +// +// To perform an interaction with a widget in your test, use the WidgetTester +// utility in the flutter_test package. For example, you can send tap and scroll +// gestures. You can also use WidgetTester to find child widgets in the widget +// tree, read text, and verify that the values of widget properties are correct. 
+ +import 'package:flutter/material.dart'; +import 'package:flutter_test/flutter_test.dart'; + +import 'package:flutter_webrtc_example/main.dart'; + +void main() { + testWidgets('Counter increments smoke test', (WidgetTester tester) async { + // Build our app and trigger a frame. + await tester.pumpWidget(MyApp()); + + // Verify that our counter starts at 0. + expect(find.text('0'), findsOneWidget); + expect(find.text('1'), findsNothing); + + // Tap the '+' icon and trigger a frame. + await tester.tap(find.byIcon(Icons.add)); + await tester.pump(); + + // Verify that our counter has incremented. + expect(find.text('0'), findsNothing); + expect(find.text('1'), findsOneWidget); + }); +} diff --git a/example/web/e2ee.worker.dart.js b/example/web/e2ee.worker.dart.js new file mode 100644 index 0000000000..546a5faa8a --- /dev/null +++ b/example/web/e2ee.worker.dart.js @@ -0,0 +1,9907 @@ +// Generated by dart2js (NullSafetyMode.sound, csp, intern-composite-values), the Dart to JavaScript compiler version: 3.7.0. +// The code supports the following hooks: +// dartPrint(message): +// if this function is defined it is called instead of the Dart [print] +// method. +// +// dartMainRunner(main, args): +// if this function is defined, the Dart [main] method will not be invoked +// directly. Instead, a closure that will invoke [main], and its arguments +// [args] is passed to [dartMainRunner]. +// +// dartDeferredLibraryLoader(uri, successCallback, errorCallback, loadId, loadPriority): +// if this function is defined, it will be called when a deferred library +// is loaded. It should load and eval the javascript of `uri`, and call +// successCallback. If it fails to do so, it should call errorCallback with +// an error. The loadId argument is the deferred import that resulted in +// this uri being loaded. The loadPriority argument is an arbitrary argument +// string forwarded from the 'dart2js:load-priority' pragma option. 
+// dartDeferredLibraryMultiLoader(uris, successCallback, errorCallback, loadId, loadPriority): +// if this function is defined, it will be called when a deferred library +// is loaded. It should load and eval the javascript of every URI in `uris`, +// and call successCallback. If it fails to do so, it should call +// errorCallback with an error. The loadId argument is the deferred import +// that resulted in this uri being loaded. The loadPriority argument is an +// arbitrary argument string forwarded from the 'dart2js:load-priority' +// pragma option. +// +// dartCallInstrumentation(id, qualifiedName): +// if this function is defined, it will be called at each entry of a +// method or constructor. Used only when compiling programs with +// --experiment-call-instrumentation. +(function dartProgram() { + function copyProperties(from, to) { + var keys = Object.keys(from); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + to[key] = from[key]; + } + } + function mixinPropertiesHard(from, to) { + var keys = Object.keys(from); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + if (!to.hasOwnProperty(key)) { + to[key] = from[key]; + } + } + } + function mixinPropertiesEasy(from, to) { + Object.assign(to, from); + } + var supportsDirectProtoAccess = function() { + var cls = function() { + }; + cls.prototype = {p: {}}; + var object = new cls(); + if (!(Object.getPrototypeOf(object) && Object.getPrototypeOf(object).p === cls.prototype.p)) + return false; + try { + if (typeof navigator != "undefined" && typeof navigator.userAgent == "string" && navigator.userAgent.indexOf("Chrome/") >= 0) + return true; + if (typeof version == "function" && version.length == 0) { + var v = version(); + if (/^\d+\.\d+\.\d+\.\d+$/.test(v)) + return true; + } + } catch (_) { + } + return false; + }(); + function inherit(cls, sup) { + cls.prototype.constructor = cls; + cls.prototype["$is" + cls.name] = cls; + if (sup != null) { + if (supportsDirectProtoAccess) { + 
Object.setPrototypeOf(cls.prototype, sup.prototype); + return; + } + var clsPrototype = Object.create(sup.prototype); + copyProperties(cls.prototype, clsPrototype); + cls.prototype = clsPrototype; + } + } + function inheritMany(sup, classes) { + for (var i = 0; i < classes.length; i++) { + inherit(classes[i], sup); + } + } + function mixinEasy(cls, mixin) { + mixinPropertiesEasy(mixin.prototype, cls.prototype); + cls.prototype.constructor = cls; + } + function mixinHard(cls, mixin) { + mixinPropertiesHard(mixin.prototype, cls.prototype); + cls.prototype.constructor = cls; + } + function lazy(holder, name, getterName, initializer) { + var uninitializedSentinel = holder; + holder[name] = uninitializedSentinel; + holder[getterName] = function() { + if (holder[name] === uninitializedSentinel) { + holder[name] = initializer(); + } + holder[getterName] = function() { + return this[name]; + }; + return holder[name]; + }; + } + function lazyFinal(holder, name, getterName, initializer) { + var uninitializedSentinel = holder; + holder[name] = uninitializedSentinel; + holder[getterName] = function() { + if (holder[name] === uninitializedSentinel) { + var value = initializer(); + if (holder[name] !== uninitializedSentinel) { + A.throwLateFieldADI(name); + } + holder[name] = value; + } + var finalValue = holder[name]; + holder[getterName] = function() { + return finalValue; + }; + return finalValue; + }; + } + function makeConstList(list) { + list.$flags = 7; + return list; + } + function convertToFastObject(properties) { + function t() { + } + t.prototype = properties; + new t(); + return properties; + } + function convertAllToFastObject(arrayOfObjects) { + for (var i = 0; i < arrayOfObjects.length; ++i) { + convertToFastObject(arrayOfObjects[i]); + } + } + var functionCounter = 0; + function instanceTearOffGetter(isIntercepted, parameters) { + var cache = null; + return isIntercepted ? 
function(receiver) { + if (cache === null) + cache = A.closureFromTearOff(parameters); + return new cache(receiver, this); + } : function() { + if (cache === null) + cache = A.closureFromTearOff(parameters); + return new cache(this, null); + }; + } + function staticTearOffGetter(parameters) { + var cache = null; + return function() { + if (cache === null) + cache = A.closureFromTearOff(parameters).prototype; + return cache; + }; + } + var typesOffset = 0; + function tearOffParameters(container, isStatic, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, needsDirectAccess) { + if (typeof funType == "number") { + funType += typesOffset; + } + return {co: container, iS: isStatic, iI: isIntercepted, rC: requiredParameterCount, dV: optionalParameterDefaultValues, cs: callNames, fs: funsOrNames, fT: funType, aI: applyIndex || 0, nDA: needsDirectAccess}; + } + function installStaticTearOff(holder, getterName, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex) { + var parameters = tearOffParameters(holder, true, false, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, false); + var getterFunction = staticTearOffGetter(parameters); + holder[getterName] = getterFunction; + } + function installInstanceTearOff(prototype, getterName, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, needsDirectAccess) { + isIntercepted = !!isIntercepted; + var parameters = tearOffParameters(prototype, false, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, !!needsDirectAccess); + var getterFunction = instanceTearOffGetter(isIntercepted, parameters); + prototype[getterName] = getterFunction; + } + function setOrUpdateInterceptorsByTag(newTags) { + var tags = init.interceptorsByTag; + if (!tags) { + 
init.interceptorsByTag = newTags; + return; + } + copyProperties(newTags, tags); + } + function setOrUpdateLeafTags(newTags) { + var tags = init.leafTags; + if (!tags) { + init.leafTags = newTags; + return; + } + copyProperties(newTags, tags); + } + function updateTypes(newTypes) { + var types = init.types; + var length = types.length; + types.push.apply(types, newTypes); + return length; + } + function updateHolder(holder, newHolder) { + copyProperties(newHolder, holder); + return holder; + } + var hunkHelpers = function() { + var mkInstance = function(isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, applyIndex) { + return function(container, getterName, name, funType) { + return installInstanceTearOff(container, getterName, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, [name], funType, applyIndex, false); + }; + }, + mkStatic = function(requiredParameterCount, optionalParameterDefaultValues, callNames, applyIndex) { + return function(container, getterName, name, funType) { + return installStaticTearOff(container, getterName, requiredParameterCount, optionalParameterDefaultValues, callNames, [name], funType, applyIndex); + }; + }; + return {inherit: inherit, inheritMany: inheritMany, mixin: mixinEasy, mixinHard: mixinHard, installStaticTearOff: installStaticTearOff, installInstanceTearOff: installInstanceTearOff, _instance_0u: mkInstance(0, 0, null, ["call$0"], 0), _instance_1u: mkInstance(0, 1, null, ["call$1"], 0), _instance_2u: mkInstance(0, 2, null, ["call$2"], 0), _instance_0i: mkInstance(1, 0, null, ["call$0"], 0), _instance_1i: mkInstance(1, 1, null, ["call$1"], 0), _instance_2i: mkInstance(1, 2, null, ["call$2"], 0), _static_0: mkStatic(0, null, ["call$0"], 0), _static_1: mkStatic(1, null, ["call$1"], 0), _static_2: mkStatic(2, null, ["call$2"], 0), makeConstList: makeConstList, lazy: lazy, lazyFinal: lazyFinal, updateHolder: updateHolder, convertToFastObject: convertToFastObject, 
updateTypes: updateTypes, setOrUpdateInterceptorsByTag: setOrUpdateInterceptorsByTag, setOrUpdateLeafTags: setOrUpdateLeafTags}; + }(); + function initializeDeferredHunk(hunk) { + typesOffset = init.types.length; + hunk(hunkHelpers, init, holders, $); + } + var J = { + makeDispatchRecord(interceptor, proto, extension, indexability) { + return {i: interceptor, p: proto, e: extension, x: indexability}; + }, + getNativeInterceptor(object) { + var proto, objectProto, $constructor, interceptor, t1, + record = object[init.dispatchPropertyName]; + if (record == null) + if ($.initNativeDispatchFlag == null) { + A.initNativeDispatch(); + record = object[init.dispatchPropertyName]; + } + if (record != null) { + proto = record.p; + if (false === proto) + return record.i; + if (true === proto) + return object; + objectProto = Object.getPrototypeOf(object); + if (proto === objectProto) + return record.i; + if (record.e === objectProto) + throw A.wrapException(A.UnimplementedError$("Return interceptor for " + A.S(proto(object, record)))); + } + $constructor = object.constructor; + if ($constructor == null) + interceptor = null; + else { + t1 = $._JS_INTEROP_INTERCEPTOR_TAG; + if (t1 == null) + t1 = $._JS_INTEROP_INTERCEPTOR_TAG = init.getIsolateTag("_$dart_js"); + interceptor = $constructor[t1]; + } + if (interceptor != null) + return interceptor; + interceptor = A.lookupAndCacheInterceptor(object); + if (interceptor != null) + return interceptor; + if (typeof object == "function") + return B.JavaScriptFunction_methods; + proto = Object.getPrototypeOf(object); + if (proto == null) + return B.PlainJavaScriptObject_methods; + if (proto === Object.prototype) + return B.PlainJavaScriptObject_methods; + if (typeof $constructor == "function") { + t1 = $._JS_INTEROP_INTERCEPTOR_TAG; + if (t1 == null) + t1 = $._JS_INTEROP_INTERCEPTOR_TAG = init.getIsolateTag("_$dart_js"); + Object.defineProperty($constructor, t1, {value: B.UnknownJavaScriptObject_methods, enumerable: false, writable: 
true, configurable: true}); + return B.UnknownJavaScriptObject_methods; + } + return B.UnknownJavaScriptObject_methods; + }, + JSArray_JSArray$fixed($length, $E) { + if ($length < 0 || $length > 4294967295) + throw A.wrapException(A.RangeError$range($length, 0, 4294967295, "length", null)); + return J.JSArray_JSArray$markFixed(new Array($length), $E); + }, + JSArray_JSArray$markFixed(allocation, $E) { + var t1 = A._setArrayType(allocation, $E._eval$1("JSArray<0>")); + t1.$flags = 1; + return t1; + }, + getInterceptor$(receiver) { + if (typeof receiver == "number") { + if (Math.floor(receiver) == receiver) + return J.JSInt.prototype; + return J.JSNumNotInt.prototype; + } + if (typeof receiver == "string") + return J.JSString.prototype; + if (receiver == null) + return J.JSNull.prototype; + if (typeof receiver == "boolean") + return J.JSBool.prototype; + if (Array.isArray(receiver)) + return J.JSArray.prototype; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + getInterceptor$asx(receiver) { + if (typeof receiver == "string") + return J.JSString.prototype; + if (receiver == null) + return receiver; + if (Array.isArray(receiver)) + return J.JSArray.prototype; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + getInterceptor$ax(receiver) { + if (receiver == null) + return receiver; + if 
(Array.isArray(receiver)) + return J.JSArray.prototype; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + getInterceptor$x(receiver) { + if (receiver == null) + return receiver; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + get$buffer$x(receiver) { + return J.getInterceptor$x(receiver).get$buffer(receiver); + }, + get$hashCode$(receiver) { + return J.getInterceptor$(receiver).get$hashCode(receiver); + }, + get$iterator$ax(receiver) { + return J.getInterceptor$ax(receiver).get$iterator(receiver); + }, + get$length$asx(receiver) { + return J.getInterceptor$asx(receiver).get$length(receiver); + }, + get$runtimeType$(receiver) { + return J.getInterceptor$(receiver).get$runtimeType(receiver); + }, + $eq$(receiver, a0) { + if (receiver == null) + return a0 == null; + if (typeof receiver != "object") + return a0 != null && receiver === a0; + return J.getInterceptor$(receiver).$eq(receiver, a0); + }, + $index$asx(receiver, a0) { + if (typeof a0 === "number") + if (Array.isArray(receiver) || typeof receiver == "string" || A.isJsIndexable(receiver, receiver[init.dispatchPropertyName])) + if (a0 >>> 0 === a0 && a0 < receiver.length) + return receiver[a0]; + return J.getInterceptor$asx(receiver).$index(receiver, a0); + }, + _setInt8$2$x(receiver, a0, a1) { + return 
J.getInterceptor$x(receiver)._setInt8$2(receiver, a0, a1); + }, + add$1$ax(receiver, a0) { + return J.getInterceptor$ax(receiver).add$1(receiver, a0); + }, + asUint8List$0$x(receiver) { + return J.getInterceptor$x(receiver).asUint8List$0(receiver); + }, + asUint8List$2$x(receiver, a0, a1) { + return J.getInterceptor$x(receiver).asUint8List$2(receiver, a0, a1); + }, + elementAt$1$ax(receiver, a0) { + return J.getInterceptor$ax(receiver).elementAt$1(receiver, a0); + }, + map$1$1$ax(receiver, a0, $T1) { + return J.getInterceptor$ax(receiver).map$1$1(receiver, a0, $T1); + }, + noSuchMethod$1$(receiver, a0) { + return J.getInterceptor$(receiver).noSuchMethod$1(receiver, a0); + }, + toString$0$(receiver) { + return J.getInterceptor$(receiver).toString$0(receiver); + }, + Interceptor: function Interceptor() { + }, + JSBool: function JSBool() { + }, + JSNull: function JSNull() { + }, + JavaScriptObject: function JavaScriptObject() { + }, + LegacyJavaScriptObject: function LegacyJavaScriptObject() { + }, + PlainJavaScriptObject: function PlainJavaScriptObject() { + }, + UnknownJavaScriptObject: function UnknownJavaScriptObject() { + }, + JavaScriptFunction: function JavaScriptFunction() { + }, + JavaScriptBigInt: function JavaScriptBigInt() { + }, + JavaScriptSymbol: function JavaScriptSymbol() { + }, + JSArray: function JSArray(t0) { + this.$ti = t0; + }, + JSUnmodifiableArray: function JSUnmodifiableArray(t0) { + this.$ti = t0; + }, + ArrayIterator: function ArrayIterator(t0, t1, t2) { + var _ = this; + _._iterable = t0; + _._length = t1; + _._index = 0; + _._current = null; + _.$ti = t2; + }, + JSNumber: function JSNumber() { + }, + JSInt: function JSInt() { + }, + JSNumNotInt: function JSNumNotInt() { + }, + JSString: function JSString() { + } + }, + A = {JS_CONST: function JS_CONST() { + }, + SystemHash_combine(hash, value) { + hash = hash + value & 536870911; + hash = hash + ((hash & 524287) << 10) & 536870911; + return hash ^ hash >>> 6; + }, + 
SystemHash_finish(hash) { + hash = hash + ((hash & 67108863) << 3) & 536870911; + hash ^= hash >>> 11; + return hash + ((hash & 16383) << 15) & 536870911; + }, + checkNotNullable(value, $name, $T) { + return value; + }, + isToStringVisiting(object) { + var t1, i; + for (t1 = $.toStringVisiting.length, i = 0; i < t1; ++i) + if (object === $.toStringVisiting[i]) + return true; + return false; + }, + MappedIterable_MappedIterable(iterable, $function, $S, $T) { + if (type$.EfficientLengthIterable_dynamic._is(iterable)) + return new A.EfficientLengthMappedIterable(iterable, $function, $S._eval$1("@<0>")._bind$1($T)._eval$1("EfficientLengthMappedIterable<1,2>")); + return new A.MappedIterable(iterable, $function, $S._eval$1("@<0>")._bind$1($T)._eval$1("MappedIterable<1,2>")); + }, + _CopyingBytesBuilder: function _CopyingBytesBuilder(t0) { + this.__internal$_length = 0; + this._buffer = t0; + }, + LateError: function LateError(t0) { + this._message = t0; + }, + SentinelValue: function SentinelValue() { + }, + EfficientLengthIterable: function EfficientLengthIterable() { + }, + ListIterable: function ListIterable() { + }, + ListIterator: function ListIterator(t0, t1, t2) { + var _ = this; + _.__internal$_iterable = t0; + _.__internal$_length = t1; + _.__internal$_index = 0; + _.__internal$_current = null; + _.$ti = t2; + }, + MappedIterable: function MappedIterable(t0, t1, t2) { + this.__internal$_iterable = t0; + this._f = t1; + this.$ti = t2; + }, + EfficientLengthMappedIterable: function EfficientLengthMappedIterable(t0, t1, t2) { + this.__internal$_iterable = t0; + this._f = t1; + this.$ti = t2; + }, + MappedIterator: function MappedIterator(t0, t1, t2) { + var _ = this; + _.__internal$_current = null; + _._iterator = t0; + _._f = t1; + _.$ti = t2; + }, + MappedListIterable: function MappedListIterable(t0, t1, t2) { + this._source = t0; + this._f = t1; + this.$ti = t2; + }, + WhereIterable: function WhereIterable(t0, t1, t2) { + this.__internal$_iterable = t0; + 
this._f = t1; + this.$ti = t2; + }, + WhereIterator: function WhereIterator(t0, t1, t2) { + this._iterator = t0; + this._f = t1; + this.$ti = t2; + }, + FixedLengthListMixin: function FixedLengthListMixin() { + }, + Symbol: function Symbol(t0) { + this.__internal$_name = t0; + }, + unminifyOrTag(rawClassName) { + var preserved = init.mangledGlobalNames[rawClassName]; + if (preserved != null) + return preserved; + return rawClassName; + }, + isJsIndexable(object, record) { + var result; + if (record != null) { + result = record.x; + if (result != null) + return result; + } + return type$.JavaScriptIndexingBehavior_dynamic._is(object); + }, + S(value) { + var result; + if (typeof value == "string") + return value; + if (typeof value == "number") { + if (value !== 0) + return "" + value; + } else if (true === value) + return "true"; + else if (false === value) + return "false"; + else if (value == null) + return "null"; + result = J.toString$0$(value); + return result; + }, + Primitives_objectHashCode(object) { + var hash, + property = $.Primitives__identityHashCodeProperty; + if (property == null) + property = $.Primitives__identityHashCodeProperty = Symbol("identityHashCode"); + hash = object[property]; + if (hash == null) { + hash = Math.random() * 0x3fffffff | 0; + object[property] = hash; + } + return hash; + }, + Primitives_objectTypeName(object) { + return A.Primitives__objectTypeNameNewRti(object); + }, + Primitives__objectTypeNameNewRti(object) { + var interceptor, dispatchName, $constructor, constructorName; + if (object instanceof A.Object) + return A._rtiToString(A.instanceType(object), null); + interceptor = J.getInterceptor$(object); + if (interceptor === B.Interceptor_methods || interceptor === B.JavaScriptObject_methods || type$.UnknownJavaScriptObject._is(object)) { + dispatchName = B.C_JS_CONST(object); + if (dispatchName !== "Object" && dispatchName !== "") + return dispatchName; + $constructor = object.constructor; + if (typeof $constructor == 
"function") { + constructorName = $constructor.name; + if (typeof constructorName == "string" && constructorName !== "Object" && constructorName !== "") + return constructorName; + } + } + return A._rtiToString(A.instanceType(object), null); + }, + Primitives_safeToString(object) { + if (typeof object == "number" || A._isBool(object)) + return J.toString$0$(object); + if (typeof object == "string") + return JSON.stringify(object); + if (object instanceof A.Closure) + return object.toString$0(0); + return "Instance of '" + A.Primitives_objectTypeName(object) + "'"; + }, + Primitives_stringFromNativeUint8List(charCodes, start, end) { + var i, result, i0, chunkEnd; + if (end <= 500 && start === 0 && end === charCodes.length) + return String.fromCharCode.apply(null, charCodes); + for (i = start, result = ""; i < end; i = i0) { + i0 = i + 500; + chunkEnd = i0 < end ? i0 : end; + result += String.fromCharCode.apply(null, charCodes.subarray(i, chunkEnd)); + } + return result; + }, + Primitives_lazyAsJsDate(receiver) { + if (receiver.date === void 0) + receiver.date = new Date(receiver._value); + return receiver.date; + }, + Primitives_getYear(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCFullYear() + 0 : A.Primitives_lazyAsJsDate(receiver).getFullYear() + 0; + }, + Primitives_getMonth(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCMonth() + 1 : A.Primitives_lazyAsJsDate(receiver).getMonth() + 1; + }, + Primitives_getDay(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCDate() + 0 : A.Primitives_lazyAsJsDate(receiver).getDate() + 0; + }, + Primitives_getHours(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCHours() + 0 : A.Primitives_lazyAsJsDate(receiver).getHours() + 0; + }, + Primitives_getMinutes(receiver) { + return receiver.isUtc ? 
A.Primitives_lazyAsJsDate(receiver).getUTCMinutes() + 0 : A.Primitives_lazyAsJsDate(receiver).getMinutes() + 0; + }, + Primitives_getSeconds(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCSeconds() + 0 : A.Primitives_lazyAsJsDate(receiver).getSeconds() + 0; + }, + Primitives_getMilliseconds(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCMilliseconds() + 0 : A.Primitives_lazyAsJsDate(receiver).getMilliseconds() + 0; + }, + Primitives_functionNoSuchMethod($function, positionalArguments, namedArguments) { + var $arguments, namedArgumentList, t1 = {}; + t1.argumentCount = 0; + $arguments = []; + namedArgumentList = []; + t1.argumentCount = positionalArguments.length; + B.JSArray_methods.addAll$1($arguments, positionalArguments); + t1.names = ""; + if (namedArguments != null && namedArguments.__js_helper$_length !== 0) + namedArguments.forEach$1(0, new A.Primitives_functionNoSuchMethod_closure(t1, namedArgumentList, $arguments)); + return J.noSuchMethod$1$($function, new A.JSInvocationMirror(B.Symbol_call, 0, $arguments, namedArgumentList, 0)); + }, + Primitives_applyFunction($function, positionalArguments, namedArguments) { + var t1, argumentCount, jsStub; + if (Array.isArray(positionalArguments)) + t1 = namedArguments == null || namedArguments.__js_helper$_length === 0; + else + t1 = false; + if (t1) { + argumentCount = positionalArguments.length; + if (argumentCount === 0) { + if (!!$function.call$0) + return $function.call$0(); + } else if (argumentCount === 1) { + if (!!$function.call$1) + return $function.call$1(positionalArguments[0]); + } else if (argumentCount === 2) { + if (!!$function.call$2) + return $function.call$2(positionalArguments[0], positionalArguments[1]); + } else if (argumentCount === 3) { + if (!!$function.call$3) + return $function.call$3(positionalArguments[0], positionalArguments[1], positionalArguments[2]); + } else if (argumentCount === 4) { + if (!!$function.call$4) + return 
$function.call$4(positionalArguments[0], positionalArguments[1], positionalArguments[2], positionalArguments[3]); + } else if (argumentCount === 5) + if (!!$function.call$5) + return $function.call$5(positionalArguments[0], positionalArguments[1], positionalArguments[2], positionalArguments[3], positionalArguments[4]); + jsStub = $function["call" + "$" + argumentCount]; + if (jsStub != null) + return jsStub.apply($function, positionalArguments); + } + return A.Primitives__generalApplyFunction($function, positionalArguments, namedArguments); + }, + Primitives__generalApplyFunction($function, positionalArguments, namedArguments) { + var defaultValuesClosure, t1, defaultValues, interceptor, jsFunction, maxArguments, missingDefaults, keys, _i, defaultValue, used, key, + $arguments = Array.isArray(positionalArguments) ? positionalArguments : A.List_List$of(positionalArguments, true, type$.dynamic), + argumentCount = $arguments.length, + requiredParameterCount = $function.$requiredArgCount; + if (argumentCount < requiredParameterCount) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + defaultValuesClosure = $function.$defaultValues; + t1 = defaultValuesClosure == null; + defaultValues = !t1 ? 
defaultValuesClosure() : null; + interceptor = J.getInterceptor$($function); + jsFunction = interceptor["call*"]; + if (typeof jsFunction == "string") + jsFunction = interceptor[jsFunction]; + if (t1) { + if (namedArguments != null && namedArguments.__js_helper$_length !== 0) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + if (argumentCount === requiredParameterCount) + return jsFunction.apply($function, $arguments); + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + } + if (Array.isArray(defaultValues)) { + if (namedArguments != null && namedArguments.__js_helper$_length !== 0) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + maxArguments = requiredParameterCount + defaultValues.length; + if (argumentCount > maxArguments) + return A.Primitives_functionNoSuchMethod($function, $arguments, null); + if (argumentCount < maxArguments) { + missingDefaults = defaultValues.slice(argumentCount - requiredParameterCount); + if ($arguments === positionalArguments) + $arguments = A.List_List$of($arguments, true, type$.dynamic); + B.JSArray_methods.addAll$1($arguments, missingDefaults); + } + return jsFunction.apply($function, $arguments); + } else { + if (argumentCount > requiredParameterCount) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + if ($arguments === positionalArguments) + $arguments = A.List_List$of($arguments, true, type$.dynamic); + keys = Object.keys(defaultValues); + if (namedArguments == null) + for (t1 = keys.length, _i = 0; _i < keys.length; keys.length === t1 || (0, A.throwConcurrentModificationError)(keys), ++_i) { + defaultValue = defaultValues[A._asString(keys[_i])]; + if (B.C__Required === defaultValue) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + B.JSArray_methods.add$1($arguments, defaultValue); + } + else { + for (t1 = keys.length, used = 0, _i = 0; _i < keys.length; 
keys.length === t1 || (0, A.throwConcurrentModificationError)(keys), ++_i) { + key = A._asString(keys[_i]); + if (namedArguments.containsKey$1(key)) { + ++used; + B.JSArray_methods.add$1($arguments, namedArguments.$index(0, key)); + } else { + defaultValue = defaultValues[key]; + if (B.C__Required === defaultValue) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + B.JSArray_methods.add$1($arguments, defaultValue); + } + } + if (used !== namedArguments.__js_helper$_length) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + } + return jsFunction.apply($function, $arguments); + } + }, + Primitives_extractStackTrace(error) { + var jsError = error.$thrownJsError; + if (jsError == null) + return null; + return A.getTraceFromException(jsError); + }, + Primitives_trySetStackTrace(error, stackTrace) { + var jsError; + if (error.$thrownJsError == null) { + jsError = A.wrapException(error); + error.$thrownJsError = jsError; + jsError.stack = stackTrace.toString$0(0); + } + }, + iae(argument) { + throw A.wrapException(A.argumentErrorValue(argument)); + }, + ioore(receiver, index) { + if (receiver == null) + J.get$length$asx(receiver); + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + }, + diagnoseIndexError(indexable, index) { + var $length, _s5_ = "index"; + if (!A._isInt(index)) + return new A.ArgumentError(true, index, _s5_, null); + $length = A._asInt(J.get$length$asx(indexable)); + if (index < 0 || index >= $length) + return A.IndexError$withLength(index, $length, indexable, _s5_); + return A.RangeError$value(index, _s5_); + }, + diagnoseRangeError(start, end, $length) { + if (start < 0 || start > $length) + return A.RangeError$range(start, 0, $length, "start", null); + if (end != null) + if (end < start || end > $length) + return A.RangeError$range(end, start, $length, "end", null); + return new A.ArgumentError(true, end, "end", null); + }, + argumentErrorValue(object) { + return new 
A.ArgumentError(true, object, null, null); + }, + wrapException(ex) { + return A.initializeExceptionWrapper(new Error(), ex); + }, + initializeExceptionWrapper(wrapper, ex) { + var t1; + if (ex == null) + ex = new A.TypeError(); + wrapper.dartException = ex; + t1 = A.toStringWrapper; + if ("defineProperty" in Object) { + Object.defineProperty(wrapper, "message", {get: t1}); + wrapper.name = ""; + } else + wrapper.toString = t1; + return wrapper; + }, + toStringWrapper() { + return J.toString$0$(this.dartException); + }, + throwExpression(ex) { + throw A.wrapException(ex); + }, + throwExpressionWithWrapper(ex, wrapper) { + throw A.initializeExceptionWrapper(wrapper, ex); + }, + throwUnsupportedOperation(o, operation, verb) { + var wrapper; + if (operation == null) + operation = 0; + if (verb == null) + verb = 0; + wrapper = Error(); + A.throwExpressionWithWrapper(A._diagnoseUnsupportedOperation(o, operation, verb), wrapper); + }, + _diagnoseUnsupportedOperation(o, encodedOperation, encodedVerb) { + var operation, table, tableLength, index, verb, object, flags, article, adjective; + if (typeof encodedOperation == "string") + operation = encodedOperation; + else { + table = "[]=;add;removeWhere;retainWhere;removeRange;setRange;setInt8;setInt16;setInt32;setUint8;setUint16;setUint32;setFloat32;setFloat64".split(";"); + tableLength = table.length; + index = encodedOperation; + if (index > tableLength) { + encodedVerb = index / tableLength | 0; + index %= tableLength; + } + operation = table[index]; + } + verb = typeof encodedVerb == "string" ? encodedVerb : "modify;remove from;add to".split(";")[encodedVerb]; + object = type$.List_dynamic._is(o) ? "list" : "ByteData"; + flags = o.$flags | 0; + article = "a "; + if ((flags & 4) !== 0) + adjective = "constant "; + else if ((flags & 2) !== 0) { + adjective = "unmodifiable "; + article = "an "; + } else + adjective = (flags & 1) !== 0 ? 
"fixed-length " : ""; + return new A.UnsupportedError("'" + operation + "': Cannot " + verb + " " + article + adjective + object); + }, + throwConcurrentModificationError(collection) { + throw A.wrapException(A.ConcurrentModificationError$(collection)); + }, + TypeErrorDecoder_extractPattern(message) { + var match, $arguments, argumentsExpr, expr, method, receiver; + message = A.quoteStringForRegExp(message.replace(String({}), "$receiver$")); + match = message.match(/\\\$[a-zA-Z]+\\\$/g); + if (match == null) + match = A._setArrayType([], type$.JSArray_String); + $arguments = match.indexOf("\\$arguments\\$"); + argumentsExpr = match.indexOf("\\$argumentsExpr\\$"); + expr = match.indexOf("\\$expr\\$"); + method = match.indexOf("\\$method\\$"); + receiver = match.indexOf("\\$receiver\\$"); + return new A.TypeErrorDecoder(message.replace(new RegExp("\\\\\\$arguments\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$argumentsExpr\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$expr\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$method\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$receiver\\\\\\$", "g"), "((?:x|[^x])*)"), $arguments, argumentsExpr, expr, method, receiver); + }, + TypeErrorDecoder_provokeCallErrorOn(expression) { + return function($expr$) { + var $argumentsExpr$ = "$arguments$"; + try { + $expr$.$method$($argumentsExpr$); + } catch (e) { + return e.message; + } + }(expression); + }, + TypeErrorDecoder_provokePropertyErrorOn(expression) { + return function($expr$) { + try { + $expr$.$method$; + } catch (e) { + return e.message; + } + }(expression); + }, + JsNoSuchMethodError$(_message, match) { + var t1 = match == null, + t2 = t1 ? null : match.method; + return new A.JsNoSuchMethodError(_message, t2, t1 ? 
null : match.receiver); + }, + unwrapException(ex) { + var t1; + if (ex == null) + return new A.NullThrownFromJavaScriptException(ex); + if (ex instanceof A.ExceptionAndStackTrace) { + t1 = ex.dartException; + return A.saveStackTrace(ex, t1 == null ? type$.Object._as(t1) : t1); + } + if (typeof ex !== "object") + return ex; + if ("dartException" in ex) + return A.saveStackTrace(ex, ex.dartException); + return A._unwrapNonDartException(ex); + }, + saveStackTrace(ex, error) { + if (type$.Error._is(error)) + if (error.$thrownJsError == null) + error.$thrownJsError = ex; + return error; + }, + _unwrapNonDartException(ex) { + var message, number, ieErrorCode, nsme, notClosure, nullCall, nullLiteralCall, undefCall, undefLiteralCall, nullProperty, undefProperty, undefLiteralProperty, match; + if (!("message" in ex)) + return ex; + message = ex.message; + if ("number" in ex && typeof ex.number == "number") { + number = ex.number; + ieErrorCode = number & 65535; + if ((B.JSInt_methods._shrOtherPositive$1(number, 16) & 8191) === 10) + switch (ieErrorCode) { + case 438: + return A.saveStackTrace(ex, A.JsNoSuchMethodError$(A.S(message) + " (Error " + ieErrorCode + ")", null)); + case 445: + case 5007: + A.S(message); + return A.saveStackTrace(ex, new A.NullError()); + } + } + if (ex instanceof TypeError) { + nsme = $.$get$TypeErrorDecoder_noSuchMethodPattern(); + notClosure = $.$get$TypeErrorDecoder_notClosurePattern(); + nullCall = $.$get$TypeErrorDecoder_nullCallPattern(); + nullLiteralCall = $.$get$TypeErrorDecoder_nullLiteralCallPattern(); + undefCall = $.$get$TypeErrorDecoder_undefinedCallPattern(); + undefLiteralCall = $.$get$TypeErrorDecoder_undefinedLiteralCallPattern(); + nullProperty = $.$get$TypeErrorDecoder_nullPropertyPattern(); + $.$get$TypeErrorDecoder_nullLiteralPropertyPattern(); + undefProperty = $.$get$TypeErrorDecoder_undefinedPropertyPattern(); + undefLiteralProperty = $.$get$TypeErrorDecoder_undefinedLiteralPropertyPattern(); + match = 
nsme.matchTypeError$1(message); + if (match != null) + return A.saveStackTrace(ex, A.JsNoSuchMethodError$(A._asString(message), match)); + else { + match = notClosure.matchTypeError$1(message); + if (match != null) { + match.method = "call"; + return A.saveStackTrace(ex, A.JsNoSuchMethodError$(A._asString(message), match)); + } else if (nullCall.matchTypeError$1(message) != null || nullLiteralCall.matchTypeError$1(message) != null || undefCall.matchTypeError$1(message) != null || undefLiteralCall.matchTypeError$1(message) != null || nullProperty.matchTypeError$1(message) != null || nullLiteralCall.matchTypeError$1(message) != null || undefProperty.matchTypeError$1(message) != null || undefLiteralProperty.matchTypeError$1(message) != null) { + A._asString(message); + return A.saveStackTrace(ex, new A.NullError()); + } + } + return A.saveStackTrace(ex, new A.UnknownJsTypeError(typeof message == "string" ? message : "")); + } + if (ex instanceof RangeError) { + if (typeof message == "string" && message.indexOf("call stack") !== -1) + return new A.StackOverflowError(); + message = function(ex) { + try { + return String(ex); + } catch (e) { + } + return null; + }(ex); + return A.saveStackTrace(ex, new A.ArgumentError(false, null, null, typeof message == "string" ? 
message.replace(/^RangeError:\s*/, "") : message)); + } + if (typeof InternalError == "function" && ex instanceof InternalError) + if (typeof message == "string" && message === "too much recursion") + return new A.StackOverflowError(); + return ex; + }, + getTraceFromException(exception) { + var trace; + if (exception instanceof A.ExceptionAndStackTrace) + return exception.stackTrace; + if (exception == null) + return new A._StackTrace(exception); + trace = exception.$cachedTrace; + if (trace != null) + return trace; + trace = new A._StackTrace(exception); + if (typeof exception === "object") + exception.$cachedTrace = trace; + return trace; + }, + objectHashCode(object) { + if (object == null) + return J.get$hashCode$(object); + if (typeof object == "object") + return A.Primitives_objectHashCode(object); + return J.get$hashCode$(object); + }, + fillLiteralMap(keyValuePairs, result) { + var index, index0, index1, + $length = keyValuePairs.length; + for (index = 0; index < $length; index = index1) { + index0 = index + 1; + index1 = index0 + 1; + result.$indexSet(0, keyValuePairs[index], keyValuePairs[index0]); + } + return result; + }, + _invokeClosure(closure, numberOfArguments, arg1, arg2, arg3, arg4) { + type$.Function._as(closure); + switch (A._asInt(numberOfArguments)) { + case 0: + return closure.call$0(); + case 1: + return closure.call$1(arg1); + case 2: + return closure.call$2(arg1, arg2); + case 3: + return closure.call$3(arg1, arg2, arg3); + case 4: + return closure.call$4(arg1, arg2, arg3, arg4); + } + throw A.wrapException(A.Exception_Exception("Unsupported number of arguments for wrapped closure")); + }, + convertDartClosureToJS(closure, arity) { + var $function = closure.$identity; + if (!!$function) + return $function; + $function = A.convertDartClosureToJSUncached(closure, arity); + closure.$identity = $function; + return $function; + }, + convertDartClosureToJSUncached(closure, arity) { + var entry; + switch (arity) { + case 0: + entry = 
closure.call$0; + break; + case 1: + entry = closure.call$1; + break; + case 2: + entry = closure.call$2; + break; + case 3: + entry = closure.call$3; + break; + case 4: + entry = closure.call$4; + break; + default: + entry = null; + } + if (entry != null) + return entry.bind(closure); + return function(closure, arity, invoke) { + return function(a1, a2, a3, a4) { + return invoke(closure, arity, a1, a2, a3, a4); + }; + }(closure, arity, A._invokeClosure); + }, + Closure_fromTearOff(parameters) { + var $prototype, $constructor, t2, trampoline, applyTrampoline, i, stub, stub0, stubName, stubCallName, + container = parameters.co, + isStatic = parameters.iS, + isIntercepted = parameters.iI, + needsDirectAccess = parameters.nDA, + applyTrampolineIndex = parameters.aI, + funsOrNames = parameters.fs, + callNames = parameters.cs, + $name = funsOrNames[0], + callName = callNames[0], + $function = container[$name], + t1 = parameters.fT; + t1.toString; + $prototype = isStatic ? Object.create(new A.StaticClosure().constructor.prototype) : Object.create(new A.BoundClosure(null, null).constructor.prototype); + $prototype.$initialize = $prototype.constructor; + $constructor = isStatic ? 
function static_tear_off() { + this.$initialize(); + } : function tear_off(a, b) { + this.$initialize(a, b); + }; + $prototype.constructor = $constructor; + $constructor.prototype = $prototype; + $prototype.$_name = $name; + $prototype.$_target = $function; + t2 = !isStatic; + if (t2) + trampoline = A.Closure_forwardCallTo($name, $function, isIntercepted, needsDirectAccess); + else { + $prototype.$static_name = $name; + trampoline = $function; + } + $prototype.$signature = A.Closure__computeSignatureFunctionNewRti(t1, isStatic, isIntercepted); + $prototype[callName] = trampoline; + for (applyTrampoline = trampoline, i = 1; i < funsOrNames.length; ++i) { + stub = funsOrNames[i]; + if (typeof stub == "string") { + stub0 = container[stub]; + stubName = stub; + stub = stub0; + } else + stubName = ""; + stubCallName = callNames[i]; + if (stubCallName != null) { + if (t2) + stub = A.Closure_forwardCallTo(stubName, stub, isIntercepted, needsDirectAccess); + $prototype[stubCallName] = stub; + } + if (i === applyTrampolineIndex) + applyTrampoline = stub; + } + $prototype["call*"] = applyTrampoline; + $prototype.$requiredArgCount = parameters.rC; + $prototype.$defaultValues = parameters.dV; + return $constructor; + }, + Closure__computeSignatureFunctionNewRti(functionType, isStatic, isIntercepted) { + if (typeof functionType == "number") + return functionType; + if (typeof functionType == "string") { + if (isStatic) + throw A.wrapException("Cannot compute signature for static tearoff."); + return function(recipe, evalOnReceiver) { + return function() { + return evalOnReceiver(this, recipe); + }; + }(functionType, A.BoundClosure_evalRecipe); + } + throw A.wrapException("Error in functionType of tearoff"); + }, + Closure_cspForwardCall(arity, needsDirectAccess, stubName, $function) { + var getReceiver = A.BoundClosure_receiverOf; + switch (needsDirectAccess ? 
-1 : arity) { + case 0: + return function(entry, receiverOf) { + return function() { + return receiverOf(this)[entry](); + }; + }(stubName, getReceiver); + case 1: + return function(entry, receiverOf) { + return function(a) { + return receiverOf(this)[entry](a); + }; + }(stubName, getReceiver); + case 2: + return function(entry, receiverOf) { + return function(a, b) { + return receiverOf(this)[entry](a, b); + }; + }(stubName, getReceiver); + case 3: + return function(entry, receiverOf) { + return function(a, b, c) { + return receiverOf(this)[entry](a, b, c); + }; + }(stubName, getReceiver); + case 4: + return function(entry, receiverOf) { + return function(a, b, c, d) { + return receiverOf(this)[entry](a, b, c, d); + }; + }(stubName, getReceiver); + case 5: + return function(entry, receiverOf) { + return function(a, b, c, d, e) { + return receiverOf(this)[entry](a, b, c, d, e); + }; + }(stubName, getReceiver); + default: + return function(f, receiverOf) { + return function() { + return f.apply(receiverOf(this), arguments); + }; + }($function, getReceiver); + } + }, + Closure_forwardCallTo(stubName, $function, isIntercepted, needsDirectAccess) { + if (isIntercepted) + return A.Closure_forwardInterceptedCallTo(stubName, $function, needsDirectAccess); + return A.Closure_cspForwardCall($function.length, needsDirectAccess, stubName, $function); + }, + Closure_cspForwardInterceptedCall(arity, needsDirectAccess, stubName, $function) { + var getReceiver = A.BoundClosure_receiverOf, + getInterceptor = A.BoundClosure_interceptorOf; + switch (needsDirectAccess ? 
-1 : arity) { + case 0: + throw A.wrapException(new A.RuntimeError("Intercepted function with no arguments.")); + case 1: + return function(entry, interceptorOf, receiverOf) { + return function() { + return interceptorOf(this)[entry](receiverOf(this)); + }; + }(stubName, getInterceptor, getReceiver); + case 2: + return function(entry, interceptorOf, receiverOf) { + return function(a) { + return interceptorOf(this)[entry](receiverOf(this), a); + }; + }(stubName, getInterceptor, getReceiver); + case 3: + return function(entry, interceptorOf, receiverOf) { + return function(a, b) { + return interceptorOf(this)[entry](receiverOf(this), a, b); + }; + }(stubName, getInterceptor, getReceiver); + case 4: + return function(entry, interceptorOf, receiverOf) { + return function(a, b, c) { + return interceptorOf(this)[entry](receiverOf(this), a, b, c); + }; + }(stubName, getInterceptor, getReceiver); + case 5: + return function(entry, interceptorOf, receiverOf) { + return function(a, b, c, d) { + return interceptorOf(this)[entry](receiverOf(this), a, b, c, d); + }; + }(stubName, getInterceptor, getReceiver); + case 6: + return function(entry, interceptorOf, receiverOf) { + return function(a, b, c, d, e) { + return interceptorOf(this)[entry](receiverOf(this), a, b, c, d, e); + }; + }(stubName, getInterceptor, getReceiver); + default: + return function(f, interceptorOf, receiverOf) { + return function() { + var a = [receiverOf(this)]; + Array.prototype.push.apply(a, arguments); + return f.apply(interceptorOf(this), a); + }; + }($function, getInterceptor, getReceiver); + } + }, + Closure_forwardInterceptedCallTo(stubName, $function, needsDirectAccess) { + var arity, t1; + if ($.BoundClosure__interceptorFieldNameCache == null) + $.BoundClosure__interceptorFieldNameCache = A.BoundClosure__computeFieldNamed("interceptor"); + if ($.BoundClosure__receiverFieldNameCache == null) + $.BoundClosure__receiverFieldNameCache = A.BoundClosure__computeFieldNamed("receiver"); + arity = 
$function.length; + t1 = A.Closure_cspForwardInterceptedCall(arity, needsDirectAccess, stubName, $function); + return t1; + }, + closureFromTearOff(parameters) { + return A.Closure_fromTearOff(parameters); + }, + BoundClosure_evalRecipe(closure, recipe) { + return A._Universe_evalInEnvironment(init.typeUniverse, A.instanceType(closure._receiver), recipe); + }, + BoundClosure_receiverOf(closure) { + return closure._receiver; + }, + BoundClosure_interceptorOf(closure) { + return closure._interceptor; + }, + BoundClosure__computeFieldNamed(fieldName) { + var names, i, $name, + template = new A.BoundClosure("receiver", "interceptor"), + t1 = Object.getOwnPropertyNames(template); + t1.$flags = 1; + names = t1; + for (t1 = names.length, i = 0; i < t1; ++i) { + $name = names[i]; + if (template[$name] === fieldName) + return $name; + } + throw A.wrapException(A.ArgumentError$("Field name " + fieldName + " not found.", null)); + }, + boolConversionCheck(value) { + if (value == null) + A.assertThrow("boolean expression must not be null"); + return value; + }, + assertThrow(message) { + throw A.wrapException(new A._AssertionError(message)); + }, + throwCyclicInit(staticName) { + throw A.wrapException(new A._CyclicInitializationError(staticName)); + }, + getIsolateAffinityTag($name) { + return init.getIsolateTag($name); + }, + defineProperty(obj, property, value) { + Object.defineProperty(obj, property, {value: value, enumerable: false, writable: true, configurable: true}); + }, + lookupAndCacheInterceptor(obj) { + var interceptor, interceptorClass, altTag, mark, t1, + tag = A._asString($.getTagFunction.call$1(obj)), + record = $.dispatchRecordsForInstanceTags[tag]; + if (record != null) { + Object.defineProperty(obj, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + return record.i; + } + interceptor = $.interceptorsForUncacheableTags[tag]; + if (interceptor != null) + return interceptor; + interceptorClass = 
init.interceptorsByTag[tag]; + if (interceptorClass == null) { + altTag = A._asStringQ($.alternateTagFunction.call$2(obj, tag)); + if (altTag != null) { + record = $.dispatchRecordsForInstanceTags[altTag]; + if (record != null) { + Object.defineProperty(obj, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + return record.i; + } + interceptor = $.interceptorsForUncacheableTags[altTag]; + if (interceptor != null) + return interceptor; + interceptorClass = init.interceptorsByTag[altTag]; + tag = altTag; + } + } + if (interceptorClass == null) + return null; + interceptor = interceptorClass.prototype; + mark = tag[0]; + if (mark === "!") { + record = A.makeLeafDispatchRecord(interceptor); + $.dispatchRecordsForInstanceTags[tag] = record; + Object.defineProperty(obj, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + return record.i; + } + if (mark === "~") { + $.interceptorsForUncacheableTags[tag] = interceptor; + return interceptor; + } + if (mark === "-") { + t1 = A.makeLeafDispatchRecord(interceptor); + Object.defineProperty(Object.getPrototypeOf(obj), init.dispatchPropertyName, {value: t1, enumerable: false, writable: true, configurable: true}); + return t1.i; + } + if (mark === "+") + return A.patchInteriorProto(obj, interceptor); + if (mark === "*") + throw A.wrapException(A.UnimplementedError$(tag)); + if (init.leafTags[tag] === true) { + t1 = A.makeLeafDispatchRecord(interceptor); + Object.defineProperty(Object.getPrototypeOf(obj), init.dispatchPropertyName, {value: t1, enumerable: false, writable: true, configurable: true}); + return t1.i; + } else + return A.patchInteriorProto(obj, interceptor); + }, + patchInteriorProto(obj, interceptor) { + var proto = Object.getPrototypeOf(obj); + Object.defineProperty(proto, init.dispatchPropertyName, {value: J.makeDispatchRecord(interceptor, proto, null, null), enumerable: false, writable: true, configurable: true}); + 
return interceptor; + }, + makeLeafDispatchRecord(interceptor) { + return J.makeDispatchRecord(interceptor, false, null, !!interceptor.$isJavaScriptIndexingBehavior); + }, + makeDefaultDispatchRecord(tag, interceptorClass, proto) { + var interceptor = interceptorClass.prototype; + if (init.leafTags[tag] === true) + return A.makeLeafDispatchRecord(interceptor); + else + return J.makeDispatchRecord(interceptor, proto, null, null); + }, + initNativeDispatch() { + if (true === $.initNativeDispatchFlag) + return; + $.initNativeDispatchFlag = true; + A.initNativeDispatchContinue(); + }, + initNativeDispatchContinue() { + var map, tags, fun, i, tag, proto, record, interceptorClass; + $.dispatchRecordsForInstanceTags = Object.create(null); + $.interceptorsForUncacheableTags = Object.create(null); + A.initHooks(); + map = init.interceptorsByTag; + tags = Object.getOwnPropertyNames(map); + if (typeof window != "undefined") { + window; + fun = function() { + }; + for (i = 0; i < tags.length; ++i) { + tag = tags[i]; + proto = $.prototypeForTagFunction.call$1(tag); + if (proto != null) { + record = A.makeDefaultDispatchRecord(tag, map[tag], proto); + if (record != null) { + Object.defineProperty(proto, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + fun.prototype = proto; + } + } + } + } + for (i = 0; i < tags.length; ++i) { + tag = tags[i]; + if (/^[A-Za-z_]/.test(tag)) { + interceptorClass = map[tag]; + map["!" 
+ tag] = interceptorClass; + map["~" + tag] = interceptorClass; + map["-" + tag] = interceptorClass; + map["+" + tag] = interceptorClass; + map["*" + tag] = interceptorClass; + } + } + }, + initHooks() { + var transformers, i, transformer, getTag, getUnknownTag, prototypeForTag, + hooks = B.C_JS_CONST0(); + hooks = A.applyHooksTransformer(B.C_JS_CONST1, A.applyHooksTransformer(B.C_JS_CONST2, A.applyHooksTransformer(B.C_JS_CONST3, A.applyHooksTransformer(B.C_JS_CONST3, A.applyHooksTransformer(B.C_JS_CONST4, A.applyHooksTransformer(B.C_JS_CONST5, A.applyHooksTransformer(B.C_JS_CONST6(B.C_JS_CONST), hooks))))))); + if (typeof dartNativeDispatchHooksTransformer != "undefined") { + transformers = dartNativeDispatchHooksTransformer; + if (typeof transformers == "function") + transformers = [transformers]; + if (Array.isArray(transformers)) + for (i = 0; i < transformers.length; ++i) { + transformer = transformers[i]; + if (typeof transformer == "function") + hooks = transformer(hooks) || hooks; + } + } + getTag = hooks.getTag; + getUnknownTag = hooks.getUnknownTag; + prototypeForTag = hooks.prototypeForTag; + $.getTagFunction = new A.initHooks_closure(getTag); + $.alternateTagFunction = new A.initHooks_closure0(getUnknownTag); + $.prototypeForTagFunction = new A.initHooks_closure1(prototypeForTag); + }, + applyHooksTransformer(transformer, hooks) { + return transformer(hooks) || hooks; + }, + createRecordTypePredicate(shape, fieldRtis) { + var $length = fieldRtis.length, + $function = init.rttc["" + $length + ";" + shape]; + if ($function == null) + return null; + if ($length === 0) + return $function; + if ($length === $function.length) + return $function.apply(null, fieldRtis); + return $function(fieldRtis); + }, + quoteStringForRegExp(string) { + if (/[[\]{}()*+?.\\^$|]/.test(string)) + return string.replace(/[[\]{}()*+?.\\^$|]/g, "\\$&"); + return string; + }, + ConstantMapView: function ConstantMapView(t0, t1) { + this._collection$_map = t0; + this.$ti = t1; + }, + 
ConstantMap: function ConstantMap() { + }, + ConstantStringMap: function ConstantStringMap(t0, t1, t2) { + this._jsIndex = t0; + this._values = t1; + this.$ti = t2; + }, + _KeysOrValues: function _KeysOrValues(t0, t1) { + this._elements = t0; + this.$ti = t1; + }, + _KeysOrValuesOrElementsIterator: function _KeysOrValuesOrElementsIterator(t0, t1, t2) { + var _ = this; + _._elements = t0; + _.__js_helper$_length = t1; + _.__js_helper$_index = 0; + _.__js_helper$_current = null; + _.$ti = t2; + }, + JSInvocationMirror: function JSInvocationMirror(t0, t1, t2, t3, t4) { + var _ = this; + _._memberName = t0; + _.__js_helper$_kind = t1; + _._arguments = t2; + _._namedArgumentNames = t3; + _._typeArgumentCount = t4; + }, + Primitives_functionNoSuchMethod_closure: function Primitives_functionNoSuchMethod_closure(t0, t1, t2) { + this._box_0 = t0; + this.namedArgumentList = t1; + this.$arguments = t2; + }, + TypeErrorDecoder: function TypeErrorDecoder(t0, t1, t2, t3, t4, t5) { + var _ = this; + _._pattern = t0; + _._arguments = t1; + _._argumentsExpr = t2; + _._expr = t3; + _._method = t4; + _._receiver = t5; + }, + NullError: function NullError() { + }, + JsNoSuchMethodError: function JsNoSuchMethodError(t0, t1, t2) { + this.__js_helper$_message = t0; + this._method = t1; + this._receiver = t2; + }, + UnknownJsTypeError: function UnknownJsTypeError(t0) { + this.__js_helper$_message = t0; + }, + NullThrownFromJavaScriptException: function NullThrownFromJavaScriptException(t0) { + this._irritant = t0; + }, + ExceptionAndStackTrace: function ExceptionAndStackTrace(t0, t1) { + this.dartException = t0; + this.stackTrace = t1; + }, + _StackTrace: function _StackTrace(t0) { + this._exception = t0; + this._trace = null; + }, + Closure: function Closure() { + }, + Closure0Args: function Closure0Args() { + }, + Closure2Args: function Closure2Args() { + }, + TearOffClosure: function TearOffClosure() { + }, + StaticClosure: function StaticClosure() { + }, + BoundClosure: function 
BoundClosure(t0, t1) { + this._receiver = t0; + this._interceptor = t1; + }, + _CyclicInitializationError: function _CyclicInitializationError(t0) { + this.variableName = t0; + }, + RuntimeError: function RuntimeError(t0) { + this.message = t0; + }, + _AssertionError: function _AssertionError(t0) { + this.message = t0; + }, + _Required: function _Required() { + }, + JsLinkedHashMap: function JsLinkedHashMap(t0) { + var _ = this; + _.__js_helper$_length = 0; + _._last = _._first = _.__js_helper$_rest = _._nums = _._strings = null; + _._modifications = 0; + _.$ti = t0; + }, + LinkedHashMapCell: function LinkedHashMapCell(t0, t1) { + var _ = this; + _.hashMapCellKey = t0; + _.hashMapCellValue = t1; + _._previous = _._next = null; + }, + LinkedHashMapKeysIterable: function LinkedHashMapKeysIterable(t0, t1) { + this._map = t0; + this.$ti = t1; + }, + LinkedHashMapKeyIterator: function LinkedHashMapKeyIterator(t0, t1, t2, t3) { + var _ = this; + _._map = t0; + _._modifications = t1; + _._cell = t2; + _.__js_helper$_current = null; + _.$ti = t3; + }, + initHooks_closure: function initHooks_closure(t0) { + this.getTag = t0; + }, + initHooks_closure0: function initHooks_closure0(t0) { + this.getUnknownTag = t0; + }, + initHooks_closure1: function initHooks_closure1(t0) { + this.prototypeForTag = t0; + }, + _ensureNativeList(list) { + return list; + }, + NativeByteData_NativeByteData($length) { + return new DataView(new ArrayBuffer($length)); + }, + NativeUint8List_NativeUint8List($length) { + return new Uint8Array($length); + }, + NativeUint8List_NativeUint8List$view(buffer, offsetInBytes, $length) { + return $length == null ? 
new Uint8Array(buffer, offsetInBytes) : new Uint8Array(buffer, offsetInBytes, $length); + }, + _checkValidIndex(index, list, $length) { + if (index >>> 0 !== index || index >= $length) + throw A.wrapException(A.diagnoseIndexError(list, index)); + }, + _checkValidRange(start, end, $length) { + var t1; + if (!(start >>> 0 !== start)) + if (end == null) + t1 = start > $length; + else + t1 = end >>> 0 !== end || start > end || end > $length; + else + t1 = true; + if (t1) + throw A.wrapException(A.diagnoseRangeError(start, end, $length)); + if (end == null) + return $length; + return end; + }, + NativeByteBuffer: function NativeByteBuffer() { + }, + NativeTypedData: function NativeTypedData() { + }, + _UnmodifiableNativeByteBufferView: function _UnmodifiableNativeByteBufferView(t0) { + this._data = t0; + }, + NativeByteData: function NativeByteData() { + }, + NativeTypedArray: function NativeTypedArray() { + }, + NativeTypedArrayOfDouble: function NativeTypedArrayOfDouble() { + }, + NativeTypedArrayOfInt: function NativeTypedArrayOfInt() { + }, + NativeFloat32List: function NativeFloat32List() { + }, + NativeFloat64List: function NativeFloat64List() { + }, + NativeInt16List: function NativeInt16List() { + }, + NativeInt32List: function NativeInt32List() { + }, + NativeInt8List: function NativeInt8List() { + }, + NativeUint16List: function NativeUint16List() { + }, + NativeUint32List: function NativeUint32List() { + }, + NativeUint8ClampedList: function NativeUint8ClampedList() { + }, + NativeUint8List: function NativeUint8List() { + }, + _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin: function _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin() { + }, + _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin: function _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin() { + }, + _NativeTypedArrayOfInt_NativeTypedArray_ListMixin: function _NativeTypedArrayOfInt_NativeTypedArray_ListMixin() { + }, + 
_NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin: function _NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin() { + }, + Rti__getQuestionFromStar(universe, rti) { + var question = rti._precomputed1; + return question == null ? rti._precomputed1 = A._Universe__lookupQuestionRti(universe, rti._primary, true) : question; + }, + Rti__getFutureFromFutureOr(universe, rti) { + var future = rti._precomputed1; + return future == null ? rti._precomputed1 = A._Universe__lookupInterfaceRti(universe, "Future", [rti._primary]) : future; + }, + Rti__isUnionOfFunctionType(rti) { + var kind = rti._kind; + if (kind === 6 || kind === 7 || kind === 8) + return A.Rti__isUnionOfFunctionType(rti._primary); + return kind === 12 || kind === 13; + }, + Rti__getCanonicalRecipe(rti) { + return rti._canonicalRecipe; + }, + findType(recipe) { + return A._Universe_eval(init.typeUniverse, recipe, false); + }, + _substitute(universe, rti, typeArguments, depth) { + var baseType, substitutedBaseType, interfaceTypeArguments, substitutedInterfaceTypeArguments, base, substitutedBase, $arguments, substitutedArguments, t1, fields, substitutedFields, returnType, substitutedReturnType, functionParameters, substitutedFunctionParameters, bounds, substitutedBounds, index, argument, + kind = rti._kind; + switch (kind) { + case 5: + case 1: + case 2: + case 3: + case 4: + return rti; + case 6: + baseType = rti._primary; + substitutedBaseType = A._substitute(universe, baseType, typeArguments, depth); + if (substitutedBaseType === baseType) + return rti; + return A._Universe__lookupStarRti(universe, substitutedBaseType, true); + case 7: + baseType = rti._primary; + substitutedBaseType = A._substitute(universe, baseType, typeArguments, depth); + if (substitutedBaseType === baseType) + return rti; + return A._Universe__lookupQuestionRti(universe, substitutedBaseType, true); + case 8: + baseType = rti._primary; + substitutedBaseType = A._substitute(universe, baseType, 
typeArguments, depth); + if (substitutedBaseType === baseType) + return rti; + return A._Universe__lookupFutureOrRti(universe, substitutedBaseType, true); + case 9: + interfaceTypeArguments = rti._rest; + substitutedInterfaceTypeArguments = A._substituteArray(universe, interfaceTypeArguments, typeArguments, depth); + if (substitutedInterfaceTypeArguments === interfaceTypeArguments) + return rti; + return A._Universe__lookupInterfaceRti(universe, rti._primary, substitutedInterfaceTypeArguments); + case 10: + base = rti._primary; + substitutedBase = A._substitute(universe, base, typeArguments, depth); + $arguments = rti._rest; + substitutedArguments = A._substituteArray(universe, $arguments, typeArguments, depth); + if (substitutedBase === base && substitutedArguments === $arguments) + return rti; + return A._Universe__lookupBindingRti(universe, substitutedBase, substitutedArguments); + case 11: + t1 = rti._primary; + fields = rti._rest; + substitutedFields = A._substituteArray(universe, fields, typeArguments, depth); + if (substitutedFields === fields) + return rti; + return A._Universe__lookupRecordRti(universe, t1, substitutedFields); + case 12: + returnType = rti._primary; + substitutedReturnType = A._substitute(universe, returnType, typeArguments, depth); + functionParameters = rti._rest; + substitutedFunctionParameters = A._substituteFunctionParameters(universe, functionParameters, typeArguments, depth); + if (substitutedReturnType === returnType && substitutedFunctionParameters === functionParameters) + return rti; + return A._Universe__lookupFunctionRti(universe, substitutedReturnType, substitutedFunctionParameters); + case 13: + bounds = rti._rest; + depth += bounds.length; + substitutedBounds = A._substituteArray(universe, bounds, typeArguments, depth); + base = rti._primary; + substitutedBase = A._substitute(universe, base, typeArguments, depth); + if (substitutedBounds === bounds && substitutedBase === base) + return rti; + return 
A._Universe__lookupGenericFunctionRti(universe, substitutedBase, substitutedBounds, true); + case 14: + index = rti._primary; + if (index < depth) + return rti; + argument = typeArguments[index - depth]; + if (argument == null) + return rti; + return argument; + default: + throw A.wrapException(A.AssertionError$("Attempted to substitute unexpected RTI kind " + kind)); + } + }, + _substituteArray(universe, rtiArray, typeArguments, depth) { + var changed, i, rti, substitutedRti, + $length = rtiArray.length, + result = A._Utils_newArrayOrEmpty($length); + for (changed = false, i = 0; i < $length; ++i) { + rti = rtiArray[i]; + substitutedRti = A._substitute(universe, rti, typeArguments, depth); + if (substitutedRti !== rti) + changed = true; + result[i] = substitutedRti; + } + return changed ? result : rtiArray; + }, + _substituteNamed(universe, namedArray, typeArguments, depth) { + var changed, i, t1, t2, rti, substitutedRti, + $length = namedArray.length, + result = A._Utils_newArrayOrEmpty($length); + for (changed = false, i = 0; i < $length; i += 3) { + t1 = namedArray[i]; + t2 = namedArray[i + 1]; + rti = namedArray[i + 2]; + substitutedRti = A._substitute(universe, rti, typeArguments, depth); + if (substitutedRti !== rti) + changed = true; + result.splice(i, 3, t1, t2, substitutedRti); + } + return changed ? 
result : namedArray; + }, + _substituteFunctionParameters(universe, functionParameters, typeArguments, depth) { + var result, + requiredPositional = functionParameters._requiredPositional, + substitutedRequiredPositional = A._substituteArray(universe, requiredPositional, typeArguments, depth), + optionalPositional = functionParameters._optionalPositional, + substitutedOptionalPositional = A._substituteArray(universe, optionalPositional, typeArguments, depth), + named = functionParameters._named, + substitutedNamed = A._substituteNamed(universe, named, typeArguments, depth); + if (substitutedRequiredPositional === requiredPositional && substitutedOptionalPositional === optionalPositional && substitutedNamed === named) + return functionParameters; + result = new A._FunctionParameters(); + result._requiredPositional = substitutedRequiredPositional; + result._optionalPositional = substitutedOptionalPositional; + result._named = substitutedNamed; + return result; + }, + _setArrayType(target, rti) { + target[init.arrayRti] = rti; + return target; + }, + closureFunctionType(closure) { + var signature = closure.$signature; + if (signature != null) { + if (typeof signature == "number") + return A.getTypeFromTypesTable(signature); + return closure.$signature(); + } + return null; + }, + instanceOrFunctionType(object, testRti) { + var rti; + if (A.Rti__isUnionOfFunctionType(testRti)) + if (object instanceof A.Closure) { + rti = A.closureFunctionType(object); + if (rti != null) + return rti; + } + return A.instanceType(object); + }, + instanceType(object) { + if (object instanceof A.Object) + return A._instanceType(object); + if (Array.isArray(object)) + return A._arrayInstanceType(object); + return A._instanceTypeFromConstructor(J.getInterceptor$(object)); + }, + _arrayInstanceType(object) { + var rti = object[init.arrayRti], + defaultRti = type$.JSArray_dynamic; + if (rti == null) + return defaultRti; + if (rti.constructor !== defaultRti.constructor) + return defaultRti; + 
return rti; + }, + _instanceType(object) { + var rti = object.$ti; + return rti != null ? rti : A._instanceTypeFromConstructor(object); + }, + _instanceTypeFromConstructor(instance) { + var $constructor = instance.constructor, + probe = $constructor.$ccache; + if (probe != null) + return probe; + return A._instanceTypeFromConstructorMiss(instance, $constructor); + }, + _instanceTypeFromConstructorMiss(instance, $constructor) { + var effectiveConstructor = instance instanceof A.Closure ? Object.getPrototypeOf(Object.getPrototypeOf(instance)).constructor : $constructor, + rti = A._Universe_findErasedType(init.typeUniverse, effectiveConstructor.name); + $constructor.$ccache = rti; + return rti; + }, + getTypeFromTypesTable(index) { + var rti, + table = init.types, + type = table[index]; + if (typeof type == "string") { + rti = A._Universe_eval(init.typeUniverse, type, false); + table[index] = rti; + return rti; + } + return type; + }, + getRuntimeTypeOfDartObject(object) { + return A.createRuntimeType(A._instanceType(object)); + }, + _structuralTypeOf(object) { + var functionRti = object instanceof A.Closure ? A.closureFunctionType(object) : null; + if (functionRti != null) + return functionRti; + if (type$.TrustedGetRuntimeType._is(object)) + return J.get$runtimeType$(object)._rti; + if (Array.isArray(object)) + return A._arrayInstanceType(object); + return A.instanceType(object); + }, + createRuntimeType(rti) { + var t1 = rti._cachedRuntimeType; + return t1 == null ? rti._cachedRuntimeType = A._createRuntimeType(rti) : t1; + }, + _createRuntimeType(rti) { + var starErasedRti, t1, + s = rti._canonicalRecipe, + starErasedRecipe = s.replace(/\*/g, ""); + if (starErasedRecipe === s) + return rti._cachedRuntimeType = new A._Type(rti); + starErasedRti = A._Universe_eval(init.typeUniverse, starErasedRecipe, true); + t1 = starErasedRti._cachedRuntimeType; + return t1 == null ? 
starErasedRti._cachedRuntimeType = A._createRuntimeType(starErasedRti) : t1; + }, + typeLiteral(recipe) { + return A.createRuntimeType(A._Universe_eval(init.typeUniverse, recipe, false)); + }, + _installSpecializedIsTest(object) { + var t1, unstarred, unstarredKind, isFn, $name, predicate, testRti = this; + if (testRti === type$.Object) + return A._finishIsFn(testRti, object, A._isObject); + if (!A.isSoundTopType(testRti)) + t1 = testRti === type$.legacy_Object; + else + t1 = true; + if (t1) + return A._finishIsFn(testRti, object, A._isTop); + t1 = testRti._kind; + if (t1 === 7) + return A._finishIsFn(testRti, object, A._generalNullableIsTestImplementation); + if (t1 === 1) + return A._finishIsFn(testRti, object, A._isNever); + unstarred = t1 === 6 ? testRti._primary : testRti; + unstarredKind = unstarred._kind; + if (unstarredKind === 8) + return A._finishIsFn(testRti, object, A._isFutureOr); + if (unstarred === type$.int) + isFn = A._isInt; + else if (unstarred === type$.double || unstarred === type$.num) + isFn = A._isNum; + else if (unstarred === type$.String) + isFn = A._isString; + else + isFn = unstarred === type$.bool ? A._isBool : null; + if (isFn != null) + return A._finishIsFn(testRti, object, isFn); + if (unstarredKind === 9) { + $name = unstarred._primary; + if (unstarred._rest.every(A.isDefinitelyTopType)) { + testRti._specializedTestResource = "$is" + $name; + if ($name === "List") + return A._finishIsFn(testRti, object, A._isListTestViaProperty); + return A._finishIsFn(testRti, object, A._isTestViaProperty); + } + } else if (unstarredKind === 11) { + predicate = A.createRecordTypePredicate(unstarred._primary, unstarred._rest); + return A._finishIsFn(testRti, object, predicate == null ? 
A._isNever : predicate); + } + return A._finishIsFn(testRti, object, A._generalIsTestImplementation); + }, + _finishIsFn(testRti, object, isFn) { + testRti._is = isFn; + return testRti._is(object); + }, + _installSpecializedAsCheck(object) { + var t1, testRti = this, + asFn = A._generalAsCheckImplementation; + if (!A.isSoundTopType(testRti)) + t1 = testRti === type$.legacy_Object; + else + t1 = true; + if (t1) + asFn = A._asTop; + else if (testRti === type$.Object) + asFn = A._asObject; + else { + t1 = A.isNullable(testRti); + if (t1) + asFn = A._generalNullableAsCheckImplementation; + } + testRti._as = asFn; + return testRti._as(object); + }, + _nullIs(testRti) { + var kind = testRti._kind, + t1 = true; + if (!A.isSoundTopType(testRti)) + if (!(testRti === type$.legacy_Object)) + if (!(testRti === type$.legacy_Never)) + if (kind !== 7) + if (!(kind === 6 && A._nullIs(testRti._primary))) + t1 = kind === 8 && A._nullIs(testRti._primary) || testRti === type$.Null || testRti === type$.JSNull; + return t1; + }, + _generalIsTestImplementation(object) { + var testRti = this; + if (object == null) + return A._nullIs(testRti); + return A.isSubtype(init.typeUniverse, A.instanceOrFunctionType(object, testRti), testRti); + }, + _generalNullableIsTestImplementation(object) { + if (object == null) + return true; + return this._primary._is(object); + }, + _isTestViaProperty(object) { + var tag, testRti = this; + if (object == null) + return A._nullIs(testRti); + tag = testRti._specializedTestResource; + if (object instanceof A.Object) + return !!object[tag]; + return !!J.getInterceptor$(object)[tag]; + }, + _isListTestViaProperty(object) { + var tag, testRti = this; + if (object == null) + return A._nullIs(testRti); + if (typeof object != "object") + return false; + if (Array.isArray(object)) + return true; + tag = testRti._specializedTestResource; + if (object instanceof A.Object) + return !!object[tag]; + return !!J.getInterceptor$(object)[tag]; + }, + 
_generalAsCheckImplementation(object) { + var testRti = this; + if (object == null) { + if (A.isNullable(testRti)) + return object; + } else if (testRti._is(object)) + return object; + A._failedAsCheck(object, testRti); + }, + _generalNullableAsCheckImplementation(object) { + var testRti = this; + if (object == null) + return object; + else if (testRti._is(object)) + return object; + A._failedAsCheck(object, testRti); + }, + _failedAsCheck(object, testRti) { + throw A.wrapException(A._TypeError$fromMessage(A._Error_compose(object, A._rtiToString(testRti, null)))); + }, + _Error_compose(object, checkedTypeDescription) { + return A.Error_safeToString(object) + ": type '" + A._rtiToString(A._structuralTypeOf(object), null) + "' is not a subtype of type '" + checkedTypeDescription + "'"; + }, + _TypeError$fromMessage(message) { + return new A._TypeError("TypeError: " + message); + }, + _TypeError__TypeError$forType(object, type) { + return new A._TypeError("TypeError: " + A._Error_compose(object, type)); + }, + _isFutureOr(object) { + var testRti = this, + unstarred = testRti._kind === 6 ? 
testRti._primary : testRti; + return unstarred._primary._is(object) || A.Rti__getFutureFromFutureOr(init.typeUniverse, unstarred)._is(object); + }, + _isObject(object) { + return object != null; + }, + _asObject(object) { + if (object != null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "Object")); + }, + _isTop(object) { + return true; + }, + _asTop(object) { + return object; + }, + _isNever(object) { + return false; + }, + _isBool(object) { + return true === object || false === object; + }, + _asBool(object) { + if (true === object) + return true; + if (false === object) + return false; + throw A.wrapException(A._TypeError__TypeError$forType(object, "bool")); + }, + _asBoolS(object) { + if (true === object) + return true; + if (false === object) + return false; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "bool")); + }, + _asBoolQ(object) { + if (true === object) + return true; + if (false === object) + return false; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "bool?")); + }, + _asDouble(object) { + if (typeof object == "number") + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "double")); + }, + _asDoubleS(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "double")); + }, + _asDoubleQ(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "double?")); + }, + _isInt(object) { + return typeof object == "number" && Math.floor(object) === object; + }, + _asInt(object) { + if (typeof object == "number" && Math.floor(object) === object) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "int")); + }, + _asIntS(object) { + if (typeof object 
== "number" && Math.floor(object) === object) + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "int")); + }, + _asIntQ(object) { + if (typeof object == "number" && Math.floor(object) === object) + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "int?")); + }, + _isNum(object) { + return typeof object == "number"; + }, + _asNum(object) { + if (typeof object == "number") + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "num")); + }, + _asNumS(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "num")); + }, + _asNumQ(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "num?")); + }, + _isString(object) { + return typeof object == "string"; + }, + _asString(object) { + if (typeof object == "string") + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "String")); + }, + _asStringS(object) { + if (typeof object == "string") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "String")); + }, + _asStringQ(object) { + if (typeof object == "string") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "String?")); + }, + _rtiArrayToString(array, genericContext) { + var s, sep, i; + for (s = "", sep = "", i = 0; i < array.length; ++i, sep = ", ") + s += sep + A._rtiToString(array[i], genericContext); + return s; + }, + _recordRtiToString(recordType, genericContext) { + var fieldCount, names, namesIndex, s, comma, i, + partialShape = recordType._primary, + fields = recordType._rest; + if ("" === partialShape) + return "(" + 
A._rtiArrayToString(fields, genericContext) + ")"; + fieldCount = fields.length; + names = partialShape.split(","); + namesIndex = names.length - fieldCount; + for (s = "(", comma = "", i = 0; i < fieldCount; ++i, comma = ", ") { + s += comma; + if (namesIndex === 0) + s += "{"; + s += A._rtiToString(fields[i], genericContext); + if (namesIndex >= 0) + s += " " + names[namesIndex]; + ++namesIndex; + } + return s + "})"; + }, + _functionRtiToString(functionType, genericContext, bounds) { + var boundsLength, offset, i, t1, t2, typeParametersText, typeSep, t3, t4, boundRti, kind, parameters, requiredPositional, requiredPositionalLength, optionalPositional, optionalPositionalLength, named, namedLength, returnTypeText, argumentsText, sep, _s2_ = ", ", outerContextLength = null; + if (bounds != null) { + boundsLength = bounds.length; + if (genericContext == null) + genericContext = A._setArrayType([], type$.JSArray_String); + else + outerContextLength = genericContext.length; + offset = genericContext.length; + for (i = boundsLength; i > 0; --i) + B.JSArray_methods.add$1(genericContext, "T" + (offset + i)); + for (t1 = type$.nullable_Object, t2 = type$.legacy_Object, typeParametersText = "<", typeSep = "", i = 0; i < boundsLength; ++i, typeSep = _s2_) { + t3 = genericContext.length; + t4 = t3 - 1 - i; + if (!(t4 >= 0)) + return A.ioore(genericContext, t4); + typeParametersText = typeParametersText + typeSep + genericContext[t4]; + boundRti = bounds[i]; + kind = boundRti._kind; + if (!(kind === 2 || kind === 3 || kind === 4 || kind === 5 || boundRti === t1)) + t3 = boundRti === t2; + else + t3 = true; + if (!t3) + typeParametersText += " extends " + A._rtiToString(boundRti, genericContext); + } + typeParametersText += ">"; + } else + typeParametersText = ""; + t1 = functionType._primary; + parameters = functionType._rest; + requiredPositional = parameters._requiredPositional; + requiredPositionalLength = requiredPositional.length; + optionalPositional = 
parameters._optionalPositional; + optionalPositionalLength = optionalPositional.length; + named = parameters._named; + namedLength = named.length; + returnTypeText = A._rtiToString(t1, genericContext); + for (argumentsText = "", sep = "", i = 0; i < requiredPositionalLength; ++i, sep = _s2_) + argumentsText += sep + A._rtiToString(requiredPositional[i], genericContext); + if (optionalPositionalLength > 0) { + argumentsText += sep + "["; + for (sep = "", i = 0; i < optionalPositionalLength; ++i, sep = _s2_) + argumentsText += sep + A._rtiToString(optionalPositional[i], genericContext); + argumentsText += "]"; + } + if (namedLength > 0) { + argumentsText += sep + "{"; + for (sep = "", i = 0; i < namedLength; i += 3, sep = _s2_) { + argumentsText += sep; + if (named[i + 1]) + argumentsText += "required "; + argumentsText += A._rtiToString(named[i + 2], genericContext) + " " + named[i]; + } + argumentsText += "}"; + } + if (outerContextLength != null) { + genericContext.toString; + genericContext.length = outerContextLength; + } + return typeParametersText + "(" + argumentsText + ") => " + returnTypeText; + }, + _rtiToString(rti, genericContext) { + var questionArgument, s, argumentKind, $name, $arguments, t1, t2, + kind = rti._kind; + if (kind === 5) + return "erased"; + if (kind === 2) + return "dynamic"; + if (kind === 3) + return "void"; + if (kind === 1) + return "Never"; + if (kind === 4) + return "any"; + if (kind === 6) + return A._rtiToString(rti._primary, genericContext); + if (kind === 7) { + questionArgument = rti._primary; + s = A._rtiToString(questionArgument, genericContext); + argumentKind = questionArgument._kind; + return (argumentKind === 12 || argumentKind === 13 ? "(" + s + ")" : s) + "?"; + } + if (kind === 8) + return "FutureOr<" + A._rtiToString(rti._primary, genericContext) + ">"; + if (kind === 9) { + $name = A._unminifyOrTag(rti._primary); + $arguments = rti._rest; + return $arguments.length > 0 ? 
$name + ("<" + A._rtiArrayToString($arguments, genericContext) + ">") : $name; + } + if (kind === 11) + return A._recordRtiToString(rti, genericContext); + if (kind === 12) + return A._functionRtiToString(rti, genericContext, null); + if (kind === 13) + return A._functionRtiToString(rti._primary, genericContext, rti._rest); + if (kind === 14) { + t1 = rti._primary; + t2 = genericContext.length; + t1 = t2 - 1 - t1; + if (!(t1 >= 0 && t1 < t2)) + return A.ioore(genericContext, t1); + return genericContext[t1]; + } + return "?"; + }, + _unminifyOrTag(rawClassName) { + var preserved = init.mangledGlobalNames[rawClassName]; + if (preserved != null) + return preserved; + return rawClassName; + }, + _Universe_findRule(universe, targetType) { + var rule = universe.tR[targetType]; + for (; typeof rule == "string";) + rule = universe.tR[rule]; + return rule; + }, + _Universe_findErasedType(universe, cls) { + var $length, erased, $arguments, i, $interface, + t1 = universe.eT, + probe = t1[cls]; + if (probe == null) + return A._Universe_eval(universe, cls, false); + else if (typeof probe == "number") { + $length = probe; + erased = A._Universe__lookupTerminalRti(universe, 5, "#"); + $arguments = A._Utils_newArrayOrEmpty($length); + for (i = 0; i < $length; ++i) + $arguments[i] = erased; + $interface = A._Universe__lookupInterfaceRti(universe, cls, $arguments); + t1[cls] = $interface; + return $interface; + } else + return probe; + }, + _Universe_addRules(universe, rules) { + return A._Utils_objectAssign(universe.tR, rules); + }, + _Universe_addErasedTypes(universe, types) { + return A._Utils_objectAssign(universe.eT, types); + }, + _Universe_eval(universe, recipe, normalize) { + var rti, + t1 = universe.eC, + probe = t1.get(recipe); + if (probe != null) + return probe; + rti = A._Parser_parse(A._Parser_create(universe, null, recipe, normalize)); + t1.set(recipe, rti); + return rti; + }, + _Universe_evalInEnvironment(universe, environment, recipe) { + var probe, rti, + cache = 
environment._evalCache; + if (cache == null) + cache = environment._evalCache = new Map(); + probe = cache.get(recipe); + if (probe != null) + return probe; + rti = A._Parser_parse(A._Parser_create(universe, environment, recipe, true)); + cache.set(recipe, rti); + return rti; + }, + _Universe_bind(universe, environment, argumentsRti) { + var argumentsRecipe, probe, rti, + cache = environment._bindCache; + if (cache == null) + cache = environment._bindCache = new Map(); + argumentsRecipe = argumentsRti._canonicalRecipe; + probe = cache.get(argumentsRecipe); + if (probe != null) + return probe; + rti = A._Universe__lookupBindingRti(universe, environment, argumentsRti._kind === 10 ? argumentsRti._rest : [argumentsRti]); + cache.set(argumentsRecipe, rti); + return rti; + }, + _Universe__installTypeTests(universe, rti) { + rti._as = A._installSpecializedAsCheck; + rti._is = A._installSpecializedIsTest; + return rti; + }, + _Universe__lookupTerminalRti(universe, kind, key) { + var rti, t1, + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = kind; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupStarRti(universe, baseType, normalize) { + var t1, + key = baseType._canonicalRecipe + "*", + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createStarRti(universe, baseType, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createStarRti(universe, baseType, key, normalize) { + var baseKind, t1, rti; + if (normalize) { + baseKind = baseType._kind; + if (!A.isSoundTopType(baseType)) + t1 = baseType === type$.Null || baseType === type$.JSNull || baseKind === 7 || baseKind === 6; + else + t1 = true; + if (t1) + return baseType; + } + rti = new A.Rti(null, null); + rti._kind = 6; + rti._primary = baseType; + rti._canonicalRecipe = key; + return 
A._Universe__installTypeTests(universe, rti); + }, + _Universe__lookupQuestionRti(universe, baseType, normalize) { + var t1, + key = baseType._canonicalRecipe + "?", + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createQuestionRti(universe, baseType, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createQuestionRti(universe, baseType, key, normalize) { + var baseKind, t1, starArgument, rti; + if (normalize) { + baseKind = baseType._kind; + t1 = true; + if (!A.isSoundTopType(baseType)) + if (!(baseType === type$.Null || baseType === type$.JSNull)) + if (baseKind !== 7) + t1 = baseKind === 8 && A.isNullable(baseType._primary); + if (t1) + return baseType; + else if (baseKind === 1 || baseType === type$.legacy_Never) + return type$.Null; + else if (baseKind === 6) { + starArgument = baseType._primary; + if (starArgument._kind === 8 && A.isNullable(starArgument._primary)) + return starArgument; + else + return A.Rti__getQuestionFromStar(universe, baseType); + } + } + rti = new A.Rti(null, null); + rti._kind = 7; + rti._primary = baseType; + rti._canonicalRecipe = key; + return A._Universe__installTypeTests(universe, rti); + }, + _Universe__lookupFutureOrRti(universe, baseType, normalize) { + var t1, + key = baseType._canonicalRecipe + "/", + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createFutureOrRti(universe, baseType, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createFutureOrRti(universe, baseType, key, normalize) { + var t1, rti; + if (normalize) { + t1 = baseType._kind; + if (A.isSoundTopType(baseType) || baseType === type$.Object || baseType === type$.legacy_Object) + return baseType; + else if (t1 === 1) + return A._Universe__lookupInterfaceRti(universe, "Future", [baseType]); + else if (baseType === type$.Null || baseType === type$.JSNull) + return type$.nullable_Future_Null; + } + rti = new A.Rti(null, null); + 
rti._kind = 8; + rti._primary = baseType; + rti._canonicalRecipe = key; + return A._Universe__installTypeTests(universe, rti); + }, + _Universe__lookupGenericFunctionParameterRti(universe, index) { + var rti, t1, + key = "" + index + "^", + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 14; + rti._primary = index; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__canonicalRecipeJoin($arguments) { + var s, sep, i, + $length = $arguments.length; + for (s = "", sep = "", i = 0; i < $length; ++i, sep = ",") + s += sep + $arguments[i]._canonicalRecipe; + return s; + }, + _Universe__canonicalRecipeJoinNamed($arguments) { + var s, sep, i, t1, nameSep, + $length = $arguments.length; + for (s = "", sep = "", i = 0; i < $length; i += 3, sep = ",") { + t1 = $arguments[i]; + nameSep = $arguments[i + 1] ? "!" : ":"; + s += sep + t1 + nameSep + $arguments[i + 2]._canonicalRecipe; + } + return s; + }, + _Universe__lookupInterfaceRti(universe, $name, $arguments) { + var probe, rti, t1, + s = $name; + if ($arguments.length > 0) + s += "<" + A._Universe__canonicalRecipeJoin($arguments) + ">"; + probe = universe.eC.get(s); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 9; + rti._primary = $name; + rti._rest = $arguments; + if ($arguments.length > 0) + rti._precomputed1 = $arguments[0]; + rti._canonicalRecipe = s; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(s, t1); + return t1; + }, + _Universe__lookupBindingRti(universe, base, $arguments) { + var newBase, newArguments, key, probe, rti, t1; + if (base._kind === 10) { + newBase = base._primary; + newArguments = base._rest.concat($arguments); + } else { + newArguments = $arguments; + newBase = base; + } + key = newBase._canonicalRecipe + (";<" + A._Universe__canonicalRecipeJoin(newArguments) + ">"); + probe = 
universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 10; + rti._primary = newBase; + rti._rest = newArguments; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupRecordRti(universe, partialShapeTag, fields) { + var rti, t1, + key = "+" + (partialShapeTag + "(" + A._Universe__canonicalRecipeJoin(fields) + ")"), + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 11; + rti._primary = partialShapeTag; + rti._rest = fields; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupFunctionRti(universe, returnType, parameters) { + var sep, key, probe, rti, t1, + s = returnType._canonicalRecipe, + requiredPositional = parameters._requiredPositional, + requiredPositionalLength = requiredPositional.length, + optionalPositional = parameters._optionalPositional, + optionalPositionalLength = optionalPositional.length, + named = parameters._named, + namedLength = named.length, + recipe = "(" + A._Universe__canonicalRecipeJoin(requiredPositional); + if (optionalPositionalLength > 0) { + sep = requiredPositionalLength > 0 ? "," : ""; + recipe += sep + "[" + A._Universe__canonicalRecipeJoin(optionalPositional) + "]"; + } + if (namedLength > 0) { + sep = requiredPositionalLength > 0 ? 
"," : ""; + recipe += sep + "{" + A._Universe__canonicalRecipeJoinNamed(named) + "}"; + } + key = s + (recipe + ")"); + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 12; + rti._primary = returnType; + rti._rest = parameters; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupGenericFunctionRti(universe, baseFunctionType, bounds, normalize) { + var t1, + key = baseFunctionType._canonicalRecipe + ("<" + A._Universe__canonicalRecipeJoin(bounds) + ">"), + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createGenericFunctionRti(universe, baseFunctionType, bounds, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createGenericFunctionRti(universe, baseFunctionType, bounds, key, normalize) { + var $length, typeArguments, count, i, bound, substitutedBase, substitutedBounds, rti; + if (normalize) { + $length = bounds.length; + typeArguments = A._Utils_newArrayOrEmpty($length); + for (count = 0, i = 0; i < $length; ++i) { + bound = bounds[i]; + if (bound._kind === 1) { + typeArguments[i] = bound; + ++count; + } + } + if (count > 0) { + substitutedBase = A._substitute(universe, baseFunctionType, typeArguments, 0); + substitutedBounds = A._substituteArray(universe, bounds, typeArguments, 0); + return A._Universe__lookupGenericFunctionRti(universe, substitutedBase, substitutedBounds, bounds !== substitutedBounds); + } + } + rti = new A.Rti(null, null); + rti._kind = 13; + rti._primary = baseFunctionType; + rti._rest = bounds; + rti._canonicalRecipe = key; + return A._Universe__installTypeTests(universe, rti); + }, + _Parser_create(universe, environment, recipe, normalize) { + return {u: universe, e: environment, r: recipe, s: [], p: 0, n: normalize}; + }, + _Parser_parse(parser) { + var t2, i, ch, t3, array, end, item, + source = parser.r, + t1 = 
parser.s; + for (t2 = source.length, i = 0; i < t2;) { + ch = source.charCodeAt(i); + if (ch >= 48 && ch <= 57) + i = A._Parser_handleDigit(i + 1, ch, source, t1); + else if ((((ch | 32) >>> 0) - 97 & 65535) < 26 || ch === 95 || ch === 36 || ch === 124) + i = A._Parser_handleIdentifier(parser, i, source, t1, false); + else if (ch === 46) + i = A._Parser_handleIdentifier(parser, i, source, t1, true); + else { + ++i; + switch (ch) { + case 44: + break; + case 58: + t1.push(false); + break; + case 33: + t1.push(true); + break; + case 59: + t1.push(A._Parser_toType(parser.u, parser.e, t1.pop())); + break; + case 94: + t1.push(A._Universe__lookupGenericFunctionParameterRti(parser.u, t1.pop())); + break; + case 35: + t1.push(A._Universe__lookupTerminalRti(parser.u, 5, "#")); + break; + case 64: + t1.push(A._Universe__lookupTerminalRti(parser.u, 2, "@")); + break; + case 126: + t1.push(A._Universe__lookupTerminalRti(parser.u, 3, "~")); + break; + case 60: + t1.push(parser.p); + parser.p = t1.length; + break; + case 62: + A._Parser_handleTypeArguments(parser, t1); + break; + case 38: + A._Parser_handleExtendedOperations(parser, t1); + break; + case 42: + t3 = parser.u; + t1.push(A._Universe__lookupStarRti(t3, A._Parser_toType(t3, parser.e, t1.pop()), parser.n)); + break; + case 63: + t3 = parser.u; + t1.push(A._Universe__lookupQuestionRti(t3, A._Parser_toType(t3, parser.e, t1.pop()), parser.n)); + break; + case 47: + t3 = parser.u; + t1.push(A._Universe__lookupFutureOrRti(t3, A._Parser_toType(t3, parser.e, t1.pop()), parser.n)); + break; + case 40: + t1.push(-3); + t1.push(parser.p); + parser.p = t1.length; + break; + case 41: + A._Parser_handleArguments(parser, t1); + break; + case 91: + t1.push(parser.p); + parser.p = t1.length; + break; + case 93: + array = t1.splice(parser.p); + A._Parser_toTypes(parser.u, parser.e, array); + parser.p = t1.pop(); + t1.push(array); + t1.push(-1); + break; + case 123: + t1.push(parser.p); + parser.p = t1.length; + break; + case 125: + 
array = t1.splice(parser.p); + A._Parser_toTypesNamed(parser.u, parser.e, array); + parser.p = t1.pop(); + t1.push(array); + t1.push(-2); + break; + case 43: + end = source.indexOf("(", i); + t1.push(source.substring(i, end)); + t1.push(-4); + t1.push(parser.p); + parser.p = t1.length; + i = end + 1; + break; + default: + throw "Bad character " + ch; + } + } + } + item = t1.pop(); + return A._Parser_toType(parser.u, parser.e, item); + }, + _Parser_handleDigit(i, digit, source, stack) { + var t1, ch, + value = digit - 48; + for (t1 = source.length; i < t1; ++i) { + ch = source.charCodeAt(i); + if (!(ch >= 48 && ch <= 57)) + break; + value = value * 10 + (ch - 48); + } + stack.push(value); + return i; + }, + _Parser_handleIdentifier(parser, start, source, stack, hasPeriod) { + var t1, ch, t2, string, environment, recipe, + i = start + 1; + for (t1 = source.length; i < t1; ++i) { + ch = source.charCodeAt(i); + if (ch === 46) { + if (hasPeriod) + break; + hasPeriod = true; + } else { + if (!((((ch | 32) >>> 0) - 97 & 65535) < 26 || ch === 95 || ch === 36 || ch === 124)) + t2 = ch >= 48 && ch <= 57; + else + t2 = true; + if (!t2) + break; + } + } + string = source.substring(start, i); + if (hasPeriod) { + t1 = parser.u; + environment = parser.e; + if (environment._kind === 10) + environment = environment._primary; + recipe = A._Universe_findRule(t1, environment._primary)[string]; + if (recipe == null) + A.throwExpression('No "' + string + '" in "' + A.Rti__getCanonicalRecipe(environment) + '"'); + stack.push(A._Universe_evalInEnvironment(t1, environment, recipe)); + } else + stack.push(string); + return i; + }, + _Parser_handleTypeArguments(parser, stack) { + var base, + t1 = parser.u, + $arguments = A._Parser_collectArray(parser, stack), + head = stack.pop(); + if (typeof head == "string") + stack.push(A._Universe__lookupInterfaceRti(t1, head, $arguments)); + else { + base = A._Parser_toType(t1, parser.e, head); + switch (base._kind) { + case 12: + 
stack.push(A._Universe__lookupGenericFunctionRti(t1, base, $arguments, parser.n)); + break; + default: + stack.push(A._Universe__lookupBindingRti(t1, base, $arguments)); + break; + } + } + }, + _Parser_handleArguments(parser, stack) { + var requiredPositional, returnType, parameters, + t1 = parser.u, + head = stack.pop(), + optionalPositional = null, named = null; + if (typeof head == "number") + switch (head) { + case -1: + optionalPositional = stack.pop(); + break; + case -2: + named = stack.pop(); + break; + default: + stack.push(head); + break; + } + else + stack.push(head); + requiredPositional = A._Parser_collectArray(parser, stack); + head = stack.pop(); + switch (head) { + case -3: + head = stack.pop(); + if (optionalPositional == null) + optionalPositional = t1.sEA; + if (named == null) + named = t1.sEA; + returnType = A._Parser_toType(t1, parser.e, head); + parameters = new A._FunctionParameters(); + parameters._requiredPositional = requiredPositional; + parameters._optionalPositional = optionalPositional; + parameters._named = named; + stack.push(A._Universe__lookupFunctionRti(t1, returnType, parameters)); + return; + case -4: + stack.push(A._Universe__lookupRecordRti(t1, stack.pop(), requiredPositional)); + return; + default: + throw A.wrapException(A.AssertionError$("Unexpected state under `()`: " + A.S(head))); + } + }, + _Parser_handleExtendedOperations(parser, stack) { + var $top = stack.pop(); + if (0 === $top) { + stack.push(A._Universe__lookupTerminalRti(parser.u, 1, "0&")); + return; + } + if (1 === $top) { + stack.push(A._Universe__lookupTerminalRti(parser.u, 4, "1&")); + return; + } + throw A.wrapException(A.AssertionError$("Unexpected extended operation " + A.S($top))); + }, + _Parser_collectArray(parser, stack) { + var array = stack.splice(parser.p); + A._Parser_toTypes(parser.u, parser.e, array); + parser.p = stack.pop(); + return array; + }, + _Parser_toType(universe, environment, item) { + if (typeof item == "string") + return 
A._Universe__lookupInterfaceRti(universe, item, universe.sEA); + else if (typeof item == "number") { + environment.toString; + return A._Parser_indexToType(universe, environment, item); + } else + return item; + }, + _Parser_toTypes(universe, environment, items) { + var i, + $length = items.length; + for (i = 0; i < $length; ++i) + items[i] = A._Parser_toType(universe, environment, items[i]); + }, + _Parser_toTypesNamed(universe, environment, items) { + var i, + $length = items.length; + for (i = 2; i < $length; i += 3) + items[i] = A._Parser_toType(universe, environment, items[i]); + }, + _Parser_indexToType(universe, environment, index) { + var typeArguments, len, + kind = environment._kind; + if (kind === 10) { + if (index === 0) + return environment._primary; + typeArguments = environment._rest; + len = typeArguments.length; + if (index <= len) + return typeArguments[index - 1]; + index -= len; + environment = environment._primary; + kind = environment._kind; + } else if (index === 0) + return environment; + if (kind !== 9) + throw A.wrapException(A.AssertionError$("Indexed base must be an interface type")); + typeArguments = environment._rest; + if (index <= typeArguments.length) + return typeArguments[index - 1]; + throw A.wrapException(A.AssertionError$("Bad index " + index + " for " + environment.toString$0(0))); + }, + isSubtype(universe, s, t) { + var result, + sCache = s._isSubtypeCache; + if (sCache == null) + sCache = s._isSubtypeCache = new Map(); + result = sCache.get(t); + if (result == null) { + result = A._isSubtype(universe, s, null, t, null, false) ? 
1 : 0; + sCache.set(t, result); + } + if (0 === result) + return false; + if (1 === result) + return true; + return true; + }, + _isSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var t1, sKind, leftTypeVariable, tKind, t2, sBounds, tBounds, sLength, i, sBound, tBound; + if (s === t) + return true; + if (!A.isSoundTopType(t)) + t1 = t === type$.legacy_Object; + else + t1 = true; + if (t1) + return true; + sKind = s._kind; + if (sKind === 4) + return true; + if (A.isSoundTopType(s)) + return false; + t1 = s._kind; + if (t1 === 1) + return true; + leftTypeVariable = sKind === 14; + if (leftTypeVariable) + if (A._isSubtype(universe, sEnv[s._primary], sEnv, t, tEnv, false)) + return true; + tKind = t._kind; + t1 = s === type$.Null || s === type$.JSNull; + if (t1) { + if (tKind === 8) + return A._isSubtype(universe, s, sEnv, t._primary, tEnv, false); + return t === type$.Null || t === type$.JSNull || tKind === 7 || tKind === 6; + } + if (t === type$.Object) { + if (sKind === 8) + return A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + if (sKind === 6) + return A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + return sKind !== 7; + } + if (sKind === 6) + return A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + if (tKind === 6) { + t1 = A.Rti__getQuestionFromStar(universe, t); + return A._isSubtype(universe, s, sEnv, t1, tEnv, false); + } + if (sKind === 8) { + if (!A._isSubtype(universe, s._primary, sEnv, t, tEnv, false)) + return false; + return A._isSubtype(universe, A.Rti__getFutureFromFutureOr(universe, s), sEnv, t, tEnv, false); + } + if (sKind === 7) { + t1 = A._isSubtype(universe, type$.Null, sEnv, t, tEnv, false); + return t1 && A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + } + if (tKind === 8) { + if (A._isSubtype(universe, s, sEnv, t._primary, tEnv, false)) + return true; + return A._isSubtype(universe, s, sEnv, A.Rti__getFutureFromFutureOr(universe, t), tEnv, false); + } + if (tKind === 7) { + t1 = 
A._isSubtype(universe, s, sEnv, type$.Null, tEnv, false); + return t1 || A._isSubtype(universe, s, sEnv, t._primary, tEnv, false); + } + if (leftTypeVariable) + return false; + t1 = sKind !== 12; + if ((!t1 || sKind === 13) && t === type$.Function) + return true; + t2 = sKind === 11; + if (t2 && t === type$.Record) + return true; + if (tKind === 13) { + if (s === type$.JavaScriptFunction) + return true; + if (sKind !== 13) + return false; + sBounds = s._rest; + tBounds = t._rest; + sLength = sBounds.length; + if (sLength !== tBounds.length) + return false; + sEnv = sEnv == null ? sBounds : sBounds.concat(sEnv); + tEnv = tEnv == null ? tBounds : tBounds.concat(tEnv); + for (i = 0; i < sLength; ++i) { + sBound = sBounds[i]; + tBound = tBounds[i]; + if (!A._isSubtype(universe, sBound, sEnv, tBound, tEnv, false) || !A._isSubtype(universe, tBound, tEnv, sBound, sEnv, false)) + return false; + } + return A._isFunctionSubtype(universe, s._primary, sEnv, t._primary, tEnv, false); + } + if (tKind === 12) { + if (s === type$.JavaScriptFunction) + return true; + if (t1) + return false; + return A._isFunctionSubtype(universe, s, sEnv, t, tEnv, false); + } + if (sKind === 9) { + if (tKind !== 9) + return false; + return A._isInterfaceSubtype(universe, s, sEnv, t, tEnv, false); + } + if (t2 && tKind === 11) + return A._isRecordSubtype(universe, s, sEnv, t, tEnv, false); + return false; + }, + _isFunctionSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var sParameters, tParameters, sRequiredPositional, tRequiredPositional, sRequiredPositionalLength, tRequiredPositionalLength, requiredPositionalDelta, sOptionalPositional, tOptionalPositional, sOptionalPositionalLength, tOptionalPositionalLength, i, t1, sNamed, tNamed, sNamedLength, tNamedLength, sIndex, tIndex, tName, sName, sIsRequired; + if (!A._isSubtype(universe, s._primary, sEnv, t._primary, tEnv, false)) + return false; + sParameters = s._rest; + tParameters = t._rest; + sRequiredPositional = 
sParameters._requiredPositional; + tRequiredPositional = tParameters._requiredPositional; + sRequiredPositionalLength = sRequiredPositional.length; + tRequiredPositionalLength = tRequiredPositional.length; + if (sRequiredPositionalLength > tRequiredPositionalLength) + return false; + requiredPositionalDelta = tRequiredPositionalLength - sRequiredPositionalLength; + sOptionalPositional = sParameters._optionalPositional; + tOptionalPositional = tParameters._optionalPositional; + sOptionalPositionalLength = sOptionalPositional.length; + tOptionalPositionalLength = tOptionalPositional.length; + if (sRequiredPositionalLength + sOptionalPositionalLength < tRequiredPositionalLength + tOptionalPositionalLength) + return false; + for (i = 0; i < sRequiredPositionalLength; ++i) { + t1 = sRequiredPositional[i]; + if (!A._isSubtype(universe, tRequiredPositional[i], tEnv, t1, sEnv, false)) + return false; + } + for (i = 0; i < requiredPositionalDelta; ++i) { + t1 = sOptionalPositional[i]; + if (!A._isSubtype(universe, tRequiredPositional[sRequiredPositionalLength + i], tEnv, t1, sEnv, false)) + return false; + } + for (i = 0; i < tOptionalPositionalLength; ++i) { + t1 = sOptionalPositional[requiredPositionalDelta + i]; + if (!A._isSubtype(universe, tOptionalPositional[i], tEnv, t1, sEnv, false)) + return false; + } + sNamed = sParameters._named; + tNamed = tParameters._named; + sNamedLength = sNamed.length; + tNamedLength = tNamed.length; + for (sIndex = 0, tIndex = 0; tIndex < tNamedLength; tIndex += 3) { + tName = tNamed[tIndex]; + for (; true;) { + if (sIndex >= sNamedLength) + return false; + sName = sNamed[sIndex]; + sIndex += 3; + if (tName < sName) + return false; + sIsRequired = sNamed[sIndex - 2]; + if (sName < tName) { + if (sIsRequired) + return false; + continue; + } + t1 = tNamed[tIndex + 1]; + if (sIsRequired && !t1) + return false; + t1 = sNamed[sIndex - 1]; + if (!A._isSubtype(universe, tNamed[tIndex + 2], tEnv, t1, sEnv, false)) + return false; + break; + } + } 
+ for (; sIndex < sNamedLength;) { + if (sNamed[sIndex + 1]) + return false; + sIndex += 3; + } + return true; + }, + _isInterfaceSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var rule, recipes, $length, supertypeArgs, i, + sName = s._primary, + tName = t._primary; + for (; sName !== tName;) { + rule = universe.tR[sName]; + if (rule == null) + return false; + if (typeof rule == "string") { + sName = rule; + continue; + } + recipes = rule[tName]; + if (recipes == null) + return false; + $length = recipes.length; + supertypeArgs = $length > 0 ? new Array($length) : init.typeUniverse.sEA; + for (i = 0; i < $length; ++i) + supertypeArgs[i] = A._Universe_evalInEnvironment(universe, s, recipes[i]); + return A._areArgumentsSubtypes(universe, supertypeArgs, null, sEnv, t._rest, tEnv, false); + } + return A._areArgumentsSubtypes(universe, s._rest, null, sEnv, t._rest, tEnv, false); + }, + _areArgumentsSubtypes(universe, sArgs, sVariances, sEnv, tArgs, tEnv, isLegacy) { + var i, + $length = sArgs.length; + for (i = 0; i < $length; ++i) + if (!A._isSubtype(universe, sArgs[i], sEnv, tArgs[i], tEnv, false)) + return false; + return true; + }, + _isRecordSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var i, + sFields = s._rest, + tFields = t._rest, + sCount = sFields.length; + if (sCount !== tFields.length) + return false; + if (s._primary !== t._primary) + return false; + for (i = 0; i < sCount; ++i) + if (!A._isSubtype(universe, sFields[i], sEnv, tFields[i], tEnv, false)) + return false; + return true; + }, + isNullable(t) { + var kind = t._kind, + t1 = true; + if (!(t === type$.Null || t === type$.JSNull)) + if (!A.isSoundTopType(t)) + if (kind !== 7) + if (!(kind === 6 && A.isNullable(t._primary))) + t1 = kind === 8 && A.isNullable(t._primary); + return t1; + }, + isDefinitelyTopType(t) { + var t1; + if (!A.isSoundTopType(t)) + t1 = t === type$.legacy_Object; + else + t1 = true; + return t1; + }, + isSoundTopType(t) { + var kind = t._kind; + return kind === 2 || kind 
=== 3 || kind === 4 || kind === 5 || t === type$.nullable_Object; + }, + _Utils_objectAssign(o, other) { + var i, key, + keys = Object.keys(other), + $length = keys.length; + for (i = 0; i < $length; ++i) { + key = keys[i]; + o[key] = other[key]; + } + }, + _Utils_newArrayOrEmpty($length) { + return $length > 0 ? new Array($length) : init.typeUniverse.sEA; + }, + Rti: function Rti(t0, t1) { + var _ = this; + _._as = t0; + _._is = t1; + _._cachedRuntimeType = _._specializedTestResource = _._isSubtypeCache = _._precomputed1 = null; + _._kind = 0; + _._canonicalRecipe = _._bindCache = _._evalCache = _._rest = _._primary = null; + }, + _FunctionParameters: function _FunctionParameters() { + this._named = this._optionalPositional = this._requiredPositional = null; + }, + _Type: function _Type(t0) { + this._rti = t0; + }, + _Error: function _Error() { + }, + _TypeError: function _TypeError(t0) { + this.__rti$_message = t0; + }, + _AsyncRun__initializeScheduleImmediate() { + var t1, div, span; + if (self.scheduleImmediate != null) + return A.async__AsyncRun__scheduleImmediateJsOverride$closure(); + if (self.MutationObserver != null && self.document != null) { + t1 = {}; + div = self.document.createElement("div"); + span = self.document.createElement("span"); + t1.storedCallback = null; + new self.MutationObserver(A.convertDartClosureToJS(new A._AsyncRun__initializeScheduleImmediate_internalCallback(t1), 1)).observe(div, {childList: true}); + return new A._AsyncRun__initializeScheduleImmediate_closure(t1, div, span); + } else if (self.setImmediate != null) + return A.async__AsyncRun__scheduleImmediateWithSetImmediate$closure(); + return A.async__AsyncRun__scheduleImmediateWithTimer$closure(); + }, + _AsyncRun__scheduleImmediateJsOverride(callback) { + self.scheduleImmediate(A.convertDartClosureToJS(new A._AsyncRun__scheduleImmediateJsOverride_internalCallback(type$.void_Function._as(callback)), 0)); + }, + _AsyncRun__scheduleImmediateWithSetImmediate(callback) { + 
self.setImmediate(A.convertDartClosureToJS(new A._AsyncRun__scheduleImmediateWithSetImmediate_internalCallback(type$.void_Function._as(callback)), 0)); + }, + _AsyncRun__scheduleImmediateWithTimer(callback) { + type$.void_Function._as(callback); + A._TimerImpl$(0, callback); + }, + _TimerImpl$(milliseconds, callback) { + var t1 = new A._TimerImpl(); + t1._TimerImpl$2(milliseconds, callback); + return t1; + }, + _makeAsyncAwaitCompleter($T) { + return new A._AsyncAwaitCompleter(new A._Future($.Zone__current, $T._eval$1("_Future<0>")), $T._eval$1("_AsyncAwaitCompleter<0>")); + }, + _asyncStartSync(bodyFunction, completer) { + bodyFunction.call$2(0, null); + completer.isSync = true; + return completer._future; + }, + _asyncAwait(object, bodyFunction) { + A._awaitOnObject(object, bodyFunction); + }, + _asyncReturn(object, completer) { + completer.complete$1(object); + }, + _asyncRethrow(object, completer) { + completer.completeError$2(A.unwrapException(object), A.getTraceFromException(object)); + }, + _awaitOnObject(object, bodyFunction) { + var t1, future, + thenCallback = new A._awaitOnObject_closure(bodyFunction), + errorCallback = new A._awaitOnObject_closure0(bodyFunction); + if (object instanceof A._Future) + object._thenAwait$1$2(thenCallback, errorCallback, type$.dynamic); + else { + t1 = type$.dynamic; + if (object instanceof A._Future) + object.then$1$2$onError(thenCallback, errorCallback, t1); + else { + future = new A._Future($.Zone__current, type$._Future_dynamic); + future._state = 8; + future._resultOrListeners = object; + future._thenAwait$1$2(thenCallback, errorCallback, t1); + } + } + }, + _wrapJsFunctionForAsync($function) { + var $protected = function(fn, ERROR) { + return function(errorCode, result) { + while (true) { + try { + fn(errorCode, result); + break; + } catch (error) { + result = error; + errorCode = ERROR; + } + } + }; + }($function, 1); + return $.Zone__current.registerBinaryCallback$3$1(new 
A._wrapJsFunctionForAsync_closure($protected), type$.void, type$.int, type$.dynamic); + }, + AsyncError_defaultStackTrace(error) { + var stackTrace; + if (type$.Error._is(error)) { + stackTrace = error.get$stackTrace(); + if (stackTrace != null) + return stackTrace; + } + return B.C__StringStackTrace; + }, + _interceptError(error, stackTrace) { + if ($.Zone__current === B.C__RootZone) + return null; + return null; + }, + _interceptUserError(error, stackTrace) { + if ($.Zone__current !== B.C__RootZone) + A._interceptError(error, stackTrace); + if (stackTrace == null) + if (type$.Error._is(error)) { + stackTrace = error.get$stackTrace(); + if (stackTrace == null) { + A.Primitives_trySetStackTrace(error, B.C__StringStackTrace); + stackTrace = B.C__StringStackTrace; + } + } else + stackTrace = B.C__StringStackTrace; + else if (type$.Error._is(error)) + A.Primitives_trySetStackTrace(error, stackTrace); + return new A.AsyncError(error, stackTrace); + }, + _Future__chainCoreFuture(source, target, sync) { + var t2, t3, ignoreError, listeners, _box_0 = {}, + t1 = _box_0.source = source; + for (t2 = type$._Future_dynamic; t3 = t1._state, (t3 & 4) !== 0; t1 = source) { + source = t2._as(t1._resultOrListeners); + _box_0.source = source; + } + if (t1 === target) { + target._asyncCompleteError$2(new A.ArgumentError(true, t1, null, "Cannot complete a future with itself"), A.StackTrace_current()); + return; + } + ignoreError = target._state & 1; + t2 = t1._state = t3 | ignoreError; + if ((t2 & 24) === 0) { + listeners = type$.nullable__FutureListener_dynamic_dynamic._as(target._resultOrListeners); + target._state = target._state & 1 | 4; + target._resultOrListeners = t1; + t1._prependListeners$1(listeners); + return; + } + if (!sync) + if (target._resultOrListeners == null) + t1 = (t2 & 16) === 0 || ignoreError !== 0; + else + t1 = false; + else + t1 = true; + if (t1) { + listeners = target._removeListeners$0(); + target._cloneResult$1(_box_0.source); + 
A._Future__propagateToListeners(target, listeners); + return; + } + target._state ^= 2; + A._rootScheduleMicrotask(null, null, target._zone, type$.void_Function._as(new A._Future__chainCoreFuture_closure(_box_0, target))); + }, + _Future__propagateToListeners(source, listeners) { + var t2, t3, t4, _box_0, t5, t6, hasError, asyncError, nextListener, nextListener0, sourceResult, t7, zone, oldZone, result, current, _box_1 = {}, + t1 = _box_1.source = source; + for (t2 = type$.AsyncError, t3 = type$.nullable__FutureListener_dynamic_dynamic, t4 = type$.Future_dynamic; true;) { + _box_0 = {}; + t5 = t1._state; + t6 = (t5 & 16) === 0; + hasError = !t6; + if (listeners == null) { + if (hasError && (t5 & 1) === 0) { + asyncError = t2._as(t1._resultOrListeners); + A._rootHandleError(asyncError.error, asyncError.stackTrace); + } + return; + } + _box_0.listener = listeners; + nextListener = listeners._nextListener; + for (t1 = listeners; nextListener != null; t1 = nextListener, nextListener = nextListener0) { + t1._nextListener = null; + A._Future__propagateToListeners(_box_1.source, t1); + _box_0.listener = nextListener; + nextListener0 = nextListener._nextListener; + } + t5 = _box_1.source; + sourceResult = t5._resultOrListeners; + _box_0.listenerHasError = hasError; + _box_0.listenerValueOrError = sourceResult; + if (t6) { + t7 = t1.state; + t7 = (t7 & 1) !== 0 || (t7 & 15) === 8; + } else + t7 = true; + if (t7) { + zone = t1.result._zone; + if (hasError) { + t5 = t5._zone === zone; + t5 = !(t5 || t5); + } else + t5 = false; + if (t5) { + t2._as(sourceResult); + A._rootHandleError(sourceResult.error, sourceResult.stackTrace); + return; + } + oldZone = $.Zone__current; + if (oldZone !== zone) + $.Zone__current = zone; + else + oldZone = null; + t1 = t1.state; + if ((t1 & 15) === 8) + new A._Future__propagateToListeners_handleWhenCompleteCallback(_box_0, _box_1, hasError).call$0(); + else if (t6) { + if ((t1 & 1) !== 0) + new 
A._Future__propagateToListeners_handleValueCallback(_box_0, sourceResult).call$0(); + } else if ((t1 & 2) !== 0) + new A._Future__propagateToListeners_handleError(_box_1, _box_0).call$0(); + if (oldZone != null) + $.Zone__current = oldZone; + t1 = _box_0.listenerValueOrError; + if (t1 instanceof A._Future) { + t5 = _box_0.listener.$ti; + t5 = t5._eval$1("Future<2>")._is(t1) || !t5._rest[1]._is(t1); + } else + t5 = false; + if (t5) { + t4._as(t1); + result = _box_0.listener.result; + if ((t1._state & 24) !== 0) { + current = t3._as(result._resultOrListeners); + result._resultOrListeners = null; + listeners = result._reverseListeners$1(current); + result._state = t1._state & 30 | result._state & 1; + result._resultOrListeners = t1._resultOrListeners; + _box_1.source = t1; + continue; + } else + A._Future__chainCoreFuture(t1, result, true); + return; + } + } + result = _box_0.listener.result; + current = t3._as(result._resultOrListeners); + result._resultOrListeners = null; + listeners = result._reverseListeners$1(current); + t1 = _box_0.listenerHasError; + t5 = _box_0.listenerValueOrError; + if (!t1) { + result.$ti._precomputed1._as(t5); + result._state = 8; + result._resultOrListeners = t5; + } else { + t2._as(t5); + result._state = result._state & 1 | 16; + result._resultOrListeners = t5; + } + _box_1.source = result; + t1 = result; + } + }, + _registerErrorHandler(errorHandler, zone) { + var t1; + if (type$.dynamic_Function_Object_StackTrace._is(errorHandler)) + return zone.registerBinaryCallback$3$1(errorHandler, type$.dynamic, type$.Object, type$.StackTrace); + t1 = type$.dynamic_Function_Object; + if (t1._is(errorHandler)) + return t1._as(errorHandler); + throw A.wrapException(A.ArgumentError$value(errorHandler, "onError", string$.Error_)); + }, + _microtaskLoop() { + var entry, next; + for (entry = $._nextCallback; entry != null; entry = $._nextCallback) { + $._lastPriorityCallback = null; + next = entry.next; + $._nextCallback = next; + if (next == null) + 
$._lastCallback = null; + entry.callback.call$0(); + } + }, + _startMicrotaskLoop() { + $._isInCallbackLoop = true; + try { + A._microtaskLoop(); + } finally { + $._lastPriorityCallback = null; + $._isInCallbackLoop = false; + if ($._nextCallback != null) + $.$get$_AsyncRun__scheduleImmediateClosure().call$1(A.async___startMicrotaskLoop$closure()); + } + }, + _scheduleAsyncCallback(callback) { + var newEntry = new A._AsyncCallbackEntry(callback), + lastCallback = $._lastCallback; + if (lastCallback == null) { + $._nextCallback = $._lastCallback = newEntry; + if (!$._isInCallbackLoop) + $.$get$_AsyncRun__scheduleImmediateClosure().call$1(A.async___startMicrotaskLoop$closure()); + } else + $._lastCallback = lastCallback.next = newEntry; + }, + _schedulePriorityAsyncCallback(callback) { + var entry, lastPriorityCallback, next, + t1 = $._nextCallback; + if (t1 == null) { + A._scheduleAsyncCallback(callback); + $._lastPriorityCallback = $._lastCallback; + return; + } + entry = new A._AsyncCallbackEntry(callback); + lastPriorityCallback = $._lastPriorityCallback; + if (lastPriorityCallback == null) { + entry.next = t1; + $._nextCallback = $._lastPriorityCallback = entry; + } else { + next = lastPriorityCallback.next; + entry.next = next; + $._lastPriorityCallback = lastPriorityCallback.next = entry; + if (next == null) + $._lastCallback = entry; + } + }, + scheduleMicrotask(callback) { + var _null = null, + currentZone = $.Zone__current; + if (B.C__RootZone === currentZone) { + A._rootScheduleMicrotask(_null, _null, B.C__RootZone, callback); + return; + } + A._rootScheduleMicrotask(_null, _null, currentZone, type$.void_Function._as(currentZone.bindCallbackGuarded$1(callback))); + }, + StreamIterator_StreamIterator(stream, $T) { + A.checkNotNullable(stream, "stream", type$.Object); + return new A._StreamIterator($T._eval$1("_StreamIterator<0>")); + }, + _runGuarded(notificationHandler) { + return; + }, + _BufferingStreamSubscription__registerErrorHandler(zone, 
handleError) { + if (handleError == null) + handleError = A.async___nullErrorHandler$closure(); + if (type$.void_Function_Object_StackTrace._is(handleError)) + return zone.registerBinaryCallback$3$1(handleError, type$.dynamic, type$.Object, type$.StackTrace); + if (type$.void_Function_Object._is(handleError)) + return type$.dynamic_Function_Object._as(handleError); + throw A.wrapException(A.ArgumentError$("handleError callback must take either an Object (the error), or both an Object (the error) and a StackTrace.", null)); + }, + _nullErrorHandler(error, stackTrace) { + A._rootHandleError(error, stackTrace); + }, + _nullDoneHandler() { + }, + _rootHandleError(error, stackTrace) { + A._schedulePriorityAsyncCallback(new A._rootHandleError_closure(error, stackTrace)); + }, + _rootRun($self, $parent, zone, f, $R) { + var old, + t1 = $.Zone__current; + if (t1 === zone) + return f.call$0(); + $.Zone__current = zone; + old = t1; + try { + t1 = f.call$0(); + return t1; + } finally { + $.Zone__current = old; + } + }, + _rootRunUnary($self, $parent, zone, f, arg, $R, $T) { + var old, + t1 = $.Zone__current; + if (t1 === zone) + return f.call$1(arg); + $.Zone__current = zone; + old = t1; + try { + t1 = f.call$1(arg); + return t1; + } finally { + $.Zone__current = old; + } + }, + _rootRunBinary($self, $parent, zone, f, arg1, arg2, $R, T1, T2) { + var old, + t1 = $.Zone__current; + if (t1 === zone) + return f.call$2(arg1, arg2); + $.Zone__current = zone; + old = t1; + try { + t1 = f.call$2(arg1, arg2); + return t1; + } finally { + $.Zone__current = old; + } + }, + _rootScheduleMicrotask($self, $parent, zone, f) { + type$.void_Function._as(f); + if (B.C__RootZone !== zone) + f = zone.bindCallbackGuarded$1(f); + A._scheduleAsyncCallback(f); + }, + _AsyncRun__initializeScheduleImmediate_internalCallback: function _AsyncRun__initializeScheduleImmediate_internalCallback(t0) { + this._box_0 = t0; + }, + _AsyncRun__initializeScheduleImmediate_closure: function 
_AsyncRun__initializeScheduleImmediate_closure(t0, t1, t2) { + this._box_0 = t0; + this.div = t1; + this.span = t2; + }, + _AsyncRun__scheduleImmediateJsOverride_internalCallback: function _AsyncRun__scheduleImmediateJsOverride_internalCallback(t0) { + this.callback = t0; + }, + _AsyncRun__scheduleImmediateWithSetImmediate_internalCallback: function _AsyncRun__scheduleImmediateWithSetImmediate_internalCallback(t0) { + this.callback = t0; + }, + _TimerImpl: function _TimerImpl() { + }, + _TimerImpl_internalCallback: function _TimerImpl_internalCallback(t0, t1) { + this.$this = t0; + this.callback = t1; + }, + _AsyncAwaitCompleter: function _AsyncAwaitCompleter(t0, t1) { + this._future = t0; + this.isSync = false; + this.$ti = t1; + }, + _awaitOnObject_closure: function _awaitOnObject_closure(t0) { + this.bodyFunction = t0; + }, + _awaitOnObject_closure0: function _awaitOnObject_closure0(t0) { + this.bodyFunction = t0; + }, + _wrapJsFunctionForAsync_closure: function _wrapJsFunctionForAsync_closure(t0) { + this.$protected = t0; + }, + AsyncError: function AsyncError(t0, t1) { + this.error = t0; + this.stackTrace = t1; + }, + _BroadcastStream: function _BroadcastStream(t0, t1) { + this._async$_controller = t0; + this.$ti = t1; + }, + _BroadcastSubscription: function _BroadcastSubscription(t0, t1, t2, t3, t4) { + var _ = this; + _._eventState = 0; + _._async$_previous = _._async$_next = null; + _._async$_controller = t0; + _._onData = t1; + _._zone = t2; + _._state = t3; + _._pending = null; + _.$ti = t4; + }, + _BroadcastStreamController: function _BroadcastStreamController() { + }, + _SyncBroadcastStreamController: function _SyncBroadcastStreamController(t0, t1, t2) { + var _ = this; + _.onListen = t0; + _.onCancel = t1; + _._state = 0; + _._lastSubscription = _._firstSubscription = null; + _.$ti = t2; + }, + _SyncBroadcastStreamController__sendData_closure: function _SyncBroadcastStreamController__sendData_closure(t0, t1) { + this.$this = t0; + this.data = t1; + }, 
+ _Completer: function _Completer() { + }, + _AsyncCompleter: function _AsyncCompleter(t0, t1) { + this.future = t0; + this.$ti = t1; + }, + _FutureListener: function _FutureListener(t0, t1, t2, t3, t4) { + var _ = this; + _._nextListener = null; + _.result = t0; + _.state = t1; + _.callback = t2; + _.errorCallback = t3; + _.$ti = t4; + }, + _Future: function _Future(t0, t1) { + var _ = this; + _._state = 0; + _._zone = t0; + _._resultOrListeners = null; + _.$ti = t1; + }, + _Future__addListener_closure: function _Future__addListener_closure(t0, t1) { + this.$this = t0; + this.listener = t1; + }, + _Future__prependListeners_closure: function _Future__prependListeners_closure(t0, t1) { + this._box_0 = t0; + this.$this = t1; + }, + _Future__chainForeignFuture_closure: function _Future__chainForeignFuture_closure(t0) { + this.$this = t0; + }, + _Future__chainForeignFuture_closure0: function _Future__chainForeignFuture_closure0(t0) { + this.$this = t0; + }, + _Future__chainForeignFuture_closure1: function _Future__chainForeignFuture_closure1(t0, t1, t2) { + this.$this = t0; + this.e = t1; + this.s = t2; + }, + _Future__chainCoreFuture_closure: function _Future__chainCoreFuture_closure(t0, t1) { + this._box_0 = t0; + this.target = t1; + }, + _Future__asyncCompleteWithValue_closure: function _Future__asyncCompleteWithValue_closure(t0, t1) { + this.$this = t0; + this.value = t1; + }, + _Future__asyncCompleteError_closure: function _Future__asyncCompleteError_closure(t0, t1, t2) { + this.$this = t0; + this.error = t1; + this.stackTrace = t2; + }, + _Future__propagateToListeners_handleWhenCompleteCallback: function _Future__propagateToListeners_handleWhenCompleteCallback(t0, t1, t2) { + this._box_0 = t0; + this._box_1 = t1; + this.hasError = t2; + }, + _Future__propagateToListeners_handleWhenCompleteCallback_closure: function _Future__propagateToListeners_handleWhenCompleteCallback_closure(t0, t1) { + this.joinedResult = t0; + this.originalSource = t1; + }, + 
_Future__propagateToListeners_handleWhenCompleteCallback_closure0: function _Future__propagateToListeners_handleWhenCompleteCallback_closure0(t0) { + this.joinedResult = t0; + }, + _Future__propagateToListeners_handleValueCallback: function _Future__propagateToListeners_handleValueCallback(t0, t1) { + this._box_0 = t0; + this.sourceResult = t1; + }, + _Future__propagateToListeners_handleError: function _Future__propagateToListeners_handleError(t0, t1) { + this._box_1 = t0; + this._box_0 = t1; + }, + _AsyncCallbackEntry: function _AsyncCallbackEntry(t0) { + this.callback = t0; + this.next = null; + }, + Stream: function Stream() { + }, + Stream_length_closure: function Stream_length_closure(t0, t1) { + this._box_0 = t0; + this.$this = t1; + }, + Stream_length_closure0: function Stream_length_closure0(t0, t1) { + this._box_0 = t0; + this.future = t1; + }, + _ControllerStream: function _ControllerStream() { + }, + _ControllerSubscription: function _ControllerSubscription() { + }, + _BufferingStreamSubscription: function _BufferingStreamSubscription() { + }, + _StreamImpl: function _StreamImpl() { + }, + _DelayedEvent: function _DelayedEvent() { + }, + _DelayedData: function _DelayedData(t0, t1) { + this.value = t0; + this.next = null; + this.$ti = t1; + }, + _PendingEvents: function _PendingEvents(t0) { + var _ = this; + _._state = 0; + _.lastPendingEvent = _.firstPendingEvent = null; + _.$ti = t0; + }, + _PendingEvents_schedule_closure: function _PendingEvents_schedule_closure(t0, t1) { + this.$this = t0; + this.dispatch = t1; + }, + _DoneStreamSubscription: function _DoneStreamSubscription(t0, t1) { + var _ = this; + _._state = 1; + _._zone = t0; + _._onDone = null; + _.$ti = t1; + }, + _StreamIterator: function _StreamIterator(t0) { + this.$ti = t0; + }, + _Zone: function _Zone() { + }, + _rootHandleError_closure: function _rootHandleError_closure(t0, t1) { + this.error = t0; + this.stackTrace = t1; + }, + _RootZone: function _RootZone() { + }, + 
_RootZone_bindCallbackGuarded_closure: function _RootZone_bindCallbackGuarded_closure(t0, t1) { + this.$this = t0; + this.f = t1; + }, + _HashMap__getTableEntry(table, key) { + var entry = table[key]; + return entry === table ? null : entry; + }, + _HashMap__setTableEntry(table, key, value) { + if (value == null) + table[key] = table; + else + table[key] = value; + }, + _HashMap__newHashTable() { + var table = Object.create(null); + A._HashMap__setTableEntry(table, "", table); + delete table[""]; + return table; + }, + LinkedHashMap_LinkedHashMap$_literal(keyValuePairs, $K, $V) { + return $K._eval$1("@<0>")._bind$1($V)._eval$1("LinkedHashMap<1,2>")._as(A.fillLiteralMap(keyValuePairs, new A.JsLinkedHashMap($K._eval$1("@<0>")._bind$1($V)._eval$1("JsLinkedHashMap<1,2>")))); + }, + LinkedHashMap_LinkedHashMap$_empty($K, $V) { + return new A.JsLinkedHashMap($K._eval$1("@<0>")._bind$1($V)._eval$1("JsLinkedHashMap<1,2>")); + }, + MapBase_mapToString(m) { + var result, t1; + if (A.isToStringVisiting(m)) + return "{...}"; + result = new A.StringBuffer(""); + try { + t1 = {}; + B.JSArray_methods.add$1($.toStringVisiting, m); + result._contents += "{"; + t1.first = true; + m.forEach$1(0, new A.MapBase_mapToString_closure(t1, result)); + result._contents += "}"; + } finally { + if (0 >= $.toStringVisiting.length) + return A.ioore($.toStringVisiting, -1); + $.toStringVisiting.pop(); + } + t1 = result._contents; + return t1.charCodeAt(0) == 0 ? 
t1 : t1; + }, + _HashMap: function _HashMap() { + }, + _IdentityHashMap: function _IdentityHashMap(t0) { + var _ = this; + _._collection$_length = 0; + _._collection$_keys = _._collection$_rest = _._collection$_nums = _._collection$_strings = null; + _.$ti = t0; + }, + _HashMapKeyIterable: function _HashMapKeyIterable(t0, t1) { + this._collection$_map = t0; + this.$ti = t1; + }, + _HashMapKeyIterator: function _HashMapKeyIterator(t0, t1, t2) { + var _ = this; + _._collection$_map = t0; + _._collection$_keys = t1; + _._offset = 0; + _._collection$_current = null; + _.$ti = t2; + }, + ListBase: function ListBase() { + }, + MapBase: function MapBase() { + }, + MapBase_mapToString_closure: function MapBase_mapToString_closure(t0, t1) { + this._box_0 = t0; + this.result = t1; + }, + _UnmodifiableMapMixin: function _UnmodifiableMapMixin() { + }, + MapView: function MapView() { + }, + UnmodifiableMapView: function UnmodifiableMapView() { + }, + _UnmodifiableMapView_MapView__UnmodifiableMapMixin: function _UnmodifiableMapView_MapView__UnmodifiableMapMixin() { + }, + _Base64Encoder_encodeChunk(alphabet, bytes, start, end, isLast, output, outputIndex, state) { + var t1, t2, t3, i, byteOr, byte, outputIndex0, t4, t5, outputIndex1, + bits = state >>> 2, + expectedChars = 3 - (state & 3); + for (t1 = bytes.length, t2 = alphabet.length, t3 = output.$flags | 0, i = start, byteOr = 0; i < end; ++i) { + if (!(i < t1)) + return A.ioore(bytes, i); + byte = bytes[i]; + byteOr |= byte; + bits = (bits << 8 | byte) & 16777215; + --expectedChars; + if (expectedChars === 0) { + outputIndex0 = outputIndex + 1; + t4 = bits >>> 18 & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + t3 & 2 && A.throwUnsupportedOperation(output); + t5 = output.length; + if (!(outputIndex < t5)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t4); + outputIndex = outputIndex0 + 1; + t4 = bits >>> 12 & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + if 
(!(outputIndex0 < t5)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t4); + outputIndex0 = outputIndex + 1; + t4 = bits >>> 6 & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + if (!(outputIndex < t5)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t4); + outputIndex = outputIndex0 + 1; + t4 = bits & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + if (!(outputIndex0 < t5)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t4); + bits = 0; + expectedChars = 3; + } + } + if (byteOr >= 0 && byteOr <= 255) { + if (expectedChars < 3) { + outputIndex0 = outputIndex + 1; + outputIndex1 = outputIndex0 + 1; + if (3 - expectedChars === 1) { + t1 = bits >>> 2 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + t3 & 2 && A.throwUnsupportedOperation(output); + t3 = output.length; + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t1); + t1 = bits << 4 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + if (!(outputIndex0 < t3)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t1); + outputIndex = outputIndex1 + 1; + if (!(outputIndex1 < t3)) + return A.ioore(output, outputIndex1); + output[outputIndex1] = 61; + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = 61; + } else { + t1 = bits >>> 10 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + t3 & 2 && A.throwUnsupportedOperation(output); + t3 = output.length; + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t1); + t1 = bits >>> 4 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + if (!(outputIndex0 < t3)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t1); + outputIndex = outputIndex1 + 1; + t1 = bits << 2 & 63; + if (!(t1 < t2)) + return 
A.ioore(alphabet, t1); + if (!(outputIndex1 < t3)) + return A.ioore(output, outputIndex1); + output[outputIndex1] = alphabet.charCodeAt(t1); + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = 61; + } + return 0; + } + return (bits << 2 | 3 - expectedChars) >>> 0; + } + for (i = start; i < end;) { + if (!(i < t1)) + return A.ioore(bytes, i); + byte = bytes[i]; + if (byte > 255) + break; + ++i; + } + if (!(i < t1)) + return A.ioore(bytes, i); + throw A.wrapException(A.ArgumentError$value(bytes, "Not a byte value at index " + i + ": 0x" + B.JSInt_methods.toRadixString$1(bytes[i], 16), null)); + }, + _Base64Decoder_decodeChunk(input, start, end, output, outIndex, state) { + var t1, t2, t3, i, charOr, char, t4, code, outIndex0, expectedPadding, + _s31_ = "Invalid encoding before padding", + _s17_ = "Invalid character", + bits = B.JSInt_methods._shrOtherPositive$1(state, 2), + count = state & 3, + inverseAlphabet = $.$get$_Base64Decoder__inverseAlphabet(); + for (t1 = input.length, t2 = inverseAlphabet.length, t3 = output.$flags | 0, i = start, charOr = 0; i < end; ++i) { + if (!(i < t1)) + return A.ioore(input, i); + char = input.charCodeAt(i); + charOr |= char; + t4 = char & 127; + if (!(t4 < t2)) + return A.ioore(inverseAlphabet, t4); + code = inverseAlphabet[t4]; + if (code >= 0) { + bits = (bits << 6 | code) & 16777215; + count = count + 1 & 3; + if (count === 0) { + outIndex0 = outIndex + 1; + t3 & 2 && A.throwUnsupportedOperation(output); + t4 = output.length; + if (!(outIndex < t4)) + return A.ioore(output, outIndex); + output[outIndex] = bits >>> 16 & 255; + outIndex = outIndex0 + 1; + if (!(outIndex0 < t4)) + return A.ioore(output, outIndex0); + output[outIndex0] = bits >>> 8 & 255; + outIndex0 = outIndex + 1; + if (!(outIndex < t4)) + return A.ioore(output, outIndex); + output[outIndex] = bits & 255; + outIndex = outIndex0; + bits = 0; + } + continue; + } else if (code === -1 && count > 1) { + if (charOr > 127) + break; + 
if (count === 3) { + if ((bits & 3) !== 0) + throw A.wrapException(A.FormatException$(_s31_, input, i)); + outIndex0 = outIndex + 1; + t3 & 2 && A.throwUnsupportedOperation(output); + t1 = output.length; + if (!(outIndex < t1)) + return A.ioore(output, outIndex); + output[outIndex] = bits >>> 10; + if (!(outIndex0 < t1)) + return A.ioore(output, outIndex0); + output[outIndex0] = bits >>> 2; + } else { + if ((bits & 15) !== 0) + throw A.wrapException(A.FormatException$(_s31_, input, i)); + t3 & 2 && A.throwUnsupportedOperation(output); + if (!(outIndex < output.length)) + return A.ioore(output, outIndex); + output[outIndex] = bits >>> 4; + } + expectedPadding = (3 - count) * 3; + if (char === 37) + expectedPadding += 2; + return A._Base64Decoder__checkPadding(input, i + 1, end, -expectedPadding - 1); + } + throw A.wrapException(A.FormatException$(_s17_, input, i)); + } + if (charOr >= 0 && charOr <= 127) + return (bits << 2 | count) >>> 0; + for (i = start; i < end; ++i) { + if (!(i < t1)) + return A.ioore(input, i); + if (input.charCodeAt(i) > 127) + break; + } + throw A.wrapException(A.FormatException$(_s17_, input, i)); + }, + _Base64Decoder__allocateBuffer(input, start, end, state) { + var paddingStart = A._Base64Decoder__trimPaddingChars(input, start, end), + $length = (state & 3) + (paddingStart - start), + bufferLength = B.JSInt_methods._shrOtherPositive$1($length, 2) * 3, + remainderLength = $length & 3; + if (remainderLength !== 0 && paddingStart < end) + bufferLength += remainderLength - 1; + if (bufferLength > 0) + return new Uint8Array(bufferLength); + return $.$get$_Base64Decoder__emptyBuffer(); + }, + _Base64Decoder__trimPaddingChars(input, start, end) { + var char, + t1 = input.length, + newEnd = end, + index = newEnd, + padding = 0; + while (true) { + if (!(index > start && padding < 2)) + break; + c$0: { + --index; + if (!(index >= 0 && index < t1)) + return A.ioore(input, index); + char = input.charCodeAt(index); + if (char === 61) { + ++padding; + 
newEnd = index; + break c$0; + } + if ((char | 32) === 100) { + if (index === start) + break; + --index; + if (!(index >= 0 && index < t1)) + return A.ioore(input, index); + char = input.charCodeAt(index); + } + if (char === 51) { + if (index === start) + break; + --index; + if (!(index >= 0 && index < t1)) + return A.ioore(input, index); + char = input.charCodeAt(index); + } + if (char === 37) { + ++padding; + newEnd = index; + break c$0; + } + break; + } + } + return newEnd; + }, + _Base64Decoder__checkPadding(input, start, end, state) { + var expectedPadding, t1, char; + if (start === end) + return state; + expectedPadding = -state - 1; + for (t1 = input.length; expectedPadding > 0;) { + if (!(start < t1)) + return A.ioore(input, start); + char = input.charCodeAt(start); + if (expectedPadding === 3) { + if (char === 61) { + expectedPadding -= 3; + ++start; + break; + } + if (char === 37) { + --expectedPadding; + ++start; + if (start === end) + break; + if (!(start < t1)) + return A.ioore(input, start); + char = input.charCodeAt(start); + } else + break; + } + if ((expectedPadding > 3 ? 
expectedPadding - 3 : expectedPadding) === 2) { + if (char !== 51) + break; + ++start; + --expectedPadding; + if (start === end) + break; + if (!(start < t1)) + return A.ioore(input, start); + char = input.charCodeAt(start); + } + if ((char | 32) !== 100) + break; + ++start; + --expectedPadding; + if (start === end) + break; + } + if (start !== end) + throw A.wrapException(A.FormatException$("Invalid padding character", input, start)); + return -expectedPadding - 1; + }, + Base64Codec: function Base64Codec() { + }, + Base64Encoder: function Base64Encoder() { + }, + _Base64Encoder: function _Base64Encoder(t0) { + this._convert$_state = 0; + this._alphabet = t0; + }, + Base64Decoder: function Base64Decoder() { + }, + _Base64Decoder: function _Base64Decoder() { + this._convert$_state = 0; + }, + Codec: function Codec() { + }, + Converter: function Converter() { + }, + Error__throw(error, stackTrace) { + error = A.wrapException(error); + if (error == null) + error = type$.Object._as(error); + error.stack = stackTrace.toString$0(0); + throw error; + throw A.wrapException("unreachable"); + }, + List_List$filled($length, fill, growable, $E) { + var i, + result = J.JSArray_JSArray$fixed($length, $E); + if ($length !== 0 && fill != null) + for (i = 0; i < $length; ++i) + result[i] = fill; + return result; + }, + List_List$of(elements, growable, $E) { + var t1 = A.List_List$_of(elements, $E); + return t1; + }, + List_List$_of(elements, $E) { + var list, t1; + if (Array.isArray(elements)) + return A._setArrayType(elements.slice(0), $E._eval$1("JSArray<0>")); + list = A._setArrayType([], $E._eval$1("JSArray<0>")); + for (t1 = J.get$iterator$ax(elements); t1.moveNext$0();) + B.JSArray_methods.add$1(list, t1.get$current()); + return list; + }, + String_String$fromCharCodes(charCodes) { + var t1; + A.RangeError_checkNotNegative(0, "start"); + t1 = A.String__stringFromUint8List(charCodes, 0, null); + return t1; + }, + String__stringFromUint8List(charCodes, start, endOrNull) { + 
var len = charCodes.length; + if (start >= len) + return ""; + return A.Primitives_stringFromNativeUint8List(charCodes, start, len); + }, + StringBuffer__writeAll(string, objects, separator) { + var iterator = J.get$iterator$ax(objects); + if (!iterator.moveNext$0()) + return string; + if (separator.length === 0) { + do + string += A.S(iterator.get$current()); + while (iterator.moveNext$0()); + } else { + string += A.S(iterator.get$current()); + for (; iterator.moveNext$0();) + string = string + separator + A.S(iterator.get$current()); + } + return string; + }, + NoSuchMethodError_NoSuchMethodError$withInvocation(receiver, invocation) { + return new A.NoSuchMethodError(receiver, invocation.get$memberName(), invocation.get$positionalArguments(), invocation.get$namedArguments()); + }, + StackTrace_current() { + return A.getTraceFromException(new Error()); + }, + DateTime__fourDigits(n) { + var absN = Math.abs(n), + sign = n < 0 ? "-" : ""; + if (absN >= 1000) + return "" + n; + if (absN >= 100) + return sign + "0" + absN; + if (absN >= 10) + return sign + "00" + absN; + return sign + "000" + absN; + }, + DateTime__threeDigits(n) { + if (n >= 100) + return "" + n; + if (n >= 10) + return "0" + n; + return "00" + n; + }, + DateTime__twoDigits(n) { + if (n >= 10) + return "" + n; + return "0" + n; + }, + Error_safeToString(object) { + if (typeof object == "number" || A._isBool(object) || object == null) + return J.toString$0$(object); + if (typeof object == "string") + return JSON.stringify(object); + return A.Primitives_safeToString(object); + }, + Error_throwWithStackTrace(error, stackTrace) { + A.checkNotNullable(error, "error", type$.Object); + A.checkNotNullable(stackTrace, "stackTrace", type$.StackTrace); + A.Error__throw(error, stackTrace); + }, + AssertionError$(message) { + return new A.AssertionError(message); + }, + ArgumentError$(message, $name) { + return new A.ArgumentError(false, null, $name, message); + }, + ArgumentError$value(value, $name, message) { + 
return new A.ArgumentError(true, value, $name, message); + }, + RangeError$value(value, $name) { + return new A.RangeError(null, null, true, value, $name, "Value not in range"); + }, + RangeError$range(invalidValue, minValue, maxValue, $name, message) { + return new A.RangeError(minValue, maxValue, true, invalidValue, $name, "Invalid value"); + }, + RangeError_checkValidRange(start, end, $length) { + if (0 > start || start > $length) + throw A.wrapException(A.RangeError$range(start, 0, $length, "start", null)); + if (end != null) { + if (start > end || end > $length) + throw A.wrapException(A.RangeError$range(end, start, $length, "end", null)); + return end; + } + return $length; + }, + RangeError_checkNotNegative(value, $name) { + if (value < 0) + throw A.wrapException(A.RangeError$range(value, 0, null, $name, null)); + return value; + }, + IndexError$withLength(invalidValue, $length, indexable, $name) { + return new A.IndexError($length, true, invalidValue, $name, "Index out of range"); + }, + UnsupportedError$(message) { + return new A.UnsupportedError(message); + }, + UnimplementedError$(message) { + return new A.UnimplementedError(message); + }, + StateError$(message) { + return new A.StateError(message); + }, + ConcurrentModificationError$(modifiedObject) { + return new A.ConcurrentModificationError(modifiedObject); + }, + Exception_Exception(message) { + return new A._Exception(message); + }, + FormatException$(message, source, offset) { + return new A.FormatException(message, source, offset); + }, + Iterable_iterableToShortString(iterable, leftDelimiter, rightDelimiter) { + var parts, t1; + if (A.isToStringVisiting(iterable)) { + if (leftDelimiter === "(" && rightDelimiter === ")") + return "(...)"; + return leftDelimiter + "..." 
+ rightDelimiter; + } + parts = A._setArrayType([], type$.JSArray_String); + B.JSArray_methods.add$1($.toStringVisiting, iterable); + try { + A._iterablePartsToStrings(iterable, parts); + } finally { + if (0 >= $.toStringVisiting.length) + return A.ioore($.toStringVisiting, -1); + $.toStringVisiting.pop(); + } + t1 = A.StringBuffer__writeAll(leftDelimiter, type$.Iterable_dynamic._as(parts), ", ") + rightDelimiter; + return t1.charCodeAt(0) == 0 ? t1 : t1; + }, + Iterable_iterableToFullString(iterable, leftDelimiter, rightDelimiter) { + var buffer, t1; + if (A.isToStringVisiting(iterable)) + return leftDelimiter + "..." + rightDelimiter; + buffer = new A.StringBuffer(leftDelimiter); + B.JSArray_methods.add$1($.toStringVisiting, iterable); + try { + t1 = buffer; + t1._contents = A.StringBuffer__writeAll(t1._contents, iterable, ", "); + } finally { + if (0 >= $.toStringVisiting.length) + return A.ioore($.toStringVisiting, -1); + $.toStringVisiting.pop(); + } + buffer._contents += rightDelimiter; + t1 = buffer._contents; + return t1.charCodeAt(0) == 0 ? 
t1 : t1; + }, + _iterablePartsToStrings(iterable, parts) { + var next, ultimateString, penultimateString, penultimate, ultimate, ultimate0, elision, + it = iterable.get$iterator(iterable), + $length = 0, count = 0; + while (true) { + if (!($length < 80 || count < 3)) + break; + if (!it.moveNext$0()) + return; + next = A.S(it.get$current()); + B.JSArray_methods.add$1(parts, next); + $length += next.length + 2; + ++count; + } + if (!it.moveNext$0()) { + if (count <= 5) + return; + if (0 >= parts.length) + return A.ioore(parts, -1); + ultimateString = parts.pop(); + if (0 >= parts.length) + return A.ioore(parts, -1); + penultimateString = parts.pop(); + } else { + penultimate = it.get$current(); + ++count; + if (!it.moveNext$0()) { + if (count <= 4) { + B.JSArray_methods.add$1(parts, A.S(penultimate)); + return; + } + ultimateString = A.S(penultimate); + if (0 >= parts.length) + return A.ioore(parts, -1); + penultimateString = parts.pop(); + $length += ultimateString.length + 2; + } else { + ultimate = it.get$current(); + ++count; + for (; it.moveNext$0(); penultimate = ultimate, ultimate = ultimate0) { + ultimate0 = it.get$current(); + ++count; + if (count > 100) { + while (true) { + if (!($length > 75 && count > 3)) + break; + if (0 >= parts.length) + return A.ioore(parts, -1); + $length -= parts.pop().length + 2; + --count; + } + B.JSArray_methods.add$1(parts, "..."); + return; + } + } + penultimateString = A.S(penultimate); + ultimateString = A.S(ultimate); + $length += ultimateString.length + penultimateString.length + 4; + } + } + if (count > parts.length + 2) { + $length += 5; + elision = "..."; + } else + elision = null; + while (true) { + if (!($length > 80 && parts.length > 3)) + break; + if (0 >= parts.length) + return A.ioore(parts, -1); + $length -= parts.pop().length + 2; + if (elision == null) { + $length += 5; + elision = "..."; + } + } + if (elision != null) + B.JSArray_methods.add$1(parts, elision); + B.JSArray_methods.add$1(parts, 
penultimateString); + B.JSArray_methods.add$1(parts, ultimateString); + }, + Object_hash(object1, object2) { + var t1 = B.JSInt_methods.get$hashCode(object1); + object2 = B.JSInt_methods.get$hashCode(object2); + object2 = A.SystemHash_finish(A.SystemHash_combine(A.SystemHash_combine($.$get$_hashSeed(), t1), object2)); + return object2; + }, + NoSuchMethodError_toString_closure: function NoSuchMethodError_toString_closure(t0, t1) { + this._box_0 = t0; + this.sb = t1; + }, + DateTime: function DateTime(t0, t1, t2) { + this._value = t0; + this._microsecond = t1; + this.isUtc = t2; + }, + _Enum: function _Enum() { + }, + Error: function Error() { + }, + AssertionError: function AssertionError(t0) { + this.message = t0; + }, + TypeError: function TypeError() { + }, + ArgumentError: function ArgumentError(t0, t1, t2, t3) { + var _ = this; + _._hasValue = t0; + _.invalidValue = t1; + _.name = t2; + _.message = t3; + }, + RangeError: function RangeError(t0, t1, t2, t3, t4, t5) { + var _ = this; + _.start = t0; + _.end = t1; + _._hasValue = t2; + _.invalidValue = t3; + _.name = t4; + _.message = t5; + }, + IndexError: function IndexError(t0, t1, t2, t3, t4) { + var _ = this; + _.length = t0; + _._hasValue = t1; + _.invalidValue = t2; + _.name = t3; + _.message = t4; + }, + NoSuchMethodError: function NoSuchMethodError(t0, t1, t2, t3) { + var _ = this; + _._core$_receiver = t0; + _._core$_memberName = t1; + _._core$_arguments = t2; + _._namedArguments = t3; + }, + UnsupportedError: function UnsupportedError(t0) { + this.message = t0; + }, + UnimplementedError: function UnimplementedError(t0) { + this.message = t0; + }, + StateError: function StateError(t0) { + this.message = t0; + }, + ConcurrentModificationError: function ConcurrentModificationError(t0) { + this.modifiedObject = t0; + }, + OutOfMemoryError: function OutOfMemoryError() { + }, + StackOverflowError: function StackOverflowError() { + }, + _Exception: function _Exception(t0) { + this.message = t0; + }, + 
FormatException: function FormatException(t0, t1, t2) { + this.message = t0; + this.source = t1; + this.offset = t2; + }, + Iterable: function Iterable() { + }, + Null: function Null() { + }, + Object: function Object() { + }, + _StringStackTrace: function _StringStackTrace() { + }, + StringBuffer: function StringBuffer(t0) { + this._contents = t0; + }, + _convertDartFunctionFast(f) { + var ret, + existing = f.$dart_jsFunction; + if (existing != null) + return existing; + ret = function(_call, f) { + return function() { + return _call(f, Array.prototype.slice.apply(arguments)); + }; + }(A._callDartFunctionFast, f); + ret[$.$get$DART_CLOSURE_PROPERTY_NAME()] = f; + f.$dart_jsFunction = ret; + return ret; + }, + _callDartFunctionFast(callback, $arguments) { + type$.List_dynamic._as($arguments); + type$.Function._as(callback); + return A.Primitives_applyFunction(callback, $arguments, null); + }, + allowInterop(f, $F) { + if (typeof f == "function") + return f; + else + return $F._as(A._convertDartFunctionFast(f)); + }, + _functionToJS1(f) { + var result; + if (typeof f == "function") + throw A.wrapException(A.ArgumentError$("Attempting to rewrap a JS function.", null)); + result = function(_call, f) { + return function(arg1) { + return _call(f, arg1, arguments.length); + }; + }(A._callDartFunctionFast1, f); + result[$.$get$DART_CLOSURE_PROPERTY_NAME()] = f; + return result; + }, + _callDartFunctionFast1(callback, arg1, $length) { + type$.Function._as(callback); + if (A._asInt($length) >= 1) + return callback.call$1(arg1); + return callback.call$0(); + }, + _noJsifyRequired(o) { + return o == null || A._isBool(o) || typeof o == "number" || typeof o == "string" || type$.Int8List._is(o) || type$.Uint8List._is(o) || type$.Uint8ClampedList._is(o) || type$.Int16List._is(o) || type$.Uint16List._is(o) || type$.Int32List._is(o) || type$.Uint32List._is(o) || type$.Float32List._is(o) || type$.Float64List._is(o) || type$.ByteBuffer._is(o) || type$.ByteData._is(o); + }, + 
jsify(object) { + if (A._noJsifyRequired(object)) + return object; + return new A.jsify__convert(new A._IdentityHashMap(type$._IdentityHashMap_of_nullable_Object_and_nullable_Object)).call$1(object); + }, + callMethod(o, method, args, $T) { + return $T._as(o[method].apply(o, args)); + }, + promiseToFuture(jsPromise, $T) { + var t1 = new A._Future($.Zone__current, $T._eval$1("_Future<0>")), + completer = new A._AsyncCompleter(t1, $T._eval$1("_AsyncCompleter<0>")); + jsPromise.then(A.convertDartClosureToJS(new A.promiseToFuture_closure(completer, $T), 1), A.convertDartClosureToJS(new A.promiseToFuture_closure0(completer), 1)); + return t1; + }, + _noDartifyRequired(o) { + return o == null || typeof o === "boolean" || typeof o === "number" || typeof o === "string" || o instanceof Int8Array || o instanceof Uint8Array || o instanceof Uint8ClampedArray || o instanceof Int16Array || o instanceof Uint16Array || o instanceof Int32Array || o instanceof Uint32Array || o instanceof Float32Array || o instanceof Float64Array || o instanceof ArrayBuffer || o instanceof DataView; + }, + dartify(o) { + if (A._noDartifyRequired(o)) + return o; + return new A.dartify_convert(new A._IdentityHashMap(type$._IdentityHashMap_of_nullable_Object_and_nullable_Object)).call$1(o); + }, + jsify__convert: function jsify__convert(t0) { + this._convertedObjects = t0; + }, + promiseToFuture_closure: function promiseToFuture_closure(t0, t1) { + this.completer = t0; + this.T = t1; + }, + promiseToFuture_closure0: function promiseToFuture_closure0(t0) { + this.completer = t0; + }, + dartify_convert: function dartify_convert(t0) { + this._convertedObjects = t0; + }, + NullRejectionException: function NullRejectionException(t0) { + this.isUndefined = t0; + }, + _JSSecureRandom: function _JSSecureRandom(t0) { + this._math$_buffer = t0; + }, + findNALUIndices(stream) { + var start, pos0, t1, end, + result = A._setArrayType([], type$.JSArray_int), + pos = stream.length, + searchLength = pos - 2; + for 
(start = 0, pos0 = 0; pos0 < searchLength; start = pos0) { + while (true) { + if (pos0 < searchLength) { + if (!(pos0 >= 0)) + return A.ioore(stream, pos0); + t1 = !(stream[pos0] === 0 && stream[pos0 + 1] === 0 && stream[pos0 + 2] === 1); + } else + t1 = false; + if (!t1) + break; + ++pos0; + } + if (pos0 >= searchLength) + pos0 = pos; + end = pos0; + while (true) { + if (end > start) { + t1 = end - 1; + if (!(t1 >= 0)) + return A.ioore(stream, t1); + t1 = stream[t1] === 0; + } else + t1 = false; + if (!t1) + break; + --end; + } + if (start === 0) { + if (end !== start) + throw A.wrapException(A.Exception_Exception("byte stream contains leading data")); + } else + B.JSArray_methods.add$1(result, start); + pos0 += 3; + } + return result; + }, + CryptorError: function CryptorError(t0) { + this._name = t0; + }, + FrameInfo: function FrameInfo(t0, t1, t2, t3) { + var _ = this; + _.frameType = t0; + _.ssrc = t1; + _.timestamp = t2; + _.buffer = t3; + }, + FrameCryptor: function FrameCryptor(t0, t1, t2, t3, t4, t5, t6) { + var _ = this; + _.sendCounts = t0; + _.participantIdentity = t1; + _.trackId = t2; + _.codec = null; + _.keyHandler = t3; + _.__FrameCryptor_kind_A = $; + _._enabled = false; + _.lastError = t4; + _.currentKeyIndex = 0; + _.worker = t5; + _.sifGuard = t6; + }, + FrameCryptor_decodeFunction_decryptFrameInternal: function FrameCryptor_decodeFunction_decryptFrameInternal(t0, t1, t2, t3, t4, t5, t6) { + var _ = this; + _._box_1 = t0; + _._box_0 = t1; + _.$this = t2; + _.iv = t3; + _.srcFrame = t4; + _.headerLength = t5; + _.ivLength = t6; + }, + FrameCryptor_decodeFunction_ratchedKeyInternal: function FrameCryptor_decodeFunction_ratchedKeyInternal(t0, t1, t2, t3) { + var _ = this; + _._box_1 = t0; + _._box_0 = t1; + _.$this = t2; + _.decryptFrameInternal = t3; + }, + ParticipantKeyHandler$(keyOptions, participantIdentity, worker) { + var t1 = new A.ParticipantKeyHandler(keyOptions, worker, participantIdentity), + t2 = keyOptions.keyRingSze; + if (t2 <= 0 
|| t2 > 255) + A.throwExpression(A.Exception_Exception("Invalid key ring size")); + t1.set$__ParticipantKeyHandler_cryptoKeyRing_A(type$.List_nullable_KeySet._as(A.List_List$filled(t2, null, false, type$.nullable_KeySet))); + return t1; + }, + KeyOptions: function KeyOptions(t0, t1, t2, t3, t4, t5, t6) { + var _ = this; + _.sharedKey = t0; + _.ratchetSalt = t1; + _.ratchetWindowSize = t2; + _.failureTolerance = t3; + _.uncryptedMagicBytes = t4; + _.keyRingSze = t5; + _.discardFrameWhenCryptorNotReady = t6; + }, + KeyProvider: function KeyProvider(t0, t1, t2, t3) { + var _ = this; + _.worker = t0; + _.keyProviderOptions = t1; + _.participantKeys = t2; + _.sharedKeyHandler = null; + _.sharedKey = t3; + }, + KeySet: function KeySet(t0, t1) { + this.material = t0; + this.encryptionKey = t1; + }, + ParticipantKeyHandler: function ParticipantKeyHandler(t0, t1, t2) { + var _ = this; + _.currentKeyIndex = 0; + _.__ParticipantKeyHandler_cryptoKeyRing_A = $; + _._hasValidKey = false; + _.keyOptions = t0; + _.worker = t1; + _.participantIdentity = t2; + _._decryptionFailureCount = 0; + }, + SifGuard: function SifGuard() { + var _ = this; + _.consecutiveSifCount = 0; + _.sifSequenceStartedAt = null; + _.userFramesSinceSif = _.lastSifReceivedAt = 0; + }, + getTrackCryptor(participantIdentity, trackId, keyProvider) { + var t1, t2, _null = null, + cryptor = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.getTrackCryptor_closure(trackId), type$.FrameCryptor); + if (cryptor == null) { + $.$get$logger().log$4(B.Level_INFO_800, "creating new cryptor for " + participantIdentity + ", trackId " + trackId, _null, _null); + t1 = type$.JSObject._as(self.self); + t2 = type$.int; + cryptor = new A.FrameCryptor(A.LinkedHashMap_LinkedHashMap$_empty(t2, t2), participantIdentity, trackId, keyProvider.getParticipantKeyHandler$1(participantIdentity), B.CryptorError_0, t1, new A.SifGuard()); + B.JSArray_methods.add$1($.participantCryptors, cryptor); + } else if 
(participantIdentity !== cryptor.participantIdentity) { + t1 = keyProvider.getParticipantKeyHandler$1(participantIdentity); + if (cryptor.lastError !== B.CryptorError_1) { + $.$get$logger().log$4(B.Level_INFO_800, "setParticipantId: lastError != CryptorError.kOk, reset state to kNew", _null, _null); + cryptor.lastError = B.CryptorError_0; + } + cryptor.participantIdentity = participantIdentity; + cryptor.keyHandler = t1; + cryptor.sifGuard.reset$0(); + } + return cryptor; + }, + unsetCryptorParticipant(trackId) { + var t1 = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.unsetCryptorParticipant_closure(trackId), type$.FrameCryptor); + if (t1 != null) + t1.participantIdentity = null; + }, + main() { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + t2, t3, t1; + var $async$main = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $.$get$Logger_root(); + if (t1.parent != null) + A.throwExpression(A.UnsupportedError$('Please set "hierarchicalLoggingEnabled" to true if you want to change the level on a non-root logger.')); + J.$eq$(t1._level, B.Level_WARNING_900); + t1._level = B.Level_WARNING_900; + t1._getStream$0().listen$1(new A.main_closure()); + t1 = $.$get$logger(); + t1.log$4(B.Level_INFO_800, "Worker created", null, null); + t2 = self; + t3 = type$.JSObject; + if ("RTCTransformEvent" in t3._as(t2.self)) { + t1.log$4(B.Level_INFO_800, "setup RTCTransformEvent event handler", null, null); + t3._as(t2.self).onrtctransform = A._functionToJS1(new A.main_closure0()); + } + t3._as(t2.self).onmessage = A._functionToJS1(new A.main_closure1(new A.main_closure2())); + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$main, $async$completer); + }, + getTrackCryptor_closure: 
function getTrackCryptor_closure(t0) { + this.trackId = t0; + }, + unsetCryptorParticipant_closure: function unsetCryptorParticipant_closure(t0) { + this.trackId = t0; + }, + main_closure: function main_closure() { + }, + main_closure0: function main_closure0() { + }, + main_closure2: function main_closure2() { + }, + main__closure: function main__closure(t0) { + this.trackId = t0; + }, + main__closure0: function main__closure0(t0) { + this.trackId = t0; + }, + main__closure1: function main__closure1(t0) { + this.trackId = t0; + }, + main__closure2: function main__closure2(t0) { + this.trackId = t0; + }, + main_closure1: function main_closure1(t0) { + this.handleMessage = t0; + }, + Level: function Level(t0, t1) { + this.name = t0; + this.value = t1; + }, + LogRecord: function LogRecord(t0, t1, t2) { + this.level = t0; + this.message = t1; + this.loggerName = t2; + }, + Logger_Logger($name) { + return $.Logger__loggers.putIfAbsent$2($name, new A.Logger_Logger_closure($name)); + }, + Logger: function Logger(t0, t1, t2) { + var _ = this; + _.name = t0; + _.parent = t1; + _._level = null; + _._children = t2; + _._controller = null; + }, + Logger_Logger_closure: function Logger_Logger_closure(t0) { + this.name = t0; + }, + printString(string) { + if (typeof dartPrint == "function") { + dartPrint(string); + return; + } + if (typeof console == "object" && typeof console.log != "undefined") { + console.log(string); + return; + } + if (typeof print == "function") { + print(string); + return; + } + throw "Unable to print message: " + String(string); + }, + throwLateFieldNI(fieldName) { + A.throwExpressionWithWrapper(new A.LateError("Field '" + fieldName + "' has not been initialized."), new Error()); + }, + throwLateFieldADI(fieldName) { + A.throwExpressionWithWrapper(new A.LateError("Field '" + fieldName + "' has been assigned during initialization."), new Error()); + }, + IterableExtension_firstWhereOrNull(_this, test, $T) { + var t1, _i, element; + for (t1 = 
_this.length, _i = 0; _i < _this.length; _this.length === t1 || (0, A.throwConcurrentModificationError)(_this), ++_i) { + element = _this[_i]; + if (A.boolConversionCheck(test.call$1(element))) + return element; + } + return null; + }, + getAlgoOptions(algorithmName, salt) { + switch (algorithmName) { + case "HKDF": + return A.LinkedHashMap_LinkedHashMap$_literal(["name", "HKDF", "salt", salt, "hash", "SHA-256", "info", new Uint8Array(128)], type$.String, type$.dynamic); + case "PBKDF2": + return A.LinkedHashMap_LinkedHashMap$_literal(["name", "PBKDF2", "salt", salt, "hash", "SHA-256", "iterations", 100000], type$.String, type$.dynamic); + default: + throw A.wrapException(A.Exception_Exception("algorithm " + algorithmName + " is currently unsupported")); + } + } + }, + B = {}; + var holders = [A, J, B]; + var $ = {}; + A.JS_CONST.prototype = {}; + J.Interceptor.prototype = { + $eq(receiver, other) { + return receiver === other; + }, + get$hashCode(receiver) { + return A.Primitives_objectHashCode(receiver); + }, + toString$0(receiver) { + return "Instance of '" + A.Primitives_objectTypeName(receiver) + "'"; + }, + noSuchMethod$1(receiver, invocation) { + throw A.wrapException(A.NoSuchMethodError_NoSuchMethodError$withInvocation(receiver, type$.Invocation._as(invocation))); + }, + get$runtimeType(receiver) { + return A.createRuntimeType(A._instanceTypeFromConstructor(this)); + } + }; + J.JSBool.prototype = { + toString$0(receiver) { + return String(receiver); + }, + get$hashCode(receiver) { + return receiver ? 
519018 : 218159; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(type$.bool); + }, + $isTrustedGetRuntimeType: 1, + $isbool: 1 + }; + J.JSNull.prototype = { + $eq(receiver, other) { + return null == other; + }, + toString$0(receiver) { + return "null"; + }, + get$hashCode(receiver) { + return 0; + }, + $isTrustedGetRuntimeType: 1, + $isNull: 1 + }; + J.JavaScriptObject.prototype = {$isJSObject: 1}; + J.LegacyJavaScriptObject.prototype = { + get$hashCode(receiver) { + return 0; + }, + get$runtimeType(receiver) { + return B.Type_JSObject_ttY; + }, + toString$0(receiver) { + return String(receiver); + } + }; + J.PlainJavaScriptObject.prototype = {}; + J.UnknownJavaScriptObject.prototype = {}; + J.JavaScriptFunction.prototype = { + toString$0(receiver) { + var dartClosure = receiver[$.$get$DART_CLOSURE_PROPERTY_NAME()]; + if (dartClosure == null) + return this.super$LegacyJavaScriptObject$toString(receiver); + return "JavaScript function for " + J.toString$0$(dartClosure); + }, + $isFunction: 1 + }; + J.JavaScriptBigInt.prototype = { + get$hashCode(receiver) { + return 0; + }, + toString$0(receiver) { + return String(receiver); + } + }; + J.JavaScriptSymbol.prototype = { + get$hashCode(receiver) { + return 0; + }, + toString$0(receiver) { + return String(receiver); + } + }; + J.JSArray.prototype = { + add$1(receiver, value) { + A._arrayInstanceType(receiver)._precomputed1._as(value); + receiver.$flags & 1 && A.throwUnsupportedOperation(receiver, 29); + receiver.push(value); + }, + addAll$1(receiver, collection) { + var t1; + A._arrayInstanceType(receiver)._eval$1("Iterable<1>")._as(collection); + receiver.$flags & 1 && A.throwUnsupportedOperation(receiver, "addAll", 2); + if (Array.isArray(collection)) { + this._addAllFromArray$1(receiver, collection); + return; + } + for (t1 = J.get$iterator$ax(collection); t1.moveNext$0();) + receiver.push(t1.get$current()); + }, + _addAllFromArray$1(receiver, array) { + var len, i; + type$.JSArray_dynamic._as(array); 
+ len = array.length; + if (len === 0) + return; + if (receiver === array) + throw A.wrapException(A.ConcurrentModificationError$(receiver)); + for (i = 0; i < len; ++i) + receiver.push(array[i]); + }, + map$1$1(receiver, f, $T) { + var t1 = A._arrayInstanceType(receiver); + return new A.MappedListIterable(receiver, t1._bind$1($T)._eval$1("1(2)")._as(f), t1._eval$1("@<1>")._bind$1($T)._eval$1("MappedListIterable<1,2>")); + }, + elementAt$1(receiver, index) { + if (!(index >= 0 && index < receiver.length)) + return A.ioore(receiver, index); + return receiver[index]; + }, + toString$0(receiver) { + return A.Iterable_iterableToFullString(receiver, "[", "]"); + }, + get$iterator(receiver) { + return new J.ArrayIterator(receiver, receiver.length, A._arrayInstanceType(receiver)._eval$1("ArrayIterator<1>")); + }, + get$hashCode(receiver) { + return A.Primitives_objectHashCode(receiver); + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + if (!(index >= 0 && index < receiver.length)) + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + return receiver[index]; + }, + $indexSet(receiver, index, value) { + A._arrayInstanceType(receiver)._precomputed1._as(value); + receiver.$flags & 2 && A.throwUnsupportedOperation(receiver); + if (!(index >= 0 && index < receiver.length)) + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + receiver[index] = value; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(A._arrayInstanceType(receiver)); + }, + $isEfficientLengthIterable: 1, + $isIterable: 1, + $isList: 1 + }; + J.JSUnmodifiableArray.prototype = {}; + J.ArrayIterator.prototype = { + get$current() { + var t1 = this._current; + return t1 == null ? 
this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var t2, _this = this, + t1 = _this._iterable, + $length = t1.length; + if (_this._length !== $length) { + t1 = A.throwConcurrentModificationError(t1); + throw A.wrapException(t1); + } + t2 = _this._index; + if (t2 >= $length) { + _this.set$_current(null); + return false; + } + _this.set$_current(t1[t2]); + ++_this._index; + return true; + }, + set$_current(_current) { + this._current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + J.JSNumber.prototype = { + toInt$0(receiver) { + var t1; + if (receiver >= -2147483648 && receiver <= 2147483647) + return receiver | 0; + if (isFinite(receiver)) { + t1 = receiver < 0 ? Math.ceil(receiver) : Math.floor(receiver); + return t1 + 0; + } + throw A.wrapException(A.UnsupportedError$("" + receiver + ".toInt()")); + }, + toRadixString$1(receiver, radix) { + var result, t1, t2, match, exponent; + if (radix < 2 || radix > 36) + throw A.wrapException(A.RangeError$range(radix, 2, 36, "radix", null)); + result = receiver.toString(radix); + t1 = result.length; + t2 = t1 - 1; + if (!(t2 >= 0)) + return A.ioore(result, t2); + if (result.charCodeAt(t2) !== 41) + return result; + match = /^([\da-z]+)(?:\.([\da-z]+))?\(e\+(\d+)\)$/.exec(result); + if (match == null) + A.throwExpression(A.UnsupportedError$("Unexpected toString result: " + result)); + t1 = match.length; + if (1 >= t1) + return A.ioore(match, 1); + result = match[1]; + if (3 >= t1) + return A.ioore(match, 3); + exponent = +match[3]; + t1 = match[2]; + if (t1 != null) { + result += t1; + exponent -= t1.length; + } + return result + B.JSString_methods.$mul("0", exponent); + }, + toString$0(receiver) { + if (receiver === 0 && 1 / receiver < 0) + return "-0.0"; + else + return "" + receiver; + }, + get$hashCode(receiver) { + var absolute, floorLog2, factor, scaled, + intValue = receiver | 0; + if (receiver === intValue) + return intValue & 536870911; + absolute = Math.abs(receiver); + floorLog2 = 
Math.log(absolute) / 0.6931471805599453 | 0; + factor = Math.pow(2, floorLog2); + scaled = absolute < 1 ? absolute / factor : factor / absolute; + return ((scaled * 9007199254740992 | 0) + (scaled * 3542243181176521 | 0)) * 599197 + floorLog2 * 1259 & 536870911; + }, + $mod(receiver, other) { + var result = receiver % other; + if (result === 0) + return 0; + if (result > 0) + return result; + return result + other; + }, + _tdivFast$1(receiver, other) { + return (receiver | 0) === receiver ? receiver / other | 0 : this._tdivSlow$1(receiver, other); + }, + _tdivSlow$1(receiver, other) { + var quotient = receiver / other; + if (quotient >= -2147483648 && quotient <= 2147483647) + return quotient | 0; + if (quotient > 0) { + if (quotient !== 1 / 0) + return Math.floor(quotient); + } else if (quotient > -1 / 0) + return Math.ceil(quotient); + throw A.wrapException(A.UnsupportedError$("Result of truncating division is " + A.S(quotient) + ": " + A.S(receiver) + " ~/ " + other)); + }, + _shrOtherPositive$1(receiver, other) { + var t1; + if (receiver > 0) + t1 = this._shrBothPositive$1(receiver, other); + else { + t1 = other > 31 ? 31 : other; + t1 = receiver >> t1 >>> 0; + } + return t1; + }, + _shrBothPositive$1(receiver, other) { + return other > 31 ? 
0 : receiver >>> other; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(type$.num); + }, + $isdouble: 1, + $isnum: 1 + }; + J.JSInt.prototype = { + get$runtimeType(receiver) { + return A.createRuntimeType(type$.int); + }, + $isTrustedGetRuntimeType: 1, + $isint: 1 + }; + J.JSNumNotInt.prototype = { + get$runtimeType(receiver) { + return A.createRuntimeType(type$.double); + }, + $isTrustedGetRuntimeType: 1 + }; + J.JSString.prototype = { + endsWith$1(receiver, other) { + var otherLength = other.length, + t1 = receiver.length; + if (otherLength > t1) + return false; + return other === this.substring$1(receiver, t1 - otherLength); + }, + startsWith$1(receiver, pattern) { + var otherLength = pattern.length; + if (otherLength > receiver.length) + return false; + return pattern === receiver.substring(0, otherLength); + }, + substring$2(receiver, start, end) { + return receiver.substring(start, A.RangeError_checkValidRange(start, end, receiver.length)); + }, + substring$1(receiver, start) { + return this.substring$2(receiver, start, null); + }, + $mul(receiver, times) { + var s, result; + if (0 >= times) + return ""; + if (times === 1 || receiver.length === 0) + return receiver; + if (times !== times >>> 0) + throw A.wrapException(B.C_OutOfMemoryError); + for (s = receiver, result = ""; true;) { + if ((times & 1) === 1) + result = s + result; + times = times >>> 1; + if (times === 0) + break; + s += s; + } + return result; + }, + lastIndexOf$1(receiver, pattern) { + var start = receiver.length, + t1 = pattern.length; + if (start + t1 > start) + start -= t1; + return receiver.lastIndexOf(pattern, start); + }, + toString$0(receiver) { + return receiver; + }, + get$hashCode(receiver) { + var t1, hash, i; + for (t1 = receiver.length, hash = 0, i = 0; i < t1; ++i) { + hash = hash + receiver.charCodeAt(i) & 536870911; + hash = hash + ((hash & 524287) << 10) & 536870911; + hash ^= hash >> 6; + } + hash = hash + ((hash & 67108863) << 3) & 536870911; + hash ^= hash 
>> 11; + return hash + ((hash & 16383) << 15) & 536870911; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(type$.String); + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + if (!(index.$ge(0, 0) && index.$lt(0, receiver.length))) + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isPattern: 1, + $isString: 1 + }; + A._CopyingBytesBuilder.prototype = { + add$1(_, bytes) { + var byteCount, required, t1, t2, newSize, x, newBuffer, _this = this; + type$.List_int._as(bytes); + byteCount = bytes.length; + if (byteCount === 0) + return; + required = _this.__internal$_length + byteCount; + t1 = _this._buffer; + t2 = t1.length; + if (t2 < required) { + newSize = required * 2; + if (newSize < 1024) + newSize = 1024; + else { + x = newSize - 1; + x |= B.JSInt_methods._shrOtherPositive$1(x, 1); + x |= x >>> 2; + x |= x >>> 4; + x |= x >>> 8; + newSize = ((x | x >>> 16) >>> 0) + 1; + } + newBuffer = new Uint8Array(newSize); + B.NativeUint8List_methods.setRange$3(newBuffer, 0, t2, t1); + _this._buffer = newBuffer; + t1 = newBuffer; + } + B.NativeUint8List_methods.setRange$3(t1, _this.__internal$_length, required, bytes); + _this.__internal$_length = required; + }, + toBytes$0() { + var _this = this; + if (_this.__internal$_length === 0) + return $.$get$_CopyingBytesBuilder__emptyList(); + return new Uint8Array(A._ensureNativeList(J.asUint8List$2$x(B.NativeUint8List_methods.get$buffer(_this._buffer), _this._buffer.byteOffset, _this.__internal$_length))); + }, + get$length(_) { + return this.__internal$_length; + }, + $isBytesBuilder: 1 + }; + A.LateError.prototype = { + toString$0(_) { + return "LateInitializationError: " + this._message; + } + }; + A.SentinelValue.prototype = {}; + A.EfficientLengthIterable.prototype = {}; + A.ListIterable.prototype = { + get$iterator(_) { + var _this = this; + return new A.ListIterator(_this, 
_this.get$length(_this), A._instanceType(_this)._eval$1("ListIterator")); + }, + map$1$1(_, toElement, $T) { + var t1 = A._instanceType(this); + return new A.MappedListIterable(this, t1._bind$1($T)._eval$1("1(ListIterable.E)")._as(toElement), t1._eval$1("@")._bind$1($T)._eval$1("MappedListIterable<1,2>")); + } + }; + A.ListIterator.prototype = { + get$current() { + var t1 = this.__internal$_current; + return t1 == null ? this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var t3, _this = this, + t1 = _this.__internal$_iterable, + t2 = J.getInterceptor$asx(t1), + $length = t2.get$length(t1); + if (_this.__internal$_length !== $length) + throw A.wrapException(A.ConcurrentModificationError$(t1)); + t3 = _this.__internal$_index; + if (t3 >= $length) { + _this.set$__internal$_current(null); + return false; + } + _this.set$__internal$_current(t2.elementAt$1(t1, t3)); + ++_this.__internal$_index; + return true; + }, + set$__internal$_current(_current) { + this.__internal$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.MappedIterable.prototype = { + get$iterator(_) { + var t1 = this.__internal$_iterable; + return new A.MappedIterator(t1.get$iterator(t1), this._f, A._instanceType(this)._eval$1("MappedIterator<1,2>")); + }, + get$length(_) { + var t1 = this.__internal$_iterable; + return t1.get$length(t1); + } + }; + A.EfficientLengthMappedIterable.prototype = {$isEfficientLengthIterable: 1}; + A.MappedIterator.prototype = { + moveNext$0() { + var _this = this, + t1 = _this._iterator; + if (t1.moveNext$0()) { + _this.set$__internal$_current(_this._f.call$1(t1.get$current())); + return true; + } + _this.set$__internal$_current(null); + return false; + }, + get$current() { + var t1 = this.__internal$_current; + return t1 == null ? 
this.$ti._rest[1]._as(t1) : t1; + }, + set$__internal$_current(_current) { + this.__internal$_current = this.$ti._eval$1("2?")._as(_current); + }, + $isIterator: 1 + }; + A.MappedListIterable.prototype = { + get$length(_) { + return J.get$length$asx(this._source); + }, + elementAt$1(_, index) { + return this._f.call$1(J.elementAt$1$ax(this._source, index)); + } + }; + A.WhereIterable.prototype = { + get$iterator(_) { + return new A.WhereIterator(J.get$iterator$ax(this.__internal$_iterable), this._f, this.$ti._eval$1("WhereIterator<1>")); + }, + map$1$1(_, toElement, $T) { + var t1 = this.$ti; + return new A.MappedIterable(this, t1._bind$1($T)._eval$1("1(2)")._as(toElement), t1._eval$1("@<1>")._bind$1($T)._eval$1("MappedIterable<1,2>")); + } + }; + A.WhereIterator.prototype = { + moveNext$0() { + var t1, t2; + for (t1 = this._iterator, t2 = this._f; t1.moveNext$0();) + if (A.boolConversionCheck(t2.call$1(t1.get$current()))) + return true; + return false; + }, + get$current() { + return this._iterator.get$current(); + }, + $isIterator: 1 + }; + A.FixedLengthListMixin.prototype = {}; + A.Symbol.prototype = { + get$hashCode(_) { + var hash = this._hashCode; + if (hash != null) + return hash; + hash = 664597 * B.JSString_methods.get$hashCode(this.__internal$_name) & 536870911; + this._hashCode = hash; + return hash; + }, + toString$0(_) { + return 'Symbol("' + this.__internal$_name + '")'; + }, + $eq(_, other) { + if (other == null) + return false; + return other instanceof A.Symbol && this.__internal$_name === other.__internal$_name; + }, + $isSymbol0: 1 + }; + A.ConstantMapView.prototype = {}; + A.ConstantMap.prototype = { + toString$0(_) { + return A.MapBase_mapToString(this); + }, + $isMap: 1 + }; + A.ConstantStringMap.prototype = { + get$length(_) { + return this._values.length; + }, + get$_keys() { + var keys = this.$keys; + if (keys == null) { + keys = Object.keys(this._jsIndex); + this.$keys = keys; + } + return keys; + }, + containsKey$1(key) { + if (typeof key 
!= "string") + return false; + if ("__proto__" === key) + return false; + return this._jsIndex.hasOwnProperty(key); + }, + $index(_, key) { + if (!this.containsKey$1(key)) + return null; + return this._values[this._jsIndex[key]]; + }, + forEach$1(_, f) { + var keys, values, t1, i; + this.$ti._eval$1("~(1,2)")._as(f); + keys = this.get$_keys(); + values = this._values; + for (t1 = keys.length, i = 0; i < t1; ++i) + f.call$2(keys[i], values[i]); + }, + get$keys() { + return new A._KeysOrValues(this.get$_keys(), this.$ti._eval$1("_KeysOrValues<1>")); + } + }; + A._KeysOrValues.prototype = { + get$length(_) { + return this._elements.length; + }, + get$iterator(_) { + var t1 = this._elements; + return new A._KeysOrValuesOrElementsIterator(t1, t1.length, this.$ti._eval$1("_KeysOrValuesOrElementsIterator<1>")); + } + }; + A._KeysOrValuesOrElementsIterator.prototype = { + get$current() { + var t1 = this.__js_helper$_current; + return t1 == null ? this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var _this = this, + t1 = _this.__js_helper$_index; + if (t1 >= _this.__js_helper$_length) { + _this.set$__js_helper$_current(null); + return false; + } + _this.set$__js_helper$_current(_this._elements[t1]); + ++_this.__js_helper$_index; + return true; + }, + set$__js_helper$_current(_current) { + this.__js_helper$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.JSInvocationMirror.prototype = { + get$memberName() { + var t1 = this._memberName; + if (t1 instanceof A.Symbol) + return t1; + return this._memberName = new A.Symbol(A._asString(t1)); + }, + get$positionalArguments() { + var t1, t2, argumentCount, list, index, _this = this; + if (_this.__js_helper$_kind === 1) + return B.List_empty; + t1 = _this._arguments; + t2 = J.getInterceptor$asx(t1); + argumentCount = t2.get$length(t1) - J.get$length$asx(_this._namedArgumentNames) - _this._typeArgumentCount; + if (argumentCount === 0) + return B.List_empty; + list = []; + for (index = 0; index 
< argumentCount; ++index) + list.push(t2.$index(t1, index)); + list.$flags = 3; + return list; + }, + get$namedArguments() { + var t1, t2, namedArgumentCount, t3, t4, namedArgumentsStartIndex, map, i, _this = this; + if (_this.__js_helper$_kind !== 0) + return B.Map_empty; + t1 = _this._namedArgumentNames; + t2 = J.getInterceptor$asx(t1); + namedArgumentCount = t2.get$length(t1); + t3 = _this._arguments; + t4 = J.getInterceptor$asx(t3); + namedArgumentsStartIndex = t4.get$length(t3) - namedArgumentCount - _this._typeArgumentCount; + if (namedArgumentCount === 0) + return B.Map_empty; + map = new A.JsLinkedHashMap(type$.JsLinkedHashMap_Symbol_dynamic); + for (i = 0; i < namedArgumentCount; ++i) + map.$indexSet(0, new A.Symbol(A._asString(t2.$index(t1, i))), t4.$index(t3, namedArgumentsStartIndex + i)); + return new A.ConstantMapView(map, type$.ConstantMapView_Symbol_dynamic); + }, + $isInvocation: 1 + }; + A.Primitives_functionNoSuchMethod_closure.prototype = { + call$2($name, argument) { + var t1; + A._asString($name); + t1 = this._box_0; + t1.names = t1.names + "$" + $name; + B.JSArray_methods.add$1(this.namedArgumentList, $name); + B.JSArray_methods.add$1(this.$arguments, argument); + ++t1.argumentCount; + }, + $signature: 12 + }; + A.TypeErrorDecoder.prototype = { + matchTypeError$1(message) { + var result, t1, _this = this, + match = new RegExp(_this._pattern).exec(message); + if (match == null) + return null; + result = Object.create(null); + t1 = _this._arguments; + if (t1 !== -1) + result.arguments = match[t1 + 1]; + t1 = _this._argumentsExpr; + if (t1 !== -1) + result.argumentsExpr = match[t1 + 1]; + t1 = _this._expr; + if (t1 !== -1) + result.expr = match[t1 + 1]; + t1 = _this._method; + if (t1 !== -1) + result.method = match[t1 + 1]; + t1 = _this._receiver; + if (t1 !== -1) + result.receiver = match[t1 + 1]; + return result; + } + }; + A.NullError.prototype = { + toString$0(_) { + return "Null check operator used on a null value"; + } + }; + 
A.JsNoSuchMethodError.prototype = { + toString$0(_) { + var t2, _this = this, + _s38_ = "NoSuchMethodError: method not found: '", + t1 = _this._method; + if (t1 == null) + return "NoSuchMethodError: " + _this.__js_helper$_message; + t2 = _this._receiver; + if (t2 == null) + return _s38_ + t1 + "' (" + _this.__js_helper$_message + ")"; + return _s38_ + t1 + "' on '" + t2 + "' (" + _this.__js_helper$_message + ")"; + } + }; + A.UnknownJsTypeError.prototype = { + toString$0(_) { + var t1 = this.__js_helper$_message; + return t1.length === 0 ? "Error" : "Error: " + t1; + } + }; + A.NullThrownFromJavaScriptException.prototype = { + toString$0(_) { + return "Throw of null ('" + (this._irritant === null ? "null" : "undefined") + "' from JavaScript)"; + } + }; + A.ExceptionAndStackTrace.prototype = {}; + A._StackTrace.prototype = { + toString$0(_) { + var trace, + t1 = this._trace; + if (t1 != null) + return t1; + t1 = this._exception; + trace = t1 !== null && typeof t1 === "object" ? t1.stack : null; + return this._trace = trace == null ? "" : trace; + }, + $isStackTrace: 1 + }; + A.Closure.prototype = { + toString$0(_) { + var $constructor = this.constructor, + $name = $constructor == null ? null : $constructor.name; + return "Closure '" + A.unminifyOrTag($name == null ? 
"unknown" : $name) + "'"; + }, + $isFunction: 1, + get$$call() { + return this; + }, + "call*": "call$1", + $requiredArgCount: 1, + $defaultValues: null + }; + A.Closure0Args.prototype = {"call*": "call$0", $requiredArgCount: 0}; + A.Closure2Args.prototype = {"call*": "call$2", $requiredArgCount: 2}; + A.TearOffClosure.prototype = {}; + A.StaticClosure.prototype = { + toString$0(_) { + var $name = this.$static_name; + if ($name == null) + return "Closure of unknown static method"; + return "Closure '" + A.unminifyOrTag($name) + "'"; + } + }; + A.BoundClosure.prototype = { + $eq(_, other) { + if (other == null) + return false; + if (this === other) + return true; + if (!(other instanceof A.BoundClosure)) + return false; + return this.$_target === other.$_target && this._receiver === other._receiver; + }, + get$hashCode(_) { + return (A.objectHashCode(this._receiver) ^ A.Primitives_objectHashCode(this.$_target)) >>> 0; + }, + toString$0(_) { + return "Closure '" + this.$_name + "' of " + ("Instance of '" + A.Primitives_objectTypeName(this._receiver) + "'"); + } + }; + A._CyclicInitializationError.prototype = { + toString$0(_) { + return "Reading static variable '" + this.variableName + "' during its initialization"; + } + }; + A.RuntimeError.prototype = { + toString$0(_) { + return "RuntimeError: " + this.message; + } + }; + A._AssertionError.prototype = { + toString$0(_) { + return "Assertion failed: " + A.Error_safeToString(this.message); + } + }; + A._Required.prototype = {}; + A.JsLinkedHashMap.prototype = { + get$length(_) { + return this.__js_helper$_length; + }, + get$keys() { + return new A.LinkedHashMapKeysIterable(this, A._instanceType(this)._eval$1("LinkedHashMapKeysIterable<1>")); + }, + containsKey$1(key) { + var strings = this._strings; + if (strings == null) + return false; + return strings[key] != null; + }, + $index(_, key) { + var strings, cell, t1, nums, _null = null; + if (typeof key == "string") { + strings = this._strings; + if (strings == null) 
+ return _null; + cell = strings[key]; + t1 = cell == null ? _null : cell.hashMapCellValue; + return t1; + } else if (typeof key == "number" && (key & 0x3fffffff) === key) { + nums = this._nums; + if (nums == null) + return _null; + cell = nums[key]; + t1 = cell == null ? _null : cell.hashMapCellValue; + return t1; + } else + return this.internalGet$1(key); + }, + internalGet$1(key) { + var bucket, index, + rest = this.__js_helper$_rest; + if (rest == null) + return null; + bucket = rest[this.internalComputeHashCode$1(key)]; + index = this.internalFindBucketIndex$2(bucket, key); + if (index < 0) + return null; + return bucket[index].hashMapCellValue; + }, + $indexSet(_, key, value) { + var strings, nums, rest, hash, bucket, index, _this = this, + t1 = A._instanceType(_this); + t1._precomputed1._as(key); + t1._rest[1]._as(value); + if (typeof key == "string") { + strings = _this._strings; + _this.__js_helper$_addHashTableEntry$3(strings == null ? _this._strings = _this._newHashTable$0() : strings, key, value); + } else if (typeof key == "number" && (key & 0x3fffffff) === key) { + nums = _this._nums; + _this.__js_helper$_addHashTableEntry$3(nums == null ? _this._nums = _this._newHashTable$0() : nums, key, value); + } else { + rest = _this.__js_helper$_rest; + if (rest == null) + rest = _this.__js_helper$_rest = _this._newHashTable$0(); + hash = _this.internalComputeHashCode$1(key); + bucket = rest[hash]; + if (bucket == null) + rest[hash] = [_this._newLinkedCell$2(key, value)]; + else { + index = _this.internalFindBucketIndex$2(bucket, key); + if (index >= 0) + bucket[index].hashMapCellValue = value; + else + bucket.push(_this._newLinkedCell$2(key, value)); + } + } + }, + putIfAbsent$2(key, ifAbsent) { + var t2, value, _this = this, + t1 = A._instanceType(_this); + t1._precomputed1._as(key); + t1._eval$1("2()")._as(ifAbsent); + if (_this.containsKey$1(key)) { + t2 = _this.$index(0, key); + return t2 == null ? 
t1._rest[1]._as(t2) : t2; + } + value = ifAbsent.call$0(); + _this.$indexSet(0, key, value); + return value; + }, + remove$1(_, key) { + var t1 = this._removeHashTableEntry$2(this._strings, key); + return t1; + }, + forEach$1(_, action) { + var cell, modifications, _this = this; + A._instanceType(_this)._eval$1("~(1,2)")._as(action); + cell = _this._first; + modifications = _this._modifications; + for (; cell != null;) { + action.call$2(cell.hashMapCellKey, cell.hashMapCellValue); + if (modifications !== _this._modifications) + throw A.wrapException(A.ConcurrentModificationError$(_this)); + cell = cell._next; + } + }, + __js_helper$_addHashTableEntry$3(table, key, value) { + var cell, + t1 = A._instanceType(this); + t1._precomputed1._as(key); + t1._rest[1]._as(value); + cell = table[key]; + if (cell == null) + table[key] = this._newLinkedCell$2(key, value); + else + cell.hashMapCellValue = value; + }, + _removeHashTableEntry$2(table, key) { + var cell; + if (table == null) + return null; + cell = table[key]; + if (cell == null) + return null; + this._unlinkCell$1(cell); + delete table[key]; + return cell.hashMapCellValue; + }, + _modified$0() { + this._modifications = this._modifications + 1 & 1073741823; + }, + _newLinkedCell$2(key, value) { + var _this = this, + t1 = A._instanceType(_this), + cell = new A.LinkedHashMapCell(t1._precomputed1._as(key), t1._rest[1]._as(value)); + if (_this._first == null) + _this._first = _this._last = cell; + else { + t1 = _this._last; + t1.toString; + cell._previous = t1; + _this._last = t1._next = cell; + } + ++_this.__js_helper$_length; + _this._modified$0(); + return cell; + }, + _unlinkCell$1(cell) { + var _this = this, + previous = cell._previous, + next = cell._next; + if (previous == null) + _this._first = next; + else + previous._next = next; + if (next == null) + _this._last = previous; + else + next._previous = previous; + --_this.__js_helper$_length; + _this._modified$0(); + }, + internalComputeHashCode$1(key) { + return 
J.get$hashCode$(key) & 1073741823; + }, + internalFindBucketIndex$2(bucket, key) { + var $length, i; + if (bucket == null) + return -1; + $length = bucket.length; + for (i = 0; i < $length; ++i) + if (J.$eq$(bucket[i].hashMapCellKey, key)) + return i; + return -1; + }, + toString$0(_) { + return A.MapBase_mapToString(this); + }, + _newHashTable$0() { + var table = Object.create(null); + table[""] = table; + delete table[""]; + return table; + }, + $isLinkedHashMap: 1 + }; + A.LinkedHashMapCell.prototype = {}; + A.LinkedHashMapKeysIterable.prototype = { + get$length(_) { + return this._map.__js_helper$_length; + }, + get$iterator(_) { + var t1 = this._map; + return new A.LinkedHashMapKeyIterator(t1, t1._modifications, t1._first, this.$ti._eval$1("LinkedHashMapKeyIterator<1>")); + } + }; + A.LinkedHashMapKeyIterator.prototype = { + get$current() { + return this.__js_helper$_current; + }, + moveNext$0() { + var cell, _this = this, + t1 = _this._map; + if (_this._modifications !== t1._modifications) + throw A.wrapException(A.ConcurrentModificationError$(t1)); + cell = _this._cell; + if (cell == null) { + _this.set$__js_helper$_current(null); + return false; + } else { + _this.set$__js_helper$_current(cell.hashMapCellKey); + _this._cell = cell._next; + return true; + } + }, + set$__js_helper$_current(_current) { + this.__js_helper$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.initHooks_closure.prototype = { + call$1(o) { + return this.getTag(o); + }, + $signature: 13 + }; + A.initHooks_closure0.prototype = { + call$2(o, tag) { + return this.getUnknownTag(o, tag); + }, + $signature: 14 + }; + A.initHooks_closure1.prototype = { + call$1(tag) { + return this.prototypeForTag(A._asString(tag)); + }, + $signature: 15 + }; + A.NativeByteBuffer.prototype = { + get$runtimeType(receiver) { + return B.Type_ByteBuffer_rqD; + }, + asUint8List$2(receiver, offsetInBytes, $length) { + return $length == null ? 
new Uint8Array(receiver, offsetInBytes) : new Uint8Array(receiver, offsetInBytes, $length); + }, + asUint8List$0(receiver) { + return this.asUint8List$2(receiver, 0, null); + }, + $isTrustedGetRuntimeType: 1, + $isNativeByteBuffer: 1, + $isByteBuffer: 1 + }; + A.NativeTypedData.prototype = { + get$buffer(receiver) { + if (((receiver.$flags | 0) & 2) !== 0) + return new A._UnmodifiableNativeByteBufferView(receiver.buffer); + else + return receiver.buffer; + }, + _invalidPosition$3(receiver, position, $length, $name) { + var t1 = A.RangeError$range(position, 0, $length, $name, null); + throw A.wrapException(t1); + }, + _checkPosition$3(receiver, position, $length, $name) { + if (position >>> 0 !== position || position > $length) + this._invalidPosition$3(receiver, position, $length, $name); + } + }; + A._UnmodifiableNativeByteBufferView.prototype = { + asUint8List$2(_, offsetInBytes, $length) { + var result = A.NativeUint8List_NativeUint8List$view(this._data, offsetInBytes, $length); + result.$flags = 3; + return result; + }, + asUint8List$0(_) { + return this.asUint8List$2(0, 0, null); + }, + $isByteBuffer: 1 + }; + A.NativeByteData.prototype = { + get$runtimeType(receiver) { + return B.Type_ByteData_9dB; + }, + _setInt8$2(receiver, byteOffset, value) { + return receiver.setInt8(byteOffset, value); + }, + $isTrustedGetRuntimeType: 1, + $isByteData: 1 + }; + A.NativeTypedArray.prototype = { + get$length(receiver) { + return receiver.length; + }, + $isJavaScriptIndexingBehavior: 1 + }; + A.NativeTypedArrayOfDouble.prototype = { + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isEfficientLengthIterable: 1, + $isIterable: 1, + $isList: 1 + }; + A.NativeTypedArrayOfInt.prototype = { + setRange$3(receiver, start, end, iterable) { + var targetLength, count, sourceLength, source; + type$.Iterable_int._as(iterable); + receiver.$flags & 2 && A.throwUnsupportedOperation(receiver, 5); + 
targetLength = receiver.length; + this._checkPosition$3(receiver, start, targetLength, "start"); + this._checkPosition$3(receiver, end, targetLength, "end"); + if (start > end) + A.throwExpression(A.RangeError$range(start, 0, end, null, null)); + count = end - start; + sourceLength = iterable.length; + if (sourceLength < count) + A.throwExpression(A.StateError$("Not enough elements")); + source = sourceLength !== count ? iterable.subarray(0, count) : iterable; + receiver.set(source, start); + return; + }, + $isEfficientLengthIterable: 1, + $isIterable: 1, + $isList: 1 + }; + A.NativeFloat32List.prototype = { + get$runtimeType(receiver) { + return B.Type_Float32List_9Kz; + }, + $isTrustedGetRuntimeType: 1, + $isFloat32List: 1 + }; + A.NativeFloat64List.prototype = { + get$runtimeType(receiver) { + return B.Type_Float64List_9Kz; + }, + $isTrustedGetRuntimeType: 1, + $isFloat64List: 1 + }; + A.NativeInt16List.prototype = { + get$runtimeType(receiver) { + return B.Type_Int16List_s5h; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isInt16List: 1 + }; + A.NativeInt32List.prototype = { + get$runtimeType(receiver) { + return B.Type_Int32List_O8Z; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isInt32List: 1 + }; + A.NativeInt8List.prototype = { + get$runtimeType(receiver) { + return B.Type_Int8List_rFV; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isInt8List: 1 + }; + A.NativeUint16List.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint16List_kmP; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + 
$isTrustedGetRuntimeType: 1, + $isUint16List: 1 + }; + A.NativeUint32List.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint32List_kmP; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isUint32List: 1 + }; + A.NativeUint8ClampedList.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint8ClampedList_04U; + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isUint8ClampedList: 1 + }; + A.NativeUint8List.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint8List_8Eb; + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + sublist$2(receiver, start, end) { + return new Uint8Array(receiver.subarray(start, A._checkValidRange(start, end, receiver.length))); + }, + sublist$1(receiver, start) { + return this.sublist$2(receiver, start, null); + }, + $isTrustedGetRuntimeType: 1, + $isUint8List: 1 + }; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin.prototype = {}; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin.prototype = {}; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin.prototype = {}; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin.prototype = {}; + A.Rti.prototype = { + _eval$1(recipe) { + return A._Universe_evalInEnvironment(init.typeUniverse, this, recipe); + }, + _bind$1(typeOrTuple) { + return A._Universe_bind(init.typeUniverse, this, typeOrTuple); + } + }; + A._FunctionParameters.prototype = {}; + A._Type.prototype = { + toString$0(_) { + return A._rtiToString(this._rti, null); + } + }; + A._Error.prototype = { + toString$0(_) 
{ + return this.__rti$_message; + } + }; + A._TypeError.prototype = {$isTypeError: 1}; + A._AsyncRun__initializeScheduleImmediate_internalCallback.prototype = { + call$1(__wc0_formal) { + var t1 = this._box_0, + f = t1.storedCallback; + t1.storedCallback = null; + f.call$0(); + }, + $signature: 2 + }; + A._AsyncRun__initializeScheduleImmediate_closure.prototype = { + call$1(callback) { + var t1, t2; + this._box_0.storedCallback = type$.void_Function._as(callback); + t1 = this.div; + t2 = this.span; + t1.firstChild ? t1.removeChild(t2) : t1.appendChild(t2); + }, + $signature: 16 + }; + A._AsyncRun__scheduleImmediateJsOverride_internalCallback.prototype = { + call$0() { + this.callback.call$0(); + }, + $signature: 5 + }; + A._AsyncRun__scheduleImmediateWithSetImmediate_internalCallback.prototype = { + call$0() { + this.callback.call$0(); + }, + $signature: 5 + }; + A._TimerImpl.prototype = { + _TimerImpl$2(milliseconds, callback) { + if (self.setTimeout != null) + self.setTimeout(A.convertDartClosureToJS(new A._TimerImpl_internalCallback(this, callback), 0), milliseconds); + else + throw A.wrapException(A.UnsupportedError$("`setTimeout()` not found.")); + } + }; + A._TimerImpl_internalCallback.prototype = { + call$0() { + this.callback.call$0(); + }, + $signature: 0 + }; + A._AsyncAwaitCompleter.prototype = { + complete$1(value) { + var t2, _this = this, + t1 = _this.$ti; + t1._eval$1("1/?")._as(value); + if (value == null) + value = t1._precomputed1._as(value); + if (!_this.isSync) + _this._future._asyncComplete$1(value); + else { + t2 = _this._future; + if (t1._eval$1("Future<1>")._is(value)) + t2._chainFuture$1(value); + else + t2._completeWithValue$1(value); + } + }, + completeError$2(e, st) { + var t1 = this._future; + if (this.isSync) + t1._completeError$2(e, st); + else + t1._asyncCompleteError$2(e, st); + } + }; + A._awaitOnObject_closure.prototype = { + call$1(result) { + return this.bodyFunction.call$2(0, result); + }, + $signature: 3 + }; + 
A._awaitOnObject_closure0.prototype = { + call$2(error, stackTrace) { + this.bodyFunction.call$2(1, new A.ExceptionAndStackTrace(error, type$.StackTrace._as(stackTrace))); + }, + $signature: 17 + }; + A._wrapJsFunctionForAsync_closure.prototype = { + call$2(errorCode, result) { + this.$protected(A._asInt(errorCode), result); + }, + $signature: 18 + }; + A.AsyncError.prototype = { + toString$0(_) { + return A.S(this.error); + }, + $isError: 1, + get$stackTrace() { + return this.stackTrace; + } + }; + A._BroadcastStream.prototype = {}; + A._BroadcastSubscription.prototype = { + _onPause$0() { + }, + _onResume$0() { + }, + set$_async$_next(_next) { + this._async$_next = this.$ti._eval$1("_BroadcastSubscription<1>?")._as(_next); + }, + set$_async$_previous(_previous) { + this._async$_previous = this.$ti._eval$1("_BroadcastSubscription<1>?")._as(_previous); + } + }; + A._BroadcastStreamController.prototype = { + get$_mayAddEvent() { + return this._state < 4; + }, + _subscribe$4(onData, onError, onDone, cancelOnError) { + var t2, t3, t4, t5, subscription, oldLast, _this = this, + t1 = A._instanceType(_this); + t1._eval$1("~(1)?")._as(onData); + type$.nullable_void_Function._as(onDone); + if ((_this._state & 4) !== 0) { + t1 = new A._DoneStreamSubscription($.Zone__current, t1._eval$1("_DoneStreamSubscription<1>")); + A.scheduleMicrotask(t1.get$_onMicrotask()); + if (onDone != null) + t1.set$_onDone(type$.void_Function._as(onDone)); + return t1; + } + t2 = $.Zone__current; + t3 = cancelOnError ? 1 : 0; + t4 = onError != null ? 32 : 0; + type$.$env_1_1_void._bind$1(t1._precomputed1)._eval$1("1(2)")._as(onData); + A._BufferingStreamSubscription__registerErrorHandler(t2, onError); + t5 = onDone == null ? 
A.async___nullDoneHandler$closure() : onDone; + type$.void_Function._as(t5); + t1 = t1._eval$1("_BroadcastSubscription<1>"); + subscription = new A._BroadcastSubscription(_this, onData, t2, t3 | t4, t1); + subscription.set$_async$_previous(subscription); + subscription.set$_async$_next(subscription); + t1._as(subscription); + subscription._eventState = _this._state & 1; + oldLast = _this._lastSubscription; + _this.set$_lastSubscription(subscription); + subscription.set$_async$_next(null); + subscription.set$_async$_previous(oldLast); + if (oldLast == null) + _this.set$_firstSubscription(subscription); + else + oldLast.set$_async$_next(subscription); + if (_this._firstSubscription == _this._lastSubscription) + A._runGuarded(_this.onListen); + return subscription; + }, + _addEventError$0() { + if ((this._state & 4) !== 0) + return new A.StateError("Cannot add new events after calling close"); + return new A.StateError("Cannot add new events while doing an addStream"); + }, + _forEachListener$1(action) { + var t2, subscription, id, next, previous, _this = this, + t1 = A._instanceType(_this); + t1._eval$1("~(_BufferingStreamSubscription<1>)")._as(action); + t2 = _this._state; + if ((t2 & 2) !== 0) + throw A.wrapException(A.StateError$(string$.Cannot)); + subscription = _this._firstSubscription; + if (subscription == null) + return; + id = t2 & 1; + _this._state = t2 ^ 3; + for (t1 = t1._eval$1("_BroadcastSubscription<1>"); subscription != null;) { + t2 = subscription._eventState; + if ((t2 & 1) === id) { + subscription._eventState = t2 | 2; + action.call$1(subscription); + t2 = subscription._eventState ^= 1; + next = subscription._async$_next; + if ((t2 & 4) !== 0) { + t1._as(subscription); + previous = subscription._async$_previous; + if (previous == null) + _this.set$_firstSubscription(next); + else + previous.set$_async$_next(next); + if (next == null) + _this.set$_lastSubscription(previous); + else + next.set$_async$_previous(previous); + 
subscription.set$_async$_previous(subscription); + subscription.set$_async$_next(subscription); + } + subscription._eventState &= 4294967293; + subscription = next; + } else + subscription = subscription._async$_next; + } + _this._state &= 4294967293; + if (_this._firstSubscription == null) + _this._callOnCancel$0(); + }, + _callOnCancel$0() { + if ((this._state & 4) !== 0) + if (null.get$_mayComplete()) + null._asyncComplete$1(null); + A._runGuarded(this.onCancel); + }, + set$_firstSubscription(_firstSubscription) { + this._firstSubscription = A._instanceType(this)._eval$1("_BroadcastSubscription<1>?")._as(_firstSubscription); + }, + set$_lastSubscription(_lastSubscription) { + this._lastSubscription = A._instanceType(this)._eval$1("_BroadcastSubscription<1>?")._as(_lastSubscription); + }, + $isStreamController: 1, + $is_StreamControllerLifecycle: 1, + $is_EventDispatch: 1 + }; + A._SyncBroadcastStreamController.prototype = { + get$_mayAddEvent() { + return A._BroadcastStreamController.prototype.get$_mayAddEvent.call(this) && (this._state & 2) === 0; + }, + _addEventError$0() { + if ((this._state & 2) !== 0) + return new A.StateError(string$.Cannot); + return this.super$_BroadcastStreamController$_addEventError(); + }, + _sendData$1(data) { + var t1, _this = this; + _this.$ti._precomputed1._as(data); + t1 = _this._firstSubscription; + if (t1 == null) + return; + if (t1 === _this._lastSubscription) { + _this._state |= 2; + t1._add$1(data); + _this._state &= 4294967293; + if (_this._firstSubscription == null) + _this._callOnCancel$0(); + return; + } + _this._forEachListener$1(new A._SyncBroadcastStreamController__sendData_closure(_this, data)); + } + }; + A._SyncBroadcastStreamController__sendData_closure.prototype = { + call$1(subscription) { + this.$this.$ti._eval$1("_BufferingStreamSubscription<1>")._as(subscription)._add$1(this.data); + }, + $signature() { + return this.$this.$ti._eval$1("~(_BufferingStreamSubscription<1>)"); + } + }; + A._Completer.prototype = 
{ + completeError$2(error, stackTrace) { + var _0_0, + t1 = this.future; + if ((t1._state & 30) !== 0) + throw A.wrapException(A.StateError$("Future already completed")); + _0_0 = A._interceptUserError(error, stackTrace); + t1._asyncCompleteError$2(_0_0.error, _0_0.stackTrace); + }, + completeError$1(error) { + return this.completeError$2(error, null); + } + }; + A._AsyncCompleter.prototype = { + complete$1(value) { + var t2, + t1 = this.$ti; + t1._eval$1("1/?")._as(value); + t2 = this.future; + if ((t2._state & 30) !== 0) + throw A.wrapException(A.StateError$("Future already completed")); + t2._asyncComplete$1(t1._eval$1("1/")._as(value)); + } + }; + A._FutureListener.prototype = { + matchesErrorTest$1(asyncError) { + if ((this.state & 15) !== 6) + return true; + return this.result._zone.runUnary$2$2(type$.bool_Function_Object._as(this.callback), asyncError.error, type$.bool, type$.Object); + }, + handleError$1(asyncError) { + var exception, _this = this, + errorCallback = _this.errorCallback, + result = null, + t1 = type$.dynamic, + t2 = type$.Object, + t3 = asyncError.error, + t4 = _this.result._zone; + if (type$.dynamic_Function_Object_StackTrace._is(errorCallback)) + result = t4.runBinary$3$3(errorCallback, t3, asyncError.stackTrace, t1, t2, type$.StackTrace); + else + result = t4.runUnary$2$2(type$.dynamic_Function_Object._as(errorCallback), t3, t1, t2); + try { + t1 = _this.$ti._eval$1("2/")._as(result); + return t1; + } catch (exception) { + if (type$.TypeError._is(A.unwrapException(exception))) { + if ((_this.state & 1) !== 0) + throw A.wrapException(A.ArgumentError$("The error handler of Future.then must return a value of the returned future's type", "onError")); + throw A.wrapException(A.ArgumentError$("The error handler of Future.catchError must return a value of the future's type", "onError")); + } else + throw exception; + } + } + }; + A._Future.prototype = { + then$1$2$onError(f, onError, $R) { + var currentZone, result, + t1 = this.$ti; + 
t1._bind$1($R)._eval$1("1/(2)")._as(f); + currentZone = $.Zone__current; + if (currentZone === B.C__RootZone) { + if (!type$.dynamic_Function_Object_StackTrace._is(onError) && !type$.dynamic_Function_Object._is(onError)) + throw A.wrapException(A.ArgumentError$value(onError, "onError", string$.Error_)); + } else { + $R._eval$1("@<0/>")._bind$1(t1._precomputed1)._eval$1("1(2)")._as(f); + onError = A._registerErrorHandler(onError, currentZone); + } + result = new A._Future(currentZone, $R._eval$1("_Future<0>")); + this._addListener$1(new A._FutureListener(result, 3, f, onError, t1._eval$1("@<1>")._bind$1($R)._eval$1("_FutureListener<1,2>"))); + return result; + }, + _thenAwait$1$2(f, onError, $E) { + var result, + t1 = this.$ti; + t1._bind$1($E)._eval$1("1/(2)")._as(f); + result = new A._Future($.Zone__current, $E._eval$1("_Future<0>")); + this._addListener$1(new A._FutureListener(result, 19, f, onError, t1._eval$1("@<1>")._bind$1($E)._eval$1("_FutureListener<1,2>"))); + return result; + }, + _setErrorObject$1(error) { + this._state = this._state & 1 | 16; + this._resultOrListeners = error; + }, + _cloneResult$1(source) { + this._state = source._state & 30 | this._state & 1; + this._resultOrListeners = source._resultOrListeners; + }, + _addListener$1(listener) { + var source, _this = this, + t1 = _this._state; + if (t1 <= 3) { + listener._nextListener = type$.nullable__FutureListener_dynamic_dynamic._as(_this._resultOrListeners); + _this._resultOrListeners = listener; + } else { + if ((t1 & 4) !== 0) { + source = type$._Future_dynamic._as(_this._resultOrListeners); + if ((source._state & 24) === 0) { + source._addListener$1(listener); + return; + } + _this._cloneResult$1(source); + } + A._rootScheduleMicrotask(null, null, _this._zone, type$.void_Function._as(new A._Future__addListener_closure(_this, listener))); + } + }, + _prependListeners$1(listeners) { + var t1, existingListeners, next, cursor, next0, source, _this = this, _box_0 = {}; + _box_0.listeners = 
listeners; + if (listeners == null) + return; + t1 = _this._state; + if (t1 <= 3) { + existingListeners = type$.nullable__FutureListener_dynamic_dynamic._as(_this._resultOrListeners); + _this._resultOrListeners = listeners; + if (existingListeners != null) { + next = listeners._nextListener; + for (cursor = listeners; next != null; cursor = next, next = next0) + next0 = next._nextListener; + cursor._nextListener = existingListeners; + } + } else { + if ((t1 & 4) !== 0) { + source = type$._Future_dynamic._as(_this._resultOrListeners); + if ((source._state & 24) === 0) { + source._prependListeners$1(listeners); + return; + } + _this._cloneResult$1(source); + } + _box_0.listeners = _this._reverseListeners$1(listeners); + A._rootScheduleMicrotask(null, null, _this._zone, type$.void_Function._as(new A._Future__prependListeners_closure(_box_0, _this))); + } + }, + _removeListeners$0() { + var current = type$.nullable__FutureListener_dynamic_dynamic._as(this._resultOrListeners); + this._resultOrListeners = null; + return this._reverseListeners$1(current); + }, + _reverseListeners$1(listeners) { + var current, prev, next; + for (current = listeners, prev = null; current != null; prev = current, current = next) { + next = current._nextListener; + current._nextListener = prev; + } + return prev; + }, + _chainForeignFuture$1(source) { + var e, s, exception, _this = this; + _this._state ^= 2; + try { + source.then$1$2$onError(new A._Future__chainForeignFuture_closure(_this), new A._Future__chainForeignFuture_closure0(_this), type$.Null); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + A.scheduleMicrotask(new A._Future__chainForeignFuture_closure1(_this, e, s)); + } + }, + _completeWithValue$1(value) { + var listeners, _this = this; + _this.$ti._precomputed1._as(value); + listeners = _this._removeListeners$0(); + _this._state = 8; + _this._resultOrListeners = value; + A._Future__propagateToListeners(_this, listeners); + }, 
+ _completeWithResultOf$1(source) { + var t1, listeners, _this = this; + if ((source._state & 16) !== 0) { + t1 = _this._zone === source._zone; + t1 = !(t1 || t1); + } else + t1 = false; + if (t1) + return; + listeners = _this._removeListeners$0(); + _this._cloneResult$1(source); + A._Future__propagateToListeners(_this, listeners); + }, + _completeError$2(error, stackTrace) { + var listeners; + type$.Object._as(error); + type$.StackTrace._as(stackTrace); + listeners = this._removeListeners$0(); + this._setErrorObject$1(new A.AsyncError(error, stackTrace)); + A._Future__propagateToListeners(this, listeners); + }, + _asyncComplete$1(value) { + var t1 = this.$ti; + t1._eval$1("1/")._as(value); + if (t1._eval$1("Future<1>")._is(value)) { + this._chainFuture$1(value); + return; + } + this._asyncCompleteWithValue$1(value); + }, + _asyncCompleteWithValue$1(value) { + var _this = this; + _this.$ti._precomputed1._as(value); + _this._state ^= 2; + A._rootScheduleMicrotask(null, null, _this._zone, type$.void_Function._as(new A._Future__asyncCompleteWithValue_closure(_this, value))); + }, + _chainFuture$1(value) { + var t1 = this.$ti; + t1._eval$1("Future<1>")._as(value); + if (t1._is(value)) { + A._Future__chainCoreFuture(value, this, false); + return; + } + this._chainForeignFuture$1(value); + }, + _asyncCompleteError$2(error, stackTrace) { + this._state ^= 2; + A._rootScheduleMicrotask(null, null, this._zone, type$.void_Function._as(new A._Future__asyncCompleteError_closure(this, error, stackTrace))); + }, + $isFuture: 1 + }; + A._Future__addListener_closure.prototype = { + call$0() { + A._Future__propagateToListeners(this.$this, this.listener); + }, + $signature: 0 + }; + A._Future__prependListeners_closure.prototype = { + call$0() { + A._Future__propagateToListeners(this.$this, this._box_0.listeners); + }, + $signature: 0 + }; + A._Future__chainForeignFuture_closure.prototype = { + call$1(value) { + var error, stackTrace, exception, + t1 = this.$this; + t1._state ^= 2; + 
try { + t1._completeWithValue$1(t1.$ti._precomputed1._as(value)); + } catch (exception) { + error = A.unwrapException(exception); + stackTrace = A.getTraceFromException(exception); + t1._completeError$2(error, stackTrace); + } + }, + $signature: 2 + }; + A._Future__chainForeignFuture_closure0.prototype = { + call$2(error, stackTrace) { + this.$this._completeError$2(type$.Object._as(error), type$.StackTrace._as(stackTrace)); + }, + $signature: 7 + }; + A._Future__chainForeignFuture_closure1.prototype = { + call$0() { + this.$this._completeError$2(this.e, this.s); + }, + $signature: 0 + }; + A._Future__chainCoreFuture_closure.prototype = { + call$0() { + A._Future__chainCoreFuture(this._box_0.source, this.target, true); + }, + $signature: 0 + }; + A._Future__asyncCompleteWithValue_closure.prototype = { + call$0() { + this.$this._completeWithValue$1(this.value); + }, + $signature: 0 + }; + A._Future__asyncCompleteError_closure.prototype = { + call$0() { + this.$this._completeError$2(this.error, this.stackTrace); + }, + $signature: 0 + }; + A._Future__propagateToListeners_handleWhenCompleteCallback.prototype = { + call$0() { + var e, s, t1, exception, t2, t3, originalSource, joinedResult, _this = this, completeResult = null; + try { + t1 = _this._box_0.listener; + completeResult = t1.result._zone.run$1$1(type$.dynamic_Function._as(t1.callback), type$.dynamic); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + if (_this.hasError && type$.AsyncError._as(_this._box_1.source._resultOrListeners).error === e) { + t1 = _this._box_0; + t1.listenerValueOrError = type$.AsyncError._as(_this._box_1.source._resultOrListeners); + } else { + t1 = e; + t2 = s; + if (t2 == null) + t2 = A.AsyncError_defaultStackTrace(t1); + t3 = _this._box_0; + t3.listenerValueOrError = new A.AsyncError(t1, t2); + t1 = t3; + } + t1.listenerHasError = true; + return; + } + if (completeResult instanceof A._Future && (completeResult._state & 24) !== 0) 
{ + if ((completeResult._state & 16) !== 0) { + t1 = _this._box_0; + t1.listenerValueOrError = type$.AsyncError._as(completeResult._resultOrListeners); + t1.listenerHasError = true; + } + return; + } + if (completeResult instanceof A._Future) { + originalSource = _this._box_1.source; + joinedResult = new A._Future(originalSource._zone, originalSource.$ti); + completeResult.then$1$2$onError(new A._Future__propagateToListeners_handleWhenCompleteCallback_closure(joinedResult, originalSource), new A._Future__propagateToListeners_handleWhenCompleteCallback_closure0(joinedResult), type$.void); + t1 = _this._box_0; + t1.listenerValueOrError = joinedResult; + t1.listenerHasError = false; + } + }, + $signature: 0 + }; + A._Future__propagateToListeners_handleWhenCompleteCallback_closure.prototype = { + call$1(__wc0_formal) { + this.joinedResult._completeWithResultOf$1(this.originalSource); + }, + $signature: 2 + }; + A._Future__propagateToListeners_handleWhenCompleteCallback_closure0.prototype = { + call$2(e, s) { + this.joinedResult._completeError$2(type$.Object._as(e), type$.StackTrace._as(s)); + }, + $signature: 7 + }; + A._Future__propagateToListeners_handleValueCallback.prototype = { + call$0() { + var e, s, t1, t2, t3, t4, t5, exception; + try { + t1 = this._box_0; + t2 = t1.listener; + t3 = t2.$ti; + t4 = t3._precomputed1; + t5 = t4._as(this.sourceResult); + t1.listenerValueOrError = t2.result._zone.runUnary$2$2(t3._eval$1("2/(1)")._as(t2.callback), t5, t3._eval$1("2/"), t4); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + t1 = e; + t2 = s; + if (t2 == null) + t2 = A.AsyncError_defaultStackTrace(t1); + t3 = this._box_0; + t3.listenerValueOrError = new A.AsyncError(t1, t2); + t3.listenerHasError = true; + } + }, + $signature: 0 + }; + A._Future__propagateToListeners_handleError.prototype = { + call$0() { + var asyncError, e, s, t1, exception, t2, t3, _this = this; + try { + asyncError = 
type$.AsyncError._as(_this._box_1.source._resultOrListeners); + t1 = _this._box_0; + if (t1.listener.matchesErrorTest$1(asyncError) && t1.listener.errorCallback != null) { + t1.listenerValueOrError = t1.listener.handleError$1(asyncError); + t1.listenerHasError = false; + } + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + t1 = type$.AsyncError._as(_this._box_1.source._resultOrListeners); + if (t1.error === e) { + t2 = _this._box_0; + t2.listenerValueOrError = t1; + t1 = t2; + } else { + t1 = e; + t2 = s; + if (t2 == null) + t2 = A.AsyncError_defaultStackTrace(t1); + t3 = _this._box_0; + t3.listenerValueOrError = new A.AsyncError(t1, t2); + t1 = t3; + } + t1.listenerHasError = true; + } + }, + $signature: 0 + }; + A._AsyncCallbackEntry.prototype = {}; + A.Stream.prototype = { + get$length(_) { + var t1 = {}, + future = new A._Future($.Zone__current, type$._Future_int); + t1.count = 0; + this.listen$4$cancelOnError$onDone$onError(new A.Stream_length_closure(t1, this), true, new A.Stream_length_closure0(t1, future), future.get$_completeError()); + return future; + } + }; + A.Stream_length_closure.prototype = { + call$1(__wc0_formal) { + this.$this.$ti._precomputed1._as(__wc0_formal); + ++this._box_0.count; + }, + $signature() { + return this.$this.$ti._eval$1("~(1)"); + } + }; + A.Stream_length_closure0.prototype = { + call$0() { + var t1 = this.future, + t2 = t1.$ti, + t3 = t2._eval$1("1/")._as(this._box_0.count), + listeners = t1._removeListeners$0(); + t2._precomputed1._as(t3); + t1._state = 8; + t1._resultOrListeners = t3; + A._Future__propagateToListeners(t1, listeners); + }, + $signature: 0 + }; + A._ControllerStream.prototype = { + get$hashCode(_) { + return (A.Primitives_objectHashCode(this._async$_controller) ^ 892482866) >>> 0; + }, + $eq(_, other) { + if (other == null) + return false; + if (this === other) + return true; + return other instanceof A._BroadcastStream && other._async$_controller === 
this._async$_controller; + } + }; + A._ControllerSubscription.prototype = { + _onPause$0() { + A._instanceType(this._async$_controller)._eval$1("StreamSubscription<1>")._as(this); + }, + _onResume$0() { + A._instanceType(this._async$_controller)._eval$1("StreamSubscription<1>")._as(this); + } + }; + A._BufferingStreamSubscription.prototype = { + _add$1(data) { + var t2, _this = this, + t1 = A._instanceType(_this); + t1._precomputed1._as(data); + t2 = _this._state; + if ((t2 & 8) !== 0) + return; + if (t2 < 64) + _this._sendData$1(data); + else + _this._addPending$1(new A._DelayedData(data, t1._eval$1("_DelayedData<1>"))); + }, + _onPause$0() { + }, + _onResume$0() { + }, + _addPending$1($event) { + var lastEvent, t1, _this = this, + pending = _this._pending; + if (pending == null) { + pending = new A._PendingEvents(A._instanceType(_this)._eval$1("_PendingEvents<1>")); + _this.set$_pending(pending); + } + lastEvent = pending.lastPendingEvent; + if (lastEvent == null) + pending.firstPendingEvent = pending.lastPendingEvent = $event; + else + pending.lastPendingEvent = lastEvent.next = $event; + t1 = _this._state; + if ((t1 & 128) === 0) { + t1 |= 128; + _this._state = t1; + if (t1 < 256) + pending.schedule$1(_this); + } + }, + _sendData$1(data) { + var t2, _this = this, + t1 = A._instanceType(_this)._precomputed1; + t1._as(data); + t2 = _this._state; + _this._state = t2 | 64; + _this._zone.runUnaryGuarded$1$2(_this._onData, data, t1); + _this._state &= 4294967231; + _this._checkState$1((t2 & 4) !== 0); + }, + _checkState$1(wasInputPaused) { + var t2, isInputPaused, _this = this, + t1 = _this._state; + if ((t1 & 128) !== 0 && _this._pending.lastPendingEvent == null) { + t1 = _this._state = t1 & 4294967167; + t2 = false; + if ((t1 & 4) !== 0) + if (t1 < 256) { + t2 = _this._pending; + t2 = t2 == null ? 
null : t2.lastPendingEvent == null; + t2 = t2 !== false; + } + if (t2) { + t1 &= 4294967291; + _this._state = t1; + } + } + for (; true; wasInputPaused = isInputPaused) { + if ((t1 & 8) !== 0) { + _this.set$_pending(null); + return; + } + isInputPaused = (t1 & 4) !== 0; + if (wasInputPaused === isInputPaused) + break; + _this._state = t1 ^ 64; + if (isInputPaused) + _this._onPause$0(); + else + _this._onResume$0(); + t1 = _this._state &= 4294967231; + } + if ((t1 & 128) !== 0 && t1 < 256) + _this._pending.schedule$1(_this); + }, + set$_pending(_pending) { + this._pending = A._instanceType(this)._eval$1("_PendingEvents<1>?")._as(_pending); + }, + $isStreamSubscription: 1, + $is_EventDispatch: 1 + }; + A._StreamImpl.prototype = { + listen$4$cancelOnError$onDone$onError(onData, cancelOnError, onDone, onError) { + var t1 = this.$ti; + t1._eval$1("~(1)?")._as(onData); + type$.nullable_void_Function._as(onDone); + return this._async$_controller._subscribe$4(t1._eval$1("~(1)?")._as(onData), onError, onDone, cancelOnError === true); + }, + listen$1(onData) { + return this.listen$4$cancelOnError$onDone$onError(onData, null, null, null); + } + }; + A._DelayedEvent.prototype = {}; + A._DelayedData.prototype = {}; + A._PendingEvents.prototype = { + schedule$1(dispatch) { + var t1, _this = this; + _this.$ti._eval$1("_EventDispatch<1>")._as(dispatch); + t1 = _this._state; + if (t1 === 1) + return; + if (t1 >= 1) { + _this._state = 1; + return; + } + A.scheduleMicrotask(new A._PendingEvents_schedule_closure(_this, dispatch)); + _this._state = 1; + } + }; + A._PendingEvents_schedule_closure.prototype = { + call$0() { + var t2, $event, nextEvent, + t1 = this.$this, + oldState = t1._state; + t1._state = 0; + if (oldState === 3) + return; + t2 = t1.$ti._eval$1("_EventDispatch<1>")._as(this.dispatch); + $event = t1.firstPendingEvent; + nextEvent = $event.next; + t1.firstPendingEvent = nextEvent; + if (nextEvent == null) + t1.lastPendingEvent = null; + 
A._instanceType($event)._eval$1("_EventDispatch<1>")._as(t2)._sendData$1($event.value); + }, + $signature: 0 + }; + A._DoneStreamSubscription.prototype = { + _onMicrotask$0() { + var _0_0, _this = this, + unscheduledState = _this._state - 1; + if (unscheduledState === 0) { + _this._state = -1; + _0_0 = _this._onDone; + if (_0_0 != null) { + _this.set$_onDone(null); + _this._zone.runGuarded$1(_0_0); + } + } else + _this._state = unscheduledState; + }, + set$_onDone(_onDone) { + this._onDone = type$.nullable_void_Function._as(_onDone); + }, + $isStreamSubscription: 1 + }; + A._StreamIterator.prototype = {}; + A._Zone.prototype = {$isZone: 1}; + A._rootHandleError_closure.prototype = { + call$0() { + A.Error_throwWithStackTrace(this.error, this.stackTrace); + }, + $signature: 0 + }; + A._RootZone.prototype = { + runGuarded$1(f) { + var e, s, exception; + type$.void_Function._as(f); + try { + if (B.C__RootZone === $.Zone__current) { + f.call$0(); + return; + } + A._rootRun(null, null, this, f, type$.void); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + A._rootHandleError(type$.Object._as(e), type$.StackTrace._as(s)); + } + }, + runUnaryGuarded$1$2(f, arg, $T) { + var e, s, exception; + $T._eval$1("~(0)")._as(f); + $T._as(arg); + try { + if (B.C__RootZone === $.Zone__current) { + f.call$1(arg); + return; + } + A._rootRunUnary(null, null, this, f, arg, type$.void, $T); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + A._rootHandleError(type$.Object._as(e), type$.StackTrace._as(s)); + } + }, + bindCallbackGuarded$1(f) { + return new A._RootZone_bindCallbackGuarded_closure(this, type$.void_Function._as(f)); + }, + $index(_, key) { + return null; + }, + run$1$1(f, $R) { + $R._eval$1("0()")._as(f); + if ($.Zone__current === B.C__RootZone) + return f.call$0(); + return A._rootRun(null, null, this, f, $R); + }, + runUnary$2$2(f, arg, $R, $T) { + 
$R._eval$1("@<0>")._bind$1($T)._eval$1("1(2)")._as(f); + $T._as(arg); + if ($.Zone__current === B.C__RootZone) + return f.call$1(arg); + return A._rootRunUnary(null, null, this, f, arg, $R, $T); + }, + runBinary$3$3(f, arg1, arg2, $R, T1, T2) { + $R._eval$1("@<0>")._bind$1(T1)._bind$1(T2)._eval$1("1(2,3)")._as(f); + T1._as(arg1); + T2._as(arg2); + if ($.Zone__current === B.C__RootZone) + return f.call$2(arg1, arg2); + return A._rootRunBinary(null, null, this, f, arg1, arg2, $R, T1, T2); + }, + registerBinaryCallback$3$1(f, $R, T1, T2) { + return $R._eval$1("@<0>")._bind$1(T1)._bind$1(T2)._eval$1("1(2,3)")._as(f); + } + }; + A._RootZone_bindCallbackGuarded_closure.prototype = { + call$0() { + return this.$this.runGuarded$1(this.f); + }, + $signature: 0 + }; + A._HashMap.prototype = { + get$length(_) { + return this._collection$_length; + }, + get$keys() { + return new A._HashMapKeyIterable(this, this.$ti._eval$1("_HashMapKeyIterable<1>")); + }, + containsKey$1(key) { + var strings, nums; + if (typeof key == "string" && key !== "__proto__") { + strings = this._collection$_strings; + return strings == null ? false : strings[key] != null; + } else if (typeof key == "number" && (key & 1073741823) === key) { + nums = this._collection$_nums; + return nums == null ? false : nums[key] != null; + } else + return this._containsKey$1(key); + }, + _containsKey$1(key) { + var rest = this._collection$_rest; + if (rest == null) + return false; + return this._findBucketIndex$2(this._getBucket$2(rest, key), key) >= 0; + }, + $index(_, key) { + var strings, t1, nums; + if (typeof key == "string" && key !== "__proto__") { + strings = this._collection$_strings; + t1 = strings == null ? null : A._HashMap__getTableEntry(strings, key); + return t1; + } else if (typeof key == "number" && (key & 1073741823) === key) { + nums = this._collection$_nums; + t1 = nums == null ? 
null : A._HashMap__getTableEntry(nums, key); + return t1; + } else + return this._get$1(key); + }, + _get$1(key) { + var bucket, index, + rest = this._collection$_rest; + if (rest == null) + return null; + bucket = this._getBucket$2(rest, key); + index = this._findBucketIndex$2(bucket, key); + return index < 0 ? null : bucket[index + 1]; + }, + $indexSet(_, key, value) { + var strings, nums, rest, hash, bucket, index, _this = this, + t1 = _this.$ti; + t1._precomputed1._as(key); + t1._rest[1]._as(value); + if (typeof key == "string" && key !== "__proto__") { + strings = _this._collection$_strings; + _this._addHashTableEntry$3(strings == null ? _this._collection$_strings = A._HashMap__newHashTable() : strings, key, value); + } else if (typeof key == "number" && (key & 1073741823) === key) { + nums = _this._collection$_nums; + _this._addHashTableEntry$3(nums == null ? _this._collection$_nums = A._HashMap__newHashTable() : nums, key, value); + } else { + rest = _this._collection$_rest; + if (rest == null) + rest = _this._collection$_rest = A._HashMap__newHashTable(); + hash = A.objectHashCode(key) & 1073741823; + bucket = rest[hash]; + if (bucket == null) { + A._HashMap__setTableEntry(rest, hash, [key, value]); + ++_this._collection$_length; + _this._collection$_keys = null; + } else { + index = _this._findBucketIndex$2(bucket, key); + if (index >= 0) + bucket[index + 1] = value; + else { + bucket.push(key, value); + ++_this._collection$_length; + _this._collection$_keys = null; + } + } + } + }, + forEach$1(_, action) { + var keys, $length, t2, i, key, t3, _this = this, + t1 = _this.$ti; + t1._eval$1("~(1,2)")._as(action); + keys = _this._computeKeys$0(); + for ($length = keys.length, t2 = t1._precomputed1, t1 = t1._rest[1], i = 0; i < $length; ++i) { + key = keys[i]; + t2._as(key); + t3 = _this.$index(0, key); + action.call$2(key, t3 == null ? 
t1._as(t3) : t3); + if (keys !== _this._collection$_keys) + throw A.wrapException(A.ConcurrentModificationError$(_this)); + } + }, + _computeKeys$0() { + var strings, index, names, entries, i, nums, rest, bucket, $length, i0, _this = this, + result = _this._collection$_keys; + if (result != null) + return result; + result = A.List_List$filled(_this._collection$_length, null, false, type$.dynamic); + strings = _this._collection$_strings; + index = 0; + if (strings != null) { + names = Object.getOwnPropertyNames(strings); + entries = names.length; + for (i = 0; i < entries; ++i) { + result[index] = names[i]; + ++index; + } + } + nums = _this._collection$_nums; + if (nums != null) { + names = Object.getOwnPropertyNames(nums); + entries = names.length; + for (i = 0; i < entries; ++i) { + result[index] = +names[i]; + ++index; + } + } + rest = _this._collection$_rest; + if (rest != null) { + names = Object.getOwnPropertyNames(rest); + entries = names.length; + for (i = 0; i < entries; ++i) { + bucket = rest[names[i]]; + $length = bucket.length; + for (i0 = 0; i0 < $length; i0 += 2) { + result[index] = bucket[i0]; + ++index; + } + } + } + return _this._collection$_keys = result; + }, + _addHashTableEntry$3(table, key, value) { + var t1 = this.$ti; + t1._precomputed1._as(key); + t1._rest[1]._as(value); + if (table[key] == null) { + ++this._collection$_length; + this._collection$_keys = null; + } + A._HashMap__setTableEntry(table, key, value); + }, + _getBucket$2(table, key) { + return table[A.objectHashCode(key) & 1073741823]; + } + }; + A._IdentityHashMap.prototype = { + _findBucketIndex$2(bucket, key) { + var $length, i, t1; + if (bucket == null) + return -1; + $length = bucket.length; + for (i = 0; i < $length; i += 2) { + t1 = bucket[i]; + if (t1 == null ? 
key == null : t1 === key) + return i; + } + return -1; + } + }; + A._HashMapKeyIterable.prototype = { + get$length(_) { + return this._collection$_map._collection$_length; + }, + get$iterator(_) { + var t1 = this._collection$_map; + return new A._HashMapKeyIterator(t1, t1._computeKeys$0(), this.$ti._eval$1("_HashMapKeyIterator<1>")); + } + }; + A._HashMapKeyIterator.prototype = { + get$current() { + var t1 = this._collection$_current; + return t1 == null ? this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var _this = this, + keys = _this._collection$_keys, + offset = _this._offset, + t1 = _this._collection$_map; + if (keys !== t1._collection$_keys) + throw A.wrapException(A.ConcurrentModificationError$(t1)); + else if (offset >= keys.length) { + _this.set$_collection$_current(null); + return false; + } else { + _this.set$_collection$_current(keys[offset]); + _this._offset = offset + 1; + return true; + } + }, + set$_collection$_current(_current) { + this._collection$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.ListBase.prototype = { + get$iterator(receiver) { + return new A.ListIterator(receiver, this.get$length(receiver), A.instanceType(receiver)._eval$1("ListIterator")); + }, + elementAt$1(receiver, index) { + return this.$index(receiver, index); + }, + map$1$1(receiver, f, $T) { + var t1 = A.instanceType(receiver); + return new A.MappedListIterable(receiver, t1._bind$1($T)._eval$1("1(ListBase.E)")._as(f), t1._eval$1("@")._bind$1($T)._eval$1("MappedListIterable<1,2>")); + }, + toString$0(receiver) { + return A.Iterable_iterableToFullString(receiver, "[", "]"); + } + }; + A.MapBase.prototype = { + forEach$1(_, action) { + var t2, key, t3, + t1 = A._instanceType(this); + t1._eval$1("~(1,2)")._as(action); + for (t2 = this.get$keys(), t2 = t2.get$iterator(t2), t1 = t1._rest[1]; t2.moveNext$0();) { + key = t2.get$current(); + t3 = this.$index(0, key); + action.call$2(key, t3 == null ? 
t1._as(t3) : t3); + } + }, + get$length(_) { + var t1 = this.get$keys(); + return t1.get$length(t1); + }, + toString$0(_) { + return A.MapBase_mapToString(this); + }, + $isMap: 1 + }; + A.MapBase_mapToString_closure.prototype = { + call$2(k, v) { + var t2, + t1 = this._box_0; + if (!t1.first) + this.result._contents += ", "; + t1.first = false; + t1 = this.result; + t2 = A.S(k); + t2 = t1._contents += t2; + t1._contents = t2 + ": "; + t2 = A.S(v); + t1._contents += t2; + }, + $signature: 19 + }; + A._UnmodifiableMapMixin.prototype = {}; + A.MapView.prototype = { + $index(_, key) { + return this._collection$_map.$index(0, key); + }, + forEach$1(_, action) { + this._collection$_map.forEach$1(0, A._instanceType(this)._eval$1("~(1,2)")._as(action)); + }, + get$length(_) { + return this._collection$_map.__js_helper$_length; + }, + get$keys() { + var t1 = this._collection$_map; + return new A.LinkedHashMapKeysIterable(t1, A._instanceType(t1)._eval$1("LinkedHashMapKeysIterable<1>")); + }, + toString$0(_) { + return A.MapBase_mapToString(this._collection$_map); + }, + $isMap: 1 + }; + A.UnmodifiableMapView.prototype = {}; + A._UnmodifiableMapView_MapView__UnmodifiableMapMixin.prototype = {}; + A.Base64Codec.prototype = {}; + A.Base64Encoder.prototype = { + convert$1(input) { + var t1; + type$.List_int._as(input); + t1 = input.length; + if (t1 === 0) + return ""; + t1 = new A._Base64Encoder("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/").encode$4(input, 0, t1, true); + t1.toString; + return A.String_String$fromCharCodes(t1); + } + }; + A._Base64Encoder.prototype = { + encode$4(bytes, start, end, isLast) { + var t1, byteCount, fullChunks, bufferLength, output; + type$.List_int._as(bytes); + t1 = this._convert$_state; + byteCount = (t1 & 3) + (end - start); + fullChunks = B.JSInt_methods._tdivFast$1(byteCount, 3); + bufferLength = fullChunks * 4; + if (byteCount - fullChunks * 3 > 0) + bufferLength += 4; + output = new Uint8Array(bufferLength); + 
this._convert$_state = A._Base64Encoder_encodeChunk(this._alphabet, bytes, start, end, true, output, 0, t1); + if (bufferLength > 0) + return output; + return null; + } + }; + A.Base64Decoder.prototype = { + convert$1(input) { + var decoder, t1, t2, + end = A.RangeError_checkValidRange(0, null, input.length); + if (0 === end) + return new Uint8Array(0); + decoder = new A._Base64Decoder(); + t1 = decoder.decode$3(input, 0, end); + t1.toString; + t2 = decoder._convert$_state; + if (t2 < -1) + A.throwExpression(A.FormatException$("Missing padding character", input, end)); + if (t2 > 0) + A.throwExpression(A.FormatException$("Invalid length, must be multiple of four", input, end)); + decoder._convert$_state = -1; + return t1; + } + }; + A._Base64Decoder.prototype = { + decode$3(input, start, end) { + var buffer, _this = this, + t1 = _this._convert$_state; + if (t1 < 0) { + _this._convert$_state = A._Base64Decoder__checkPadding(input, start, end, t1); + return null; + } + if (start === end) + return new Uint8Array(0); + buffer = A._Base64Decoder__allocateBuffer(input, start, end, t1); + _this._convert$_state = A._Base64Decoder_decodeChunk(input, start, end, buffer, 0, _this._convert$_state); + return buffer; + } + }; + A.Codec.prototype = {}; + A.Converter.prototype = {}; + A.NoSuchMethodError_toString_closure.prototype = { + call$2(key, value) { + var t1, t2, t3; + type$.Symbol._as(key); + t1 = this.sb; + t2 = this._box_0; + t3 = t1._contents += t2.comma; + t3 += key.__internal$_name; + t1._contents = t3; + t1._contents = t3 + ": "; + t3 = A.Error_safeToString(value); + t1._contents += t3; + t2.comma = ", "; + }, + $signature: 20 + }; + A.DateTime.prototype = { + $eq(_, other) { + if (other == null) + return false; + return other instanceof A.DateTime && this._value === other._value && this._microsecond === other._microsecond && this.isUtc === other.isUtc; + }, + get$hashCode(_) { + return A.Object_hash(this._value, this._microsecond); + }, + toString$0(_) { + var 
_this = this, + y = A.DateTime__fourDigits(A.Primitives_getYear(_this)), + m = A.DateTime__twoDigits(A.Primitives_getMonth(_this)), + d = A.DateTime__twoDigits(A.Primitives_getDay(_this)), + h = A.DateTime__twoDigits(A.Primitives_getHours(_this)), + min = A.DateTime__twoDigits(A.Primitives_getMinutes(_this)), + sec = A.DateTime__twoDigits(A.Primitives_getSeconds(_this)), + ms = A.DateTime__threeDigits(A.Primitives_getMilliseconds(_this)), + t1 = _this._microsecond, + us = t1 === 0 ? "" : A.DateTime__threeDigits(t1); + t1 = y + "-" + m; + if (_this.isUtc) + return t1 + "-" + d + " " + h + ":" + min + ":" + sec + "." + ms + us + "Z"; + else + return t1 + "-" + d + " " + h + ":" + min + ":" + sec + "." + ms + us; + } + }; + A._Enum.prototype = { + toString$0(_) { + return this._enumToString$0(); + } + }; + A.Error.prototype = { + get$stackTrace() { + return A.Primitives_extractStackTrace(this); + } + }; + A.AssertionError.prototype = { + toString$0(_) { + var t1 = this.message; + if (t1 != null) + return "Assertion failed: " + A.Error_safeToString(t1); + return "Assertion failed"; + } + }; + A.TypeError.prototype = {}; + A.ArgumentError.prototype = { + get$_errorName() { + return "Invalid argument" + (!this._hasValue ? "(s)" : ""); + }, + get$_errorExplanation() { + return ""; + }, + toString$0(_) { + var _this = this, + $name = _this.name, + nameString = $name == null ? "" : " (" + $name + ")", + message = _this.message, + messageString = message == null ? 
"" : ": " + A.S(message), + prefix = _this.get$_errorName() + nameString + messageString; + if (!_this._hasValue) + return prefix; + return prefix + _this.get$_errorExplanation() + ": " + A.Error_safeToString(_this.get$invalidValue()); + }, + get$invalidValue() { + return this.invalidValue; + } + }; + A.RangeError.prototype = { + get$invalidValue() { + return A._asNumQ(this.invalidValue); + }, + get$_errorName() { + return "RangeError"; + }, + get$_errorExplanation() { + var explanation, + start = this.start, + end = this.end; + if (start == null) + explanation = end != null ? ": Not less than or equal to " + A.S(end) : ""; + else if (end == null) + explanation = ": Not greater than or equal to " + A.S(start); + else if (end > start) + explanation = ": Not in inclusive range " + A.S(start) + ".." + A.S(end); + else + explanation = end < start ? ": Valid value range is empty" : ": Only valid value is " + A.S(start); + return explanation; + } + }; + A.IndexError.prototype = { + get$invalidValue() { + return A._asInt(this.invalidValue); + }, + get$_errorName() { + return "RangeError"; + }, + get$_errorExplanation() { + if (A._asInt(this.invalidValue) < 0) + return ": index must not be negative"; + var t1 = this.length; + if (t1 === 0) + return ": no indices are valid"; + return ": index should be less than " + t1; + }, + get$length(receiver) { + return this.length; + } + }; + A.NoSuchMethodError.prototype = { + toString$0(_) { + var $arguments, t1, _i, t2, t3, argument, receiverText, actualParameters, _this = this, _box_0 = {}, + sb = new A.StringBuffer(""); + _box_0.comma = ""; + $arguments = _this._core$_arguments; + for (t1 = $arguments.length, _i = 0, t2 = "", t3 = ""; _i < t1; ++_i, t3 = ", ") { + argument = $arguments[_i]; + sb._contents = t2 + t3; + t2 = A.Error_safeToString(argument); + t2 = sb._contents += t2; + _box_0.comma = ", "; + } + _this._namedArguments.forEach$1(0, new A.NoSuchMethodError_toString_closure(_box_0, sb)); + receiverText = 
A.Error_safeToString(_this._core$_receiver); + actualParameters = sb.toString$0(0); + return "NoSuchMethodError: method not found: '" + _this._core$_memberName.__internal$_name + "'\nReceiver: " + receiverText + "\nArguments: [" + actualParameters + "]"; + } + }; + A.UnsupportedError.prototype = { + toString$0(_) { + return "Unsupported operation: " + this.message; + } + }; + A.UnimplementedError.prototype = { + toString$0(_) { + return "UnimplementedError: " + this.message; + } + }; + A.StateError.prototype = { + toString$0(_) { + return "Bad state: " + this.message; + } + }; + A.ConcurrentModificationError.prototype = { + toString$0(_) { + var t1 = this.modifiedObject; + if (t1 == null) + return "Concurrent modification during iteration."; + return "Concurrent modification during iteration: " + A.Error_safeToString(t1) + "."; + } + }; + A.OutOfMemoryError.prototype = { + toString$0(_) { + return "Out of Memory"; + }, + get$stackTrace() { + return null; + }, + $isError: 1 + }; + A.StackOverflowError.prototype = { + toString$0(_) { + return "Stack Overflow"; + }, + get$stackTrace() { + return null; + }, + $isError: 1 + }; + A._Exception.prototype = { + toString$0(_) { + return "Exception: " + this.message; + } + }; + A.FormatException.prototype = { + toString$0(_) { + var lineEnd, lineNum, lineStart, previousCharWasCR, i, char, prefix, postfix, end, start, + message = this.message, + report = "" !== message ? 
"FormatException: " + message : "FormatException", + offset = this.offset, + source = this.source, + t1 = offset < 0 || offset > source.length; + if (t1) + offset = null; + if (offset == null) { + if (source.length > 78) + source = B.JSString_methods.substring$2(source, 0, 75) + "..."; + return report + "\n" + source; + } + for (lineEnd = source.length, lineNum = 1, lineStart = 0, previousCharWasCR = false, i = 0; i < offset; ++i) { + if (!(i < lineEnd)) + return A.ioore(source, i); + char = source.charCodeAt(i); + if (char === 10) { + if (lineStart !== i || !previousCharWasCR) + ++lineNum; + lineStart = i + 1; + previousCharWasCR = false; + } else if (char === 13) { + ++lineNum; + lineStart = i + 1; + previousCharWasCR = true; + } + } + report = lineNum > 1 ? report + (" (at line " + lineNum + ", character " + (offset - lineStart + 1) + ")\n") : report + (" (at character " + (offset + 1) + ")\n"); + for (i = offset; i < lineEnd; ++i) { + if (!(i >= 0)) + return A.ioore(source, i); + char = source.charCodeAt(i); + if (char === 10 || char === 13) { + lineEnd = i; + break; + } + } + prefix = ""; + if (lineEnd - lineStart > 78) { + postfix = "..."; + if (offset - lineStart < 75) { + end = lineStart + 75; + start = lineStart; + } else { + if (lineEnd - offset < 75) { + start = lineEnd - 75; + end = lineEnd; + postfix = ""; + } else { + start = offset - 36; + end = offset + 36; + } + prefix = "..."; + } + } else { + end = lineEnd; + start = lineStart; + postfix = ""; + } + return report + prefix + B.JSString_methods.substring$2(source, start, end) + postfix + "\n" + B.JSString_methods.$mul(" ", offset - start + prefix.length) + "^\n"; + } + }; + A.Iterable.prototype = { + map$1$1(_, toElement, $T) { + var t1 = A._instanceType(this); + return A.MappedIterable_MappedIterable(this, t1._bind$1($T)._eval$1("1(Iterable.E)")._as(toElement), t1._eval$1("Iterable.E"), $T); + }, + get$length(_) { + var count, + it = this.get$iterator(this); + for (count = 0; it.moveNext$0();) + 
++count; + return count; + }, + elementAt$1(_, index) { + var iterator, skipCount; + A.RangeError_checkNotNegative(index, "index"); + iterator = this.get$iterator(this); + for (skipCount = index; iterator.moveNext$0();) { + if (skipCount === 0) + return iterator.get$current(); + --skipCount; + } + throw A.wrapException(A.IndexError$withLength(index, index - skipCount, this, "index")); + }, + toString$0(_) { + return A.Iterable_iterableToShortString(this, "(", ")"); + } + }; + A.Null.prototype = { + get$hashCode(_) { + return A.Object.prototype.get$hashCode.call(this, 0); + }, + toString$0(_) { + return "null"; + } + }; + A.Object.prototype = {$isObject: 1, + $eq(_, other) { + return this === other; + }, + get$hashCode(_) { + return A.Primitives_objectHashCode(this); + }, + toString$0(_) { + return "Instance of '" + A.Primitives_objectTypeName(this) + "'"; + }, + noSuchMethod$1(_, invocation) { + throw A.wrapException(A.NoSuchMethodError_NoSuchMethodError$withInvocation(this, type$.Invocation._as(invocation))); + }, + get$runtimeType(_) { + return A.getRuntimeTypeOfDartObject(this); + }, + toString() { + return this.toString$0(this); + } + }; + A._StringStackTrace.prototype = { + toString$0(_) { + return ""; + }, + $isStackTrace: 1 + }; + A.StringBuffer.prototype = { + get$length(_) { + return this._contents.length; + }, + toString$0(_) { + var t1 = this._contents; + return t1.charCodeAt(0) == 0 ? 
t1 : t1; + } + }; + A.jsify__convert.prototype = { + call$1(o) { + var t1, convertedMap, key, convertedList; + if (A._noJsifyRequired(o)) + return o; + t1 = this._convertedObjects; + if (t1.containsKey$1(o)) + return t1.$index(0, o); + if (type$.Map_of_nullable_Object_and_nullable_Object._is(o)) { + convertedMap = {}; + t1.$indexSet(0, o, convertedMap); + for (t1 = o.get$keys(), t1 = t1.get$iterator(t1); t1.moveNext$0();) { + key = t1.get$current(); + convertedMap[key] = this.call$1(o.$index(0, key)); + } + return convertedMap; + } else if (type$.Iterable_nullable_Object._is(o)) { + convertedList = []; + t1.$indexSet(0, o, convertedList); + B.JSArray_methods.addAll$1(convertedList, J.map$1$1$ax(o, this, type$.dynamic)); + return convertedList; + } else + return o; + }, + $signature: 8 + }; + A.promiseToFuture_closure.prototype = { + call$1(r) { + return this.completer.complete$1(this.T._eval$1("0/?")._as(r)); + }, + $signature: 3 + }; + A.promiseToFuture_closure0.prototype = { + call$1(e) { + if (e == null) + return this.completer.completeError$1(new A.NullRejectionException(e === undefined)); + return this.completer.completeError$1(e); + }, + $signature: 3 + }; + A.dartify_convert.prototype = { + call$1(o) { + var t1, millisSinceEpoch, proto, t2, dartObject, originalKeys, dartKeys, i, jsKey, dartKey, l, $length; + if (A._noDartifyRequired(o)) + return o; + t1 = this._convertedObjects; + o.toString; + if (t1.containsKey$1(o)) + return t1.$index(0, o); + if (o instanceof Date) { + millisSinceEpoch = o.getTime(); + if (millisSinceEpoch < -864e13 || millisSinceEpoch > 864e13) + A.throwExpression(A.RangeError$range(millisSinceEpoch, -864e13, 864e13, "millisecondsSinceEpoch", null)); + A.checkNotNullable(true, "isUtc", type$.bool); + return new A.DateTime(millisSinceEpoch, 0, true); + } + if (o instanceof RegExp) + throw A.wrapException(A.ArgumentError$("structured clone of RegExp", null)); + if (typeof Promise != "undefined" && o instanceof Promise) + return 
A.promiseToFuture(o, type$.nullable_Object); + proto = Object.getPrototypeOf(o); + if (proto === Object.prototype || proto === null) { + t2 = type$.nullable_Object; + dartObject = A.LinkedHashMap_LinkedHashMap$_empty(t2, t2); + t1.$indexSet(0, o, dartObject); + originalKeys = Object.keys(o); + dartKeys = []; + for (t1 = J.getInterceptor$ax(originalKeys), t2 = t1.get$iterator(originalKeys); t2.moveNext$0();) + dartKeys.push(A.dartify(t2.get$current())); + for (i = 0; i < t1.get$length(originalKeys); ++i) { + jsKey = t1.$index(originalKeys, i); + if (!(i < dartKeys.length)) + return A.ioore(dartKeys, i); + dartKey = dartKeys[i]; + if (jsKey != null) + dartObject.$indexSet(0, dartKey, this.call$1(o[jsKey])); + } + return dartObject; + } + if (o instanceof Array) { + l = o; + dartObject = []; + t1.$indexSet(0, o, dartObject); + $length = A._asInt(o.length); + for (t1 = J.getInterceptor$asx(l), i = 0; i < $length; ++i) + dartObject.push(this.call$1(t1.$index(l, i))); + return dartObject; + } + return o; + }, + $signature: 8 + }; + A.NullRejectionException.prototype = { + toString$0(_) { + return "Promise was rejected with a value of `" + (this.isUndefined ? "undefined" : "null") + "`."; + } + }; + A._JSSecureRandom.prototype = { + _JSSecureRandom$0() { + var $crypto = self.crypto; + if ($crypto != null) + if ($crypto.getRandomValues != null) + return; + throw A.wrapException(A.UnsupportedError$("No source of cryptographically secure random numbers available.")); + }, + nextInt$1(max) { + var byteCount, t1, start, randomLimit, t2, t3, random, result, _null = null; + if (max <= 0 || max > 4294967296) + throw A.wrapException(new A.RangeError(_null, _null, false, _null, _null, "max must be in range 0 < max \u2264 2^32, was " + max)); + if (max > 255) + if (max > 65535) + byteCount = max > 16777215 ? 
4 : 3; + else + byteCount = 2; + else + byteCount = 1; + t1 = this._math$_buffer; + t1.$flags & 2 && A.throwUnsupportedOperation(t1, 11); + t1.setUint32(0, 0, false); + start = 4 - byteCount; + randomLimit = A._asInt(Math.pow(256, byteCount)); + for (t2 = max - 1, t3 = (max & t2) === 0; true;) { + crypto.getRandomValues(J.asUint8List$2$x(B.NativeByteData_methods.get$buffer(t1), start, byteCount)); + random = t1.getUint32(0, false); + if (t3) + return (random & t2) >>> 0; + result = random % max; + if (random - result + max < randomLimit) + return result; + } + } + }; + A.CryptorError.prototype = { + _enumToString$0() { + return "CryptorError." + this._name; + } + }; + A.FrameInfo.prototype = {}; + A.FrameCryptor.prototype = { + get$enabled() { + if (this.participantIdentity == null) + return false; + return this._enabled; + }, + setupTransform$6$codec$kind$operation$readable$trackId$writable(codec, kind, operation, readable, trackId, writable) { + return this.setupTransform$body$FrameCryptor(codec, kind, operation, readable, trackId, writable); + }, + setupTransform$5$kind$operation$readable$trackId$writable(kind, operation, readable, trackId, writable) { + return this.setupTransform$6$codec$kind$operation$readable$trackId$writable(null, kind, operation, readable, trackId, writable); + }, + setupTransform$body$FrameCryptor(codec, kind, operation, readable, trackId, writable) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, transformer, e, t2, t3, t4, t5, exception, t1; + var $async$setupTransform$6$codec$kind$operation$readable$trackId$writable = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $.$get$logger(); + t1.log$4(B.Level_INFO_800, "setupTransform " + operation + " kind " + kind, null, null); + 
$async$self.__FrameCryptor_kind_A = kind; + if (codec != null) { + t1.log$4(B.Level_INFO_800, "setting codec on cryptor to " + codec, null, null); + $async$self.codec = codec; + } + t1 = self.TransformStream; + t2 = operation === "encode" ? $async$self.get$encodeFunction() : $async$self.get$decodeFunction(); + t3 = type$.Future_void_Function_JSObject_JSObject; + t4 = type$.String; + t5 = type$.JSObject; + transformer = t5._as(new t1(t5._as(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["transform", A.allowInterop(t2, t3)], t4, t3))))); + try { + t5._as(t5._as(readable.pipeThrough(transformer)).pipeTo(writable)); + } catch (exception) { + e = A.unwrapException(exception); + $.$get$logger().log$4(B.Level_WARNING_900, "e " + J.toString$0$(e), null, null); + if ($async$self.lastError !== B.CryptorError_7) { + $async$self.lastError = B.CryptorError_7; + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", $async$self.participantIdentity, "state", "internalError", "error", "Internal error: " + J.toString$0$(e)], t4, type$.nullable_String))); + } + } + $async$self.trackId = trackId; + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$setupTransform$6$codec$kind$operation$readable$trackId$writable, $async$completer); + }, + getUnencryptedBytes$2(obj, codec) { + var naluIndices, t1, t2, _i, index, type, _null = null, frameType = "", + data = A.NativeUint8List_NativeUint8List$view(type$.NativeByteBuffer._as(obj.data), 0, _null); + if ("type" in obj) { + frameType = A._asString(obj.type); + $.$get$logger().log$4(B.Level_FINER_400, "frameType: " + frameType, _null, _null); + } + if (codec != null && codec.toLowerCase() === "h264") { + type$.Uint8List._as(data); + naluIndices = A.findNALUIndices(data); + for (t1 = naluIndices.length, t2 = data.length, _i = 0; _i < naluIndices.length; naluIndices.length === t1 || (0, 
A.throwConcurrentModificationError)(naluIndices), ++_i) { + index = naluIndices[_i]; + if (!(index < t2)) + return A.ioore(data, index); + type = data[index] & 31; + switch (type) { + case 5: + case 1: + t1 = index + 2; + $.$get$logger().log$4(B.Level_FINER_400, "unEncryptedBytes NALU of type " + type + ", offset " + t1, _null, _null); + return t1; + default: + $.$get$logger().log$4(B.Level_FINER_400, "skipping NALU of type " + type, _null, _null); + break; + } + } + throw A.wrapException(A.Exception_Exception("Could not find NALU")); + } + switch (frameType) { + case "key": + return 10; + case "delta": + return 3; + case "audio": + return 1; + default: + return 0; + } + }, + readFrameInfo$1(frameObj) { + var buffer, frameType, t1, synchronizationSource, timestamp; + new Uint8Array(0); + buffer = A.NativeUint8List_NativeUint8List$view(type$.NativeByteBuffer._as(frameObj.data), 0, null); + if ("type" in frameObj) { + frameType = A._asString(frameObj.type); + $.$get$logger().log$4(B.Level_FINER_400, "frameType: " + frameType, null, null); + } else + frameType = ""; + t1 = type$.JSObject; + synchronizationSource = A._asInt(t1._as(frameObj.getMetadata()).synchronizationSource); + if ("rtpTimestamp" in t1._as(frameObj.getMetadata())) + timestamp = B.JSInt_methods.toInt$0(A._asInt(t1._as(frameObj.getMetadata()).rtpTimestamp)); + else + timestamp = "timestamp" in frameObj ? 
A._asInt(A._asDouble(frameObj.timestamp)) : 0; + return new A.FrameInfo(frameType, synchronizationSource, timestamp, buffer); + }, + enqueueFrame$3(frameObj, controller, buffer) { + var t1 = type$.NativeByteBuffer._as(B.NativeUint8List_methods.get$buffer(buffer.toBytes$0())); + frameObj.data = t1; + controller.enqueue(frameObj); + }, + encodeFunction$2(frameObj, controller) { + var t1 = type$.JSObject; + return this.encodeFunction$body$FrameCryptor(t1._as(frameObj), t1._as(controller)); + }, + encodeFunction$body$FrameCryptor(frameObj, controller) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$returnValue, $async$handler = 2, $async$errorStack = [], $async$self = this, srcFrame, secretKey, keyIndex, headerLength, iv, frameTrailer, cipherText, finalBuffer, e, t1, t2, t3, t4, iv0, sendCount, t5, t6, t7, exception, $async$exception, $async$temp1; + var $async$encodeFunction$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) { + $async$errorStack.push($async$result); + $async$goto = $async$handler; + } + while (true) + switch ($async$goto) { + case 0: + // Function start + $async$handler = 4; + t1 = true; + if ($async$self.get$enabled()) { + t2 = type$.NativeByteBuffer; + if (!(t2._as(frameObj.data).byteLength === 0)) + t1 = t2._as(frameObj.data).byteLength === 0; + } + if (t1) { + if ($async$self.keyHandler.keyOptions.discardFrameWhenCryptorNotReady) { + // goto return + $async$goto = 1; + break; + } + controller.enqueue(frameObj); + // goto return + $async$goto = 1; + break; + } + srcFrame = $async$self.readFrameInfo$1(frameObj); + t1 = $.$get$logger(); + t1.log$4(B.Level_FINE_500, "encodeFunction: buffer " + srcFrame.buffer.length + ", synchronizationSource " + srcFrame.ssrc + " frameType " + srcFrame.frameType, null, null); + t2 = $async$self.keyHandler.getKeySet$1($async$self.currentKeyIndex); + secretKey = t2 == null ? 
null : t2.encryptionKey; + keyIndex = $async$self.currentKeyIndex; + if (secretKey == null) { + if ($async$self.lastError !== B.CryptorError_5) { + $async$self.lastError = B.CryptorError_5; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + t3 = $async$self.__FrameCryptor_kind_A; + t3 === $ && A.throwLateFieldNI("kind"); + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", t3, "state", "missingKey", "error", "Missing key for track " + t2], type$.String, type$.nullable_String))); + } + // goto return + $async$goto = 1; + break; + } + t2 = $async$self.__FrameCryptor_kind_A; + t2 === $ && A.throwLateFieldNI("kind"); + headerLength = t2 === "video" ? $async$self.getUnencryptedBytes$2(frameObj, $async$self.codec) : 1; + t3 = srcFrame.ssrc; + t4 = srcFrame.timestamp; + iv0 = new DataView(new ArrayBuffer(12)); + t2 = $async$self.sendCounts; + if (t2.$index(0, t3) == null) + t2.$indexSet(0, t3, $.$get$Random__secureRandom().nextInt$1(65535)); + sendCount = t2.$index(0, t3); + if (sendCount == null) + sendCount = 0; + iv0.setUint32(0, t3, false); + iv0.setUint32(4, t4, false); + iv0.setUint32(8, t4 - B.JSInt_methods.$mod(sendCount, 65535), false); + t2.$indexSet(0, t3, sendCount + 1); + iv = J.asUint8List$0$x(B.NativeByteData_methods.get$buffer(iv0)); + frameTrailer = new DataView(new ArrayBuffer(2)); + t2 = frameTrailer; + t2.$flags & 2 && A.throwUnsupportedOperation(t2, 6); + J._setInt8$2$x(t2, 0, 12); + t2 = frameTrailer; + t3 = A._asInt(keyIndex); + t2.$flags & 2 && A.throwUnsupportedOperation(t2, 6); + J._setInt8$2$x(t2, 1, t3); + t3 = $async$self.worker; + t2 = type$.JSObject; + t4 = t2._as(t2._as(t3.crypto).subtle); + t5 = type$.String; + t6 = type$.Object; + t7 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "AES-GCM", "iv", iv, "additionalData", B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, headerLength)], t5, 
t6)); + t6 = t7 == null ? t6._as(t7) : t7; + $async$temp1 = type$.NativeByteBuffer; + $async$goto = 7; + return A._asyncAwait(A.promiseToFuture(t2._as(t4.encrypt(t6, secretKey, B.NativeUint8List_methods.sublist$2(srcFrame.buffer, headerLength, srcFrame.buffer.length))), type$.nullable_Object), $async$encodeFunction$2); + case 7: + // returning from await. + cipherText = $async$temp1._as($async$result); + t1.log$4(B.Level_FINER_400, "encodeFunction: encrypted buffer: " + srcFrame.buffer.length + ", cipherText: " + A.NativeUint8List_NativeUint8List$view(cipherText, 0, null).length, null, null); + t2 = $.$get$_CopyingBytesBuilder__emptyList(); + finalBuffer = new A._CopyingBytesBuilder(t2); + J.add$1$ax(finalBuffer, new Uint8Array(A._ensureNativeList(B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, headerLength)))); + J.add$1$ax(finalBuffer, A.NativeUint8List_NativeUint8List$view(cipherText, 0, null)); + J.add$1$ax(finalBuffer, iv); + J.add$1$ax(finalBuffer, J.asUint8List$0$x(J.get$buffer$x(frameTrailer))); + $async$self.enqueueFrame$3(frameObj, controller, finalBuffer); + if ($async$self.lastError !== B.CryptorError_1) { + $async$self.lastError = B.CryptorError_1; + t3.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", $async$self.participantIdentity, "trackId", $async$self.trackId, "kind", $async$self.__FrameCryptor_kind_A, "state", "ok", "error", "encryption ok"], t5, type$.nullable_String))); + } + t1.log$4(B.Level_FINER_400, "encodeFunction[CryptorError.kOk]: frame enqueued kind " + $async$self.__FrameCryptor_kind_A + ",codec " + A.S($async$self.codec) + " headerLength: " + A.S(headerLength) + ", timestamp: " + srcFrame.timestamp + ", ssrc: " + srcFrame.ssrc + ", data length: " + srcFrame.buffer.length + ", encrypted length: " + finalBuffer.toBytes$0().length + ", iv " + A.S(iv), null, null); + $async$handler = 2; + // goto after finally + $async$goto = 6; + break; + case 4: + // catch + 
$async$handler = 3; + $async$exception = $async$errorStack.pop(); + e = A.unwrapException($async$exception); + $.$get$logger().log$4(B.Level_WARNING_900, "encodeFunction encrypt: e " + J.toString$0$(e), null, null); + if ($async$self.lastError !== B.CryptorError_3) { + $async$self.lastError = B.CryptorError_3; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + t3 = $async$self.__FrameCryptor_kind_A; + t3 === $ && A.throwLateFieldNI("kind"); + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", t3, "state", "encryptError", "error", J.toString$0$(e)], type$.String, type$.nullable_String))); + } + // goto after finally + $async$goto = 6; + break; + case 3: + // uncaught + // goto rethrow + $async$goto = 2; + break; + case 6: + // after finally + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + case 2: + // rethrow + return A._asyncRethrow($async$errorStack.at(-1), $async$completer); + } + }); + return A._asyncStartSync($async$encodeFunction$2, $async$completer); + }, + decodeFunction$2(frameObj, controller) { + var t1 = type$.JSObject; + return this.decodeFunction$body$FrameCryptor(t1._as(frameObj), t1._as(controller)); + }, + decodeFunction$body$FrameCryptor(frameObj, controller) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$returnValue, $async$handler = 2, $async$errorStack = [], $async$self = this, headerLength, frameTrailer, ivLength, keyIndex, iv, decryptFrameInternal, ratchedKeyInternal, e, finalBuffer, e0, t2, t3, t4, t5, t6, magicBytesBuffer, t7, initialKeySet, exception, t1, srcFrame, $async$exception, $async$exception1; + var $async$decodeFunction$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) { + $async$errorStack.push($async$result); + $async$goto = $async$handler; + } + while (true) + 
switch ($async$goto) { + case 0: + // Function start + t1 = {}; + srcFrame = $async$self.readFrameInfo$1(frameObj); + t1.ratchetCount = 0; + t2 = $.$get$logger(); + t2.log$4(B.Level_FINE_500, "decodeFunction: frame lenght " + srcFrame.buffer.length, null, null); + t1.initialKeySet = t1.decrypted = null; + t1.initialKeyIndex = $async$self.currentKeyIndex; + if (!$async$self.get$enabled() || srcFrame.buffer.length === 0) { + $async$self.sifGuard.recordUserFrame$0(); + if ($async$self.keyHandler.keyOptions.discardFrameWhenCryptorNotReady) { + // goto return + $async$goto = 1; + break; + } + t2.log$4(B.Level_FINE_500, "enqueing empty frame", null, null); + controller.enqueue(frameObj); + t2.log$4(B.Level_FINER_400, "enqueing silent frame", null, null); + // goto return + $async$goto = 1; + break; + } + t3 = $async$self.keyHandler.keyOptions.uncryptedMagicBytes; + if (t3 != null) { + t4 = srcFrame.buffer; + t5 = t3.length; + t6 = t5 + 1; + if (t4.length > t6) { + magicBytesBuffer = B.NativeUint8List_methods.sublist$2(srcFrame.buffer, srcFrame.buffer.length - t5 - 1, srcFrame.buffer.length - 1); + t2.log$4(B.Level_FINER_400, "magicBytesBuffer " + A.S(magicBytesBuffer) + ", magicBytes " + A.S(t3), null, null); + t4 = $async$self.sifGuard; + if (A.Iterable_iterableToFullString(magicBytesBuffer, "[", "]") === A.Iterable_iterableToFullString(t3, "[", "]")) { + ++t4.consecutiveSifCount; + if (t4.sifSequenceStartedAt == null) + t4.sifSequenceStartedAt = Date.now(); + t4.lastSifReceivedAt = Date.now(); + if (t4.consecutiveSifCount < 100) + if (t4.sifSequenceStartedAt != null) { + t1 = Date.now(); + t4 = t4.sifSequenceStartedAt; + t4.toString; + t4 = t1 - t4 < 2000; + t1 = t4; + } else + t1 = true; + else + t1 = false; + if (t1) { + t1 = B.NativeUint8List_methods.sublist$1(srcFrame.buffer, srcFrame.buffer.length - 1); + if (0 >= t1.length) { + $async$returnValue = A.ioore(t1, 0); + // goto return + $async$goto = 1; + break; + } + t2.log$4(B.Level_FINER_400, "ecodeFunction: skip 
uncrypted frame, type " + t1[0], null, null); + finalBuffer = new A._CopyingBytesBuilder($.$get$_CopyingBytesBuilder__emptyList()); + finalBuffer.add$1(0, new Uint8Array(A._ensureNativeList(B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, srcFrame.buffer.length - t6)))); + $async$self.enqueueFrame$3(frameObj, controller, finalBuffer); + t2.log$4(B.Level_FINE_500, "ecodeFunction: enqueing silent frame", null, null); + controller.enqueue(frameObj); + } else + t2.log$4(B.Level_FINER_400, "ecodeFunction: SIF limit reached, dropping frame", null, null); + t2.log$4(B.Level_FINER_400, "ecodeFunction: enqueing silent frame", null, null); + controller.enqueue(frameObj); + // goto return + $async$goto = 1; + break; + } else + t4.recordUserFrame$0(); + } + } + $async$handler = 4; + t3 = {}; + t4 = $async$self.__FrameCryptor_kind_A; + t4 === $ && A.throwLateFieldNI("kind"); + headerLength = t4 === "video" ? $async$self.getUnencryptedBytes$2(frameObj, $async$self.codec) : 1; + frameTrailer = B.NativeUint8List_methods.sublist$1(srcFrame.buffer, srcFrame.buffer.length - 2); + ivLength = J.$index$asx(frameTrailer, 0); + keyIndex = J.$index$asx(frameTrailer, 1); + t5 = srcFrame.buffer; + t6 = srcFrame.buffer; + t7 = ivLength; + if (typeof t7 !== "number") { + $async$returnValue = A.iae(t7); + // goto return + $async$goto = 1; + break; + } + iv = B.NativeUint8List_methods.sublist$2(t5, t6.length - t7 - 2, srcFrame.buffer.length - 2); + initialKeySet = t1.initialKeySet = $async$self.keyHandler.getKeySet$1(keyIndex); + t1.initialKeyIndex = keyIndex; + t2.log$4(B.Level_FINER_400, "decodeFunction: start decrypting frame headerLength " + A.S(headerLength) + " " + srcFrame.buffer.length + " frameTrailer " + A.S(frameTrailer) + ", ivLength " + A.S(ivLength) + ", keyIndex " + A.S(keyIndex) + ", iv " + A.S(iv), null, null); + if (initialKeySet == null || !$async$self.keyHandler._hasValidKey) { + if ($async$self.lastError !== B.CryptorError_5) { + $async$self.lastError = 
B.CryptorError_5; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", $async$self.__FrameCryptor_kind_A, "state", "missingKey", "error", "Missing key for track " + t2], type$.String, type$.nullable_String))); + } + // goto return + $async$goto = 1; + break; + } + t3.currentkeySet = initialKeySet; + decryptFrameInternal = new A.FrameCryptor_decodeFunction_decryptFrameInternal(t1, t3, $async$self, iv, srcFrame, headerLength, ivLength); + ratchedKeyInternal = new A.FrameCryptor_decodeFunction_ratchedKeyInternal(t1, t3, $async$self, decryptFrameInternal); + $async$handler = 8; + $async$goto = 11; + return A._asyncAwait(decryptFrameInternal.call$0(), $async$decodeFunction$2); + case 11: + // returning from await. + $async$handler = 4; + // goto after finally + $async$goto = 10; + break; + case 8: + // catch + $async$handler = 7; + $async$exception = $async$errorStack.pop(); + e = A.unwrapException($async$exception); + $async$self.lastError = B.CryptorError_7; + t2 = $.$get$logger(); + t2.log$4(B.Level_FINER_400, "decodeFunction: kInternalError catch " + A.S(e), null, null); + $async$goto = 12; + return A._asyncAwait(ratchedKeyInternal.call$0(), $async$decodeFunction$2); + case 12: + // returning from await. 
+ // goto after finally + $async$goto = 10; + break; + case 7: + // uncaught + // goto catch + $async$goto = 4; + break; + case 10: + // after finally + t3 = t1.decrypted; + if (t3 == null) { + t1 = A.Exception_Exception("[decodeFunction] decryption failed even after ratchting"); + throw A.wrapException(t1); + } + t4 = $async$self.keyHandler; + t4._decryptionFailureCount = 0; + t4._hasValidKey = true; + t2.log$4(B.Level_FINER_400, "decodeFunction: decryption success, buffer length " + srcFrame.buffer.length + ", decrypted: " + A.NativeUint8List_NativeUint8List$view(t3, 0, null).length, null, null); + t3 = $.$get$_CopyingBytesBuilder__emptyList(); + finalBuffer = new A._CopyingBytesBuilder(t3); + J.add$1$ax(finalBuffer, new Uint8Array(A._ensureNativeList(B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, headerLength)))); + t1 = t1.decrypted; + t1.toString; + J.add$1$ax(finalBuffer, A.NativeUint8List_NativeUint8List$view(t1, 0, null)); + $async$self.enqueueFrame$3(frameObj, controller, finalBuffer); + if ($async$self.lastError !== B.CryptorError_1) { + $async$self.lastError = B.CryptorError_1; + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", $async$self.participantIdentity, "trackId", $async$self.trackId, "kind", $async$self.__FrameCryptor_kind_A, "state", "ok", "error", "decryption ok"], type$.String, type$.nullable_String))); + } + t2.log$4(B.Level_FINE_500, "decodeFunction[CryptorError.kOk]: decryption success kind " + $async$self.__FrameCryptor_kind_A + ", headerLength: " + A.S(headerLength) + ", timestamp: " + srcFrame.timestamp + ", ssrc: " + srcFrame.ssrc + ", data length: " + srcFrame.buffer.length + ", decrypted length: " + finalBuffer.toBytes$0().length + ", keyindex " + A.S(keyIndex) + " iv " + A.S(iv), null, null); + $async$handler = 2; + // goto after finally + $async$goto = 6; + break; + case 4: + // catch + $async$handler = 3; + $async$exception1 = 
$async$errorStack.pop(); + e0 = A.unwrapException($async$exception1); + if ($async$self.lastError !== B.CryptorError_2) { + $async$self.lastError = B.CryptorError_2; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + t3 = $async$self.__FrameCryptor_kind_A; + t3 === $ && A.throwLateFieldNI("kind"); + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", t3, "state", "decryptError", "error", J.toString$0$(e0)], type$.String, type$.nullable_String))); + } + $async$self.keyHandler.decryptionFailure$0(); + // goto after finally + $async$goto = 6; + break; + case 3: + // uncaught + // goto rethrow + $async$goto = 2; + break; + case 6: + // after finally + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + case 2: + // rethrow + return A._asyncRethrow($async$errorStack.at(-1), $async$completer); + } + }); + return A._asyncStartSync($async$decodeFunction$2, $async$completer); + } + }; + A.FrameCryptor_decodeFunction_decryptFrameInternal.prototype = { + call$0() { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, decrypted, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, $async$temp1; + var $async$call$0 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self.$this; + t2 = t1.worker; + t3 = type$.JSObject; + t4 = t3._as(t3._as(t2.crypto).subtle); + t5 = $async$self.srcFrame; + t6 = t5.buffer; + t7 = $async$self.headerLength; + t8 = type$.String; + t9 = type$.Object; + t10 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "AES-GCM", "iv", $async$self.iv, "additionalData", B.NativeUint8List_methods.sublist$2(t6, 0, t7)], t8, t9)); + t9 = t10 == null ? 
t9._as(t10) : t10; + t10 = $async$self._box_0; + $async$temp1 = type$.NativeByteBuffer; + $async$goto = 2; + return A._asyncAwait(A.promiseToFuture(t3._as(t4.decrypt(t9, t10.currentkeySet.encryptionKey, B.NativeUint8List_methods.sublist$2(t6, t7, t6.length - $async$self.ivLength - 2))), type$.nullable_Object), $async$call$0); + case 2: + // returning from await. + decrypted = $async$temp1._as($async$result); + t6 = $async$self._box_1; + t6.decrypted = decrypted; + t7 = $.$get$logger(); + t7.log$4(B.Level_FINER_400, string$.decode + A.NativeUint8List_NativeUint8List$view(decrypted, 0, null).length, null, null); + t3 = t6.decrypted; + if (t3 == null) + throw A.wrapException(A.Exception_Exception("[decryptFrameInternal] could not decrypt")); + t7.log$4(B.Level_FINER_400, string$.decode + A.NativeUint8List_NativeUint8List$view(t3, 0, null).length, null, null); + $async$goto = t10.currentkeySet !== t6.initialKeySet ? 3 : 4; + break; + case 3: + // then + t7.log$4(B.Level_FINE_500, "decodeFunction::decryptFrameInternal: ratchetKey: decryption ok, newState: kKeyRatcheted", null, null); + $async$goto = 5; + return A._asyncAwait(t1.keyHandler.setKeySetFromMaterial$2(t10.currentkeySet, t6.initialKeyIndex), $async$call$0); + case 5: + // returning from await. 
+ case 4: + // join + t3 = t1.lastError; + if (t3 !== B.CryptorError_1 && t3 !== B.CryptorError_6 && t6.ratchetCount > 0) { + t7.log$4(B.Level_FINER_400, "decodeFunction::decryptFrameInternal: KeyRatcheted: ssrc " + t5.ssrc + " timestamp " + t5.timestamp + " ratchetCount " + t6.ratchetCount + " participantId: " + A.S(t1.participantIdentity), null, null); + t7.log$4(B.Level_FINER_400, "decodeFunction::decryptFrameInternal: ratchetKey: lastError != CryptorError.kKeyRatcheted, reset state to kKeyRatcheted", null, null); + t1.lastError = B.CryptorError_6; + t3 = t1.participantIdentity; + t4 = t1.trackId; + t1 = t1.__FrameCryptor_kind_A; + t1 === $ && A.throwLateFieldNI("kind"); + t2.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t3, "trackId", t4, "kind", t1, "state", "keyRatcheted", "error", "Key ratcheted ok"], t8, type$.nullable_String))); + } + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$call$0, $async$completer); + }, + $signature: 10 + }; + A.FrameCryptor_decodeFunction_ratchedKeyInternal.prototype = { + call$0() { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, newKeyBuffer, newMaterial, t1, t2, t3, t4, t5, t6, $async$temp1; + var $async$call$0 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self._box_1; + t2 = t1.ratchetCount; + t3 = $async$self.$this; + t4 = t3.keyHandler; + t5 = t4.keyOptions; + t6 = t5.ratchetWindowSize; + if (t2 >= t6 || t6 <= 0) + throw A.wrapException(A.Exception_Exception("[ratchedKeyInternal] cannot ratchet anymore")); + t2 = $async$self._box_0; + $async$goto = 2; + return A._asyncAwait(t4.ratchet$2(t2.currentkeySet.material, t5.ratchetSalt), 
$async$call$0); + case 2: + // returning from await. + newKeyBuffer = $async$result; + $async$goto = 3; + return A._asyncAwait(t3.keyHandler.ratchetMaterial$2(t2.currentkeySet.material, J.get$buffer$x(newKeyBuffer)), $async$call$0); + case 3: + // returning from await. + newMaterial = $async$result; + t3 = t3.keyHandler; + $async$temp1 = t2; + $async$goto = 4; + return A._asyncAwait(t3.deriveKeys$2(newMaterial, t3.keyOptions.ratchetSalt), $async$call$0); + case 4: + // returning from await. + $async$temp1.currentkeySet = $async$result; + ++t1.ratchetCount; + $async$goto = 5; + return A._asyncAwait($async$self.decryptFrameInternal.call$0(), $async$call$0); + case 5: + // returning from await. + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$call$0, $async$completer); + }, + $signature: 10 + }; + A.KeyOptions.prototype = { + toString$0(_) { + var _this = this; + return "KeyOptions{sharedKey: " + _this.sharedKey + ", ratchetWindowSize: " + _this.ratchetWindowSize + ", failureTolerance: " + _this.failureTolerance + ", uncryptedMagicBytes: " + A.S(_this.uncryptedMagicBytes) + ", ratchetSalt: " + A.S(_this.ratchetSalt) + "}"; + } + }; + A.KeyProvider.prototype = { + getParticipantKeyHandler$1(participantIdentity) { + var t2, keys, _this = this, + t1 = _this.keyProviderOptions; + if (t1.sharedKey) + return _this.getSharedKeyHandler$0(); + t2 = _this.participantKeys; + keys = t2.$index(0, participantIdentity); + if (keys == null) { + keys = A.ParticipantKeyHandler$(t1, participantIdentity, _this.worker); + t1 = _this.sharedKey; + if (t1.length !== 0) + keys.setKey$1(t1); + t2.$indexSet(0, participantIdentity, keys); + } + return keys; + }, + getSharedKeyHandler$0() { + var _this = this, + t1 = _this.sharedKeyHandler; + return t1 == null ? 
_this.sharedKeyHandler = A.ParticipantKeyHandler$(_this.keyProviderOptions, "shared-key", _this.worker) : t1; + } + }; + A.KeySet.prototype = {}; + A.ParticipantKeyHandler.prototype = { + decryptionFailure$0() { + var _this = this, + t1 = _this.keyOptions.failureTolerance; + if (t1 < 0) + return; + if (++_this._decryptionFailureCount > t1) { + $.$get$logger().log$4(B.Level_WARNING_900, "key for " + _this.participantIdentity + " is being marked as invalid", null, null); + _this._hasValidKey = false; + } + }, + exportKey$1(keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.nullable_Uint8List), + $async$returnValue, $async$handler = 2, $async$errorStack = [], $async$self = this, key, e, exception, t1, currentMaterial, $async$exception, $async$temp1; + var $async$exportKey$1 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) { + $async$errorStack.push($async$result); + $async$goto = $async$handler; + } + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self.getKeySet$1(keyIndex); + currentMaterial = t1 == null ? null : t1.material; + if (currentMaterial == null) { + $async$returnValue = null; + // goto return + $async$goto = 1; + break; + } + $async$handler = 4; + t1 = type$.JSObject; + $async$temp1 = type$.NativeByteBuffer; + $async$goto = 7; + return A._asyncAwait(A.promiseToFuture(t1._as(t1._as(t1._as($async$self.worker.crypto).subtle).exportKey("raw", currentMaterial)), type$.nullable_Object), $async$exportKey$1); + case 7: + // returning from await. 
+ key = $async$temp1._as($async$result); + t1 = A.NativeUint8List_NativeUint8List$view(key, 0, null); + $async$returnValue = t1; + // goto return + $async$goto = 1; + break; + $async$handler = 2; + // goto after finally + $async$goto = 6; + break; + case 4: + // catch + $async$handler = 3; + $async$exception = $async$errorStack.pop(); + e = A.unwrapException($async$exception); + $.$get$logger().log$4(B.Level_WARNING_900, "exportKey: " + A.S(e), null, null); + $async$returnValue = null; + // goto return + $async$goto = 1; + break; + // goto after finally + $async$goto = 6; + break; + case 3: + // uncaught + // goto rethrow + $async$goto = 2; + break; + case 6: + // after finally + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + case 2: + // rethrow + return A._asyncRethrow($async$errorStack.at(-1), $async$completer); + } + }); + return A._asyncStartSync($async$exportKey$1, $async$completer); + }, + ratchetKey$1(keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.nullable_Uint8List), + $async$returnValue, $async$self = this, newKey, newKeySet, t1, currentMaterial; + var $async$ratchetKey$1 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self.getKeySet$1(keyIndex); + currentMaterial = t1 == null ? null : t1.material; + if (currentMaterial == null) { + $async$returnValue = null; + // goto return + $async$goto = 1; + break; + } + t1 = $async$self.keyOptions.ratchetSalt; + $async$goto = 3; + return A._asyncAwait($async$self.ratchet$2(currentMaterial, t1), $async$ratchetKey$1); + case 3: + // returning from await. 
+ newKey = $async$result; + $async$goto = 5; + return A._asyncAwait($async$self.ratchetMaterial$2(currentMaterial, B.NativeUint8List_methods.get$buffer(newKey)), $async$ratchetKey$1); + case 5: + // returning from await. + $async$goto = 4; + return A._asyncAwait($async$self.deriveKeys$2($async$result, t1), $async$ratchetKey$1); + case 4: + // returning from await. + newKeySet = $async$result; + $async$goto = 6; + return A._asyncAwait($async$self.setKeySetFromMaterial$2(newKeySet, keyIndex == null ? $async$self.currentKeyIndex : keyIndex), $async$ratchetKey$1); + case 6: + // returning from await. + $async$returnValue = newKey; + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$ratchetKey$1, $async$completer); + }, + ratchetMaterial$2(currentMaterial, newKeyBuffer) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.JSObject), + $async$returnValue, $async$self = this, t1; + var $async$ratchetMaterial$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = type$.JSObject; + $async$goto = 3; + return A._asyncAwait(A.promiseToFuture(A.callMethod(t1._as(t1._as($async$self.worker.crypto).subtle), "importKey", ["raw", type$.NativeByteBuffer._as(newKeyBuffer), type$.Object._as(t1._as(currentMaterial.algorithm).name), false, type$.JSArray_nullable_Object._as(A.jsify(A._setArrayType(["deriveBits", "deriveKey"], type$.JSArray_String)))], t1), t1), $async$ratchetMaterial$2); + case 3: + // returning from await. 
+ $async$returnValue = $async$result; + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$ratchetMaterial$2, $async$completer); + }, + getKeySet$1(keyIndex) { + var t2, + t1 = this.__ParticipantKeyHandler_cryptoKeyRing_A; + t1 === $ && A.throwLateFieldNI("cryptoKeyRing"); + t2 = keyIndex == null ? this.currentKeyIndex : keyIndex; + if (!(t2 >= 0 && t2 < t1.length)) + return A.ioore(t1, t2); + return t1[t2]; + }, + setKey$2$keyIndex(key, keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, t1, t2, t3; + var $async$setKey$2$keyIndex = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = type$.JSObject; + t2 = t1._as(t1._as($async$self.worker.crypto).subtle); + t3 = type$.String; + t3 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "PBKDF2"], t3, t3)); + if (t3 == null) + t3 = type$.Object._as(t3); + $async$goto = 4; + return A._asyncAwait(A.promiseToFuture(A.callMethod(t2, "importKey", ["raw", key, t3, false, type$.JSArray_nullable_Object._as(A.jsify(A._setArrayType(["deriveBits", "deriveKey"], type$.JSArray_String)))], t1), t1), $async$setKey$2$keyIndex); + case 4: + // returning from await. + $async$goto = 3; + return A._asyncAwait($async$self.deriveKeys$2($async$result, $async$self.keyOptions.ratchetSalt), $async$setKey$2$keyIndex); + case 3: + // returning from await. + $async$goto = 2; + return A._asyncAwait($async$self.setKeySetFromMaterial$2($async$result, keyIndex), $async$setKey$2$keyIndex); + case 2: + // returning from await. 
+ $async$self._decryptionFailureCount = 0; + $async$self._hasValidKey = true; + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$setKey$2$keyIndex, $async$completer); + }, + setKey$1(key) { + return this.setKey$2$keyIndex(key, 0); + }, + setKeySetFromMaterial$2(keySet, keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, t1; + var $async$setKeySetFromMaterial$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + $.$get$logger().log$4(B.Level_CONFIG_700, "setKeySetFromMaterial: set new key, index: " + keyIndex, null, null); + if (keyIndex >= 0) { + t1 = $async$self.__ParticipantKeyHandler_cryptoKeyRing_A; + t1 === $ && A.throwLateFieldNI("cryptoKeyRing"); + $async$self.currentKeyIndex = B.JSInt_methods.$mod(keyIndex, t1.length); + } + t1 = $async$self.__ParticipantKeyHandler_cryptoKeyRing_A; + t1 === $ && A.throwLateFieldNI("cryptoKeyRing"); + B.JSArray_methods.$indexSet(t1, $async$self.currentKeyIndex, keySet); + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$setKeySetFromMaterial$2, $async$completer); + }, + deriveKeys$2(material, salt) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.KeySet), + $async$returnValue, $async$self = this, t4, t5, t1, algorithmOptions, t2, t3, $async$temp1, $async$temp2, $async$temp3; + var $async$deriveKeys$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = type$.JSObject; + algorithmOptions = 
A.getAlgoOptions(A._asString(t1._as(material.algorithm).name), salt); + t2 = t1._as(t1._as($async$self.worker.crypto).subtle); + t3 = A.jsify(algorithmOptions); + if (t3 == null) + t3 = type$.Object._as(t3); + t4 = type$.Object; + t5 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "AES-GCM", "length", 128], type$.String, t4)); + t4 = t5 == null ? t4._as(t5) : t5; + $async$temp1 = A; + $async$temp2 = material; + $async$temp3 = t1; + $async$goto = 3; + return A._asyncAwait(A.promiseToFuture(A.callMethod(t2, "deriveKey", [t3, material, t4, false, type$.JSArray_nullable_Object._as(A.jsify(A._setArrayType(["encrypt", "decrypt"], type$.JSArray_String)))], t1), type$.nullable_Object), $async$deriveKeys$2); + case 3: + // returning from await. + $async$returnValue = new $async$temp1.KeySet($async$temp2, $async$temp3._as($async$result)); + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$deriveKeys$2, $async$completer); + }, + ratchet$2(material, salt) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.Uint8List), + $async$returnValue, $async$self = this, algorithmOptions, t1, t2, t3, $async$temp1; + var $async$ratchet$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + algorithmOptions = A.getAlgoOptions("PBKDF2", salt); + t1 = type$.JSObject; + t2 = t1._as(t1._as($async$self.worker.crypto).subtle); + t3 = A.jsify(algorithmOptions); + if (t3 == null) + t3 = type$.Object._as(t3); + $async$temp1 = A; + $async$goto = 3; + return A._asyncAwait(A.promiseToFuture(t1._as(t2.deriveBits(t3, material, 256)), type$.NativeByteBuffer), $async$ratchet$2); + case 3: + // returning from await. 
+ $async$returnValue = $async$temp1.NativeUint8List_NativeUint8List$view($async$result, 0, null); + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$ratchet$2, $async$completer); + }, + set$__ParticipantKeyHandler_cryptoKeyRing_A(__ParticipantKeyHandler_cryptoKeyRing_A) { + this.__ParticipantKeyHandler_cryptoKeyRing_A = type$.List_nullable_KeySet._as(__ParticipantKeyHandler_cryptoKeyRing_A); + } + }; + A.SifGuard.prototype = { + recordUserFrame$0() { + var _this = this; + if (_this.sifSequenceStartedAt == null) + return; + if (++_this.userFramesSinceSif > _this.consecutiveSifCount || Date.now() - _this.lastSifReceivedAt > 2000) + _this.reset$0(); + }, + reset$0() { + this.consecutiveSifCount = this.userFramesSinceSif = 0; + this.sifSequenceStartedAt = null; + } + }; + A.getTrackCryptor_closure.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.unsetCryptorParticipant_closure.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main_closure.prototype = { + call$1(record) { + type$.LogRecord._as(record); + A.printString("[" + record.loggerName + "] " + record.level.name + ": " + record.message); + }, + $signature: 21 + }; + A.main_closure0.prototype = { + call$1($event) { + var t2, transformer, options, kind, participantId, trackId, codec, msgType, keyProviderId, keyProvider, cryptor, _null = null, + t1 = type$.JSObject; + t1._as($event); + t2 = $.$get$logger(); + t2.log$4(B.Level_INFO_800, "Got onrtctransform event", _null, _null); + transformer = t1._as($event.transformer); + transformer.handled = true; + options = t1._as(transformer.options); + kind = A._asString(options.kind); + participantId = A._asString(options.participantId); + trackId = A._asString(options.trackId); + codec = A._asStringQ(options.codec); + 
msgType = A._asString(options.msgType); + keyProviderId = A._asString(options.keyProviderId); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t2.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, _null, _null); + return; + } + cryptor = A.getTrackCryptor(participantId, trackId, keyProvider); + t2 = t1._as(transformer.readable); + t1 = t1._as(transformer.writable); + cryptor.setupTransform$6$codec$kind$operation$readable$trackId$writable(codec == null ? _null : codec, kind, msgType, t2, trackId, t1); + }, + $signature: 11 + }; + A.main_closure2.prototype = { + call$1(e) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.Null), + $async$returnValue, options, keyProviderId, t2, t3, t4, t5, t6, t7, t8, keyProviderOptions, enabled, trackId, cryptors, _i, cryptor, kind, exist, participantId, readable, writable, keyProvider, key, keyIndex, newKey, c, sifTrailer, codec, msg, msgType, msgId, t1; + var $async$call$1 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + msg = type$.Map_dynamic_dynamic._as(A.dartify(e.data)); + msgType = msg.$index(0, "msgType"); + msgId = A._asStringQ(msg.$index(0, "msgId")); + t1 = $.$get$logger(); + t1.log$4(B.Level_CONFIG_700, "Got message " + A.S(msgType) + ", msgId " + A.S(msgId), null, null); + case 3: + // switch + switch (msgType) { + case "keyProviderInit": + // goto case + $async$goto = 5; + break; + case "keyProviderDispose": + // goto case + $async$goto = 6; + break; + case "enable": + // goto case + $async$goto = 7; + break; + case "decode": + // goto case + $async$goto = 8; + break; + case "encode": + // goto case + $async$goto = 9; + break; + case "removeTransform": + // goto case + $async$goto = 10; + break; + case "setKey": + // goto case + $async$goto = 11; + 
break; + case "setSharedKey": + // goto case + $async$goto = 12; + break; + case "ratchetKey": + // goto case + $async$goto = 13; + break; + case "ratchetSharedKey": + // goto case + $async$goto = 14; + break; + case "setKeyIndex": + // goto case + $async$goto = 15; + break; + case "exportKey": + // goto case + $async$goto = 16; + break; + case "exportSharedKey": + // goto case + $async$goto = 17; + break; + case "setSifTrailer": + // goto case + $async$goto = 18; + break; + case "updateCodec": + // goto case + $async$goto = 19; + break; + case "dispose": + // goto case + $async$goto = 20; + break; + default: + // goto default + $async$goto = 21; + break; + } + break; + case 5: + // case + options = msg.$index(0, "keyOptions"); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + t2 = J.getInterceptor$asx(options); + t3 = A._asBool(t2.$index(options, "sharedKey")); + t4 = new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(t2.$index(options, "ratchetSalt"))))); + t5 = A._asInt(t2.$index(options, "ratchetWindowSize")); + t6 = t2.$index(options, "failureTolerance"); + t6 = A._asInt(t6 == null ? -1 : t6); + t7 = t2.$index(options, "uncryptedMagicBytes") != null ? new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(t2.$index(options, "uncryptedMagicBytes"))))) : null; + t8 = t2.$index(options, "keyRingSize"); + t8 = A._asInt(t8 == null ? 16 : t8); + t2 = t2.$index(options, "discardFrameWhenCryptorNotReady"); + keyProviderOptions = new A.KeyOptions(t3, t4, t5, t6, t7, t8, A._asBool(t2 == null ? 
false : t2)); + t1.log$4(B.Level_CONFIG_700, "Init with keyProviderOptions:\n " + keyProviderOptions.toString$0(0), null, null); + t1 = self; + t2 = type$.JSObject; + t3 = t2._as(t1.self); + t4 = type$.String; + t5 = new Uint8Array(0); + $.keyProviders.$indexSet(0, keyProviderId, new A.KeyProvider(t3, keyProviderOptions, A.LinkedHashMap_LinkedHashMap$_empty(t4, type$.ParticipantKeyHandler), t5)); + t2._as(t1.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "init", "msgId", msgId, "msgType", "response"], t4, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 6: + // case + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + t1.log$4(B.Level_CONFIG_700, "Dispose keyProvider " + keyProviderId, null, null); + $.keyProviders.remove$1(0, keyProviderId); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "dispose", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 7: + // case + enabled = A._asBool(msg.$index(0, "enabled")); + trackId = A._asString(msg.$index(0, "trackId")); + t2 = $.participantCryptors; + t3 = A._arrayInstanceType(t2); + t4 = t3._eval$1("WhereIterable<1>"); + cryptors = A.List_List$of(new A.WhereIterable(t2, t3._eval$1("bool(1)")._as(new A.main__closure(trackId)), t4), true, t4._eval$1("Iterable.E")); + for (t2 = cryptors.length, t3 = "" + enabled, t4 = "Set enable " + t3 + " for trackId ", t5 = "setEnabled[" + t3 + string$.___las, _i = 0; _i < t2; ++_i) { + cryptor = cryptors[_i]; + t1.log$4(B.Level_CONFIG_700, t4 + cryptor.trackId, null, null); + if (cryptor.lastError !== B.CryptorError_1) { + t1.log$4(B.Level_INFO_800, t5, null, null); + cryptor.lastError = B.CryptorError_0; + } + t1.log$4(B.Level_CONFIG_700, "setEnabled for " + A.S(cryptor.participantIdentity) + ", enabled: " + t3, null, null); + cryptor._enabled = enabled; + } + 
type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorEnabled", "enable", enabled, "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_Object))); + // goto after switch + $async$goto = 4; + break; + case 8: + // case + case 9: + // case + kind = msg.$index(0, "kind"); + exist = A._asBool(msg.$index(0, "exist")); + participantId = A._asString(msg.$index(0, "participantId")); + trackId = msg.$index(0, "trackId"); + t2 = type$.JSObject; + readable = t2._as(msg.$index(0, "readableStream")); + writable = t2._as(msg.$index(0, "writableStream")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + t1.log$4(B.Level_CONFIG_700, "SetupTransform for kind " + A.S(kind) + ", trackId " + A.S(trackId) + ", participantId " + participantId + ", " + J.get$runtimeType$(readable).toString$0(0) + " " + J.get$runtimeType$(writable).toString$0(0) + "}", null, null); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + t2._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorSetup", "participantId", participantId, "trackId", trackId, "exist", exist, "operation", msgType, "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto return + $async$goto = 1; + break; + } + A._asString(trackId); + cryptor = A.getTrackCryptor(participantId, trackId, keyProvider); + A._asString(msgType); + $async$goto = 22; + return A._asyncAwait(cryptor.setupTransform$5$kind$operation$readable$trackId$writable(A._asString(kind), msgType, readable, trackId, writable), $async$call$1); + case 22: + // returning from await. 
+ t2._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorSetup", "participantId", participantId, "trackId", trackId, "exist", exist, "operation", msgType, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + cryptor.lastError = B.CryptorError_0; + // goto after switch + $async$goto = 4; + break; + case 10: + // case + trackId = A._asString(msg.$index(0, "trackId")); + t1.log$4(B.Level_CONFIG_700, "Removing trackId " + trackId, null, null); + A.unsetCryptorParticipant(trackId); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorRemoved", "trackId", trackId, "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 11: + // case + case 12: + // case + key = new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(msg.$index(0, "key"))))); + keyIndex = A._asInt(msg.$index(0, "keyIndex")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + t2 = keyProvider.keyProviderOptions.sharedKey; + t3 = "" + keyIndex; + $async$goto = t2 ? 
23 : 25; + break; + case 23: + // then + t1.log$4(B.Level_CONFIG_700, "Set SharedKey keyIndex " + t3, null, null); + t1.log$4(B.Level_INFO_800, "setting shared key", null, null); + keyProvider.sharedKey = key; + keyProvider.getSharedKeyHandler$0().setKey$2$keyIndex(key, keyIndex); + // goto join + $async$goto = 24; + break; + case 25: + // else + participantId = A._asString(msg.$index(0, "participantId")); + t1.log$4(B.Level_CONFIG_700, "Set key for participant " + participantId + ", keyIndex " + t3, null, null); + $async$goto = 26; + return A._asyncAwait(keyProvider.getParticipantKeyHandler$1(participantId).setKey$2$keyIndex(key, keyIndex), $async$call$1); + case 26: + // returning from await. + case 24: + // join + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "participantId", msg.$index(0, "participantId"), "sharedKey", t2, "keyIndex", keyIndex, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto after switch + $async$goto = 4; + break; + case 13: + // case + case 14: + // case + keyIndex = msg.$index(0, "keyIndex"); + participantId = A._asString(msg.$index(0, "participantId")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + t2 = keyProvider.keyProviderOptions.sharedKey; + $async$goto = t2 ? 
27 : 29; + break; + case 27: + // then + t1.log$4(B.Level_CONFIG_700, "RatchetKey for SharedKey, keyIndex " + A.S(keyIndex), null, null); + $async$goto = 30; + return A._asyncAwait(keyProvider.getSharedKeyHandler$0().ratchetKey$1(A._asIntQ(keyIndex)), $async$call$1); + case 30: + // returning from await. + newKey = $async$result; + // goto join + $async$goto = 28; + break; + case 29: + // else + t1.log$4(B.Level_CONFIG_700, "RatchetKey for participant " + participantId + ", keyIndex " + A.S(keyIndex), null, null); + $async$goto = 31; + return A._asyncAwait(keyProvider.getParticipantKeyHandler$1(participantId).ratchetKey$1(A._asIntQ(keyIndex)), $async$call$1); + case 31: + // returning from await. + newKey = $async$result; + case 28: + // join + t1 = type$.JSObject._as(self.self); + t1.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "ratchetKey", "sharedKey", t2, "participantId", participantId, "newKey", newKey != null ? B.C_Base64Encoder.convert$1(type$.Base64Codec._eval$1("Codec.S")._as(newKey)) : "", "keyIndex", keyIndex, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto after switch + $async$goto = 4; + break; + case 15: + // case + keyIndex = msg.$index(0, "index"); + trackId = A._asString(msg.$index(0, "trackId")); + t1.log$4(B.Level_CONFIG_700, "Setup key index for track " + trackId, null, null); + t2 = $.participantCryptors; + t3 = A._arrayInstanceType(t2); + t4 = t3._eval$1("WhereIterable<1>"); + cryptors = A.List_List$of(new A.WhereIterable(t2, t3._eval$1("bool(1)")._as(new A.main__closure0(trackId)), t4), true, t4._eval$1("Iterable.E")); + for (t2 = cryptors.length, _i = 0; _i < t2; ++_i) { + c = cryptors[_i]; + t1.log$4(B.Level_CONFIG_700, "Set keyIndex for trackId " + c.trackId, null, null); + A._asInt(keyIndex); + if (c.lastError !== B.CryptorError_1) { + t1.log$4(B.Level_INFO_800, "setKeyIndex: lastError != CryptorError.kOk, reset state to kNew", null, null); + c.lastError = B.CryptorError_0; + } + 
t1.log$4(B.Level_CONFIG_700, "setKeyIndex for " + A.S(c.participantIdentity) + ", newIndex: " + keyIndex, null, null); + c.currentKeyIndex = keyIndex; + } + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKeyIndex", "keyIndex", keyIndex, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto after switch + $async$goto = 4; + break; + case 16: + // case + case 17: + // case + keyIndex = A._asInt(msg.$index(0, "keyIndex")); + participantId = A._asString(msg.$index(0, "participantId")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + t2 = "" + keyIndex; + $async$goto = keyProvider.keyProviderOptions.sharedKey ? 32 : 34; + break; + case 32: + // then + t1.log$4(B.Level_CONFIG_700, "Export SharedKey keyIndex " + t2, null, null); + $async$goto = 35; + return A._asyncAwait(keyProvider.getSharedKeyHandler$0().exportKey$1(keyIndex), $async$call$1); + case 35: + // returning from await. + key = $async$result; + // goto join + $async$goto = 33; + break; + case 34: + // else + t1.log$4(B.Level_CONFIG_700, "Export key for participant " + participantId + ", keyIndex " + t2, null, null); + $async$goto = 36; + return A._asyncAwait(keyProvider.getParticipantKeyHandler$1(participantId).exportKey$1(keyIndex), $async$call$1); + case 36: + // returning from await. 
+ key = $async$result; + case 33: + // join + t1 = type$.JSObject._as(self.self); + t1.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "exportKey", "participantId", participantId, "keyIndex", keyIndex, "exportedKey", key != null ? B.C_Base64Encoder.convert$1(type$.Base64Codec._eval$1("Codec.S")._as(key)) : "", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_Object))); + // goto after switch + $async$goto = 4; + break; + case 18: + // case + sifTrailer = new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(msg.$index(0, "sifTrailer"))))); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + keyProvider.keyProviderOptions.uncryptedMagicBytes = sifTrailer; + t1.log$4(B.Level_CONFIG_700, "SetSifTrailer = " + A.S(sifTrailer), null, null); + for (t2 = $.participantCryptors, t3 = t2.length, _i = 0; _i < t2.length; t2.length === t3 || (0, A.throwConcurrentModificationError)(t2), ++_i) { + c = t2[_i]; + t1.log$4(B.Level_CONFIG_700, "setSifTrailer for " + A.S(c.participantIdentity) + ", magicBytes: " + A.S(sifTrailer), null, null); + c.keyHandler.keyOptions.uncryptedMagicBytes = sifTrailer; + } + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setSifTrailer", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 19: + // case + codec = A._asString(msg.$index(0, "codec")); + trackId = A._asString(msg.$index(0, "trackId")); + 
t1.log$4(B.Level_CONFIG_700, "Update codec for trackId " + trackId + ", codec " + codec, null, null); + cryptor = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.main__closure1(trackId), type$.FrameCryptor); + if (cryptor != null) { + if (cryptor.lastError !== B.CryptorError_1) { + t1.log$4(B.Level_INFO_800, "updateCodec[" + codec + string$.___las, null, null); + cryptor.lastError = B.CryptorError_0; + } + t1.log$4(B.Level_CONFIG_700, "updateCodec for " + A.S(cryptor.participantIdentity) + ", codec: " + codec, null, null); + cryptor.codec = codec; + } + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "updateCodec", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 20: + // case + trackId = A._asString(msg.$index(0, "trackId")); + t1.log$4(B.Level_CONFIG_700, "Dispose for trackId " + trackId, null, null); + cryptor = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.main__closure2(trackId), type$.FrameCryptor); + t1 = type$.JSObject; + t2 = type$.String; + t3 = type$.nullable_String; + if (cryptor != null) { + cryptor.lastError = B.CryptorError_8; + t1._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorDispose", "participantId", cryptor.participantIdentity, "trackId", trackId, "msgId", msgId, "msgType", "response"], t2, t3))); + } else + t1._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorDispose", "error", "cryptor not found", "msgId", msgId, "msgType", "response"], t2, t3))); + // goto after switch + $async$goto = 4; + break; + case 21: + // default + t1.log$4(B.Level_WARNING_900, "Unknown message kind " + msg.toString$0(0), null, null); + case 4: + // after switch + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$call$1, 
$async$completer); + }, + $signature: 22 + }; + A.main__closure.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main__closure0.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main__closure1.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main__closure2.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main_closure1.prototype = { + call$1(e) { + this.handleMessage.call$1(type$.JSObject._as(e)); + }, + $signature: 11 + }; + A.Level.prototype = { + $eq(_, other) { + if (other == null) + return false; + return other instanceof A.Level && this.value === other.value; + }, + get$hashCode(_) { + return this.value; + }, + toString$0(_) { + return this.name; + } + }; + A.LogRecord.prototype = { + toString$0(_) { + return "[" + this.level.name + "] " + this.loggerName + ": " + this.message; + } + }; + A.Logger.prototype = { + get$fullName() { + var t1 = this.parent, + t2 = t1 == null ? null : t1.name.length !== 0, + t3 = this.name; + return t2 === true ? t1.get$fullName() + "." 
+ t3 : t3; + }, + get$level() { + var t1, effectiveLevel; + if (this.parent == null) { + t1 = this._level; + t1.toString; + effectiveLevel = t1; + } else { + t1 = $.$get$Logger_root()._level; + t1.toString; + effectiveLevel = t1; + } + return effectiveLevel; + }, + log$4(logLevel, message, error, stackTrace) { + var record, _this = this, + t1 = logLevel.value; + if (t1 >= _this.get$level().value) { + if (t1 >= 2000) { + A.StackTrace_current(); + logLevel.toString$0(0); + } + t1 = _this.get$fullName(); + Date.now(); + $.LogRecord__nextNumber = $.LogRecord__nextNumber + 1; + record = new A.LogRecord(logLevel, message, t1); + if (_this.parent == null) + _this._publish$1(record); + else + $.$get$Logger_root()._publish$1(record); + } + }, + _getStream$0() { + if (this.parent == null) { + var t1 = this._controller; + if (t1 == null) { + t1 = new A._SyncBroadcastStreamController(null, null, type$._SyncBroadcastStreamController_LogRecord); + this.set$_controller(t1); + } + return new A._BroadcastStream(t1, A._instanceType(t1)._eval$1("_BroadcastStream<1>")); + } else + return $.$get$Logger_root()._getStream$0(); + }, + _publish$1(record) { + var t1 = this._controller; + if (t1 != null) { + A._instanceType(t1)._precomputed1._as(record); + if (!t1.get$_mayAddEvent()) + A.throwExpression(t1._addEventError$0()); + t1._sendData$1(record); + } + return null; + }, + set$_controller(_controller) { + this._controller = type$.nullable_StreamController_LogRecord._as(_controller); + } + }; + A.Logger_Logger_closure.prototype = { + call$0() { + var dot, $parent, t1, + thisName = this.name; + if (B.JSString_methods.startsWith$1(thisName, ".")) + A.throwExpression(A.ArgumentError$("name shouldn't start with a '.'", null)); + if (B.JSString_methods.endsWith$1(thisName, ".")) + A.throwExpression(A.ArgumentError$("name shouldn't end with a '.'", null)); + dot = B.JSString_methods.lastIndexOf$1(thisName, "."); + if (dot === -1) + $parent = thisName !== "" ? 
A.Logger_Logger("") : null; + else { + $parent = A.Logger_Logger(B.JSString_methods.substring$2(thisName, 0, dot)); + thisName = B.JSString_methods.substring$1(thisName, dot + 1); + } + t1 = new A.Logger(thisName, $parent, A.LinkedHashMap_LinkedHashMap$_empty(type$.String, type$.Logger)); + if ($parent == null) + t1._level = B.Level_INFO_800; + else + $parent._children.$indexSet(0, thisName, t1); + return t1; + }, + $signature: 23 + }; + (function aliases() { + var _ = J.LegacyJavaScriptObject.prototype; + _.super$LegacyJavaScriptObject$toString = _.toString$0; + _ = A._BroadcastStreamController.prototype; + _.super$_BroadcastStreamController$_addEventError = _._addEventError$0; + })(); + (function installTearOffs() { + var _static_1 = hunkHelpers._static_1, + _static_0 = hunkHelpers._static_0, + _static_2 = hunkHelpers._static_2, + _instance_2_u = hunkHelpers._instance_2u, + _instance_0_u = hunkHelpers._instance_0u; + _static_1(A, "async__AsyncRun__scheduleImmediateJsOverride$closure", "_AsyncRun__scheduleImmediateJsOverride", 4); + _static_1(A, "async__AsyncRun__scheduleImmediateWithSetImmediate$closure", "_AsyncRun__scheduleImmediateWithSetImmediate", 4); + _static_1(A, "async__AsyncRun__scheduleImmediateWithTimer$closure", "_AsyncRun__scheduleImmediateWithTimer", 4); + _static_0(A, "async___startMicrotaskLoop$closure", "_startMicrotaskLoop", 0); + _static_2(A, "async___nullErrorHandler$closure", "_nullErrorHandler", 6); + _static_0(A, "async___nullDoneHandler$closure", "_nullDoneHandler", 0); + _instance_2_u(A._Future.prototype, "get$_completeError", "_completeError$2", 6); + _instance_0_u(A._DoneStreamSubscription.prototype, "get$_onMicrotask", "_onMicrotask$0", 0); + var _; + _instance_2_u(_ = A.FrameCryptor.prototype, "get$encodeFunction", "encodeFunction$2", 9); + _instance_2_u(_, "get$decodeFunction", "decodeFunction$2", 9); + })(); + (function inheritance() { + var _mixin = hunkHelpers.mixin, + _inherit = hunkHelpers.inherit, + _inheritMany = 
hunkHelpers.inheritMany; + _inherit(A.Object, null); + _inheritMany(A.Object, [A.JS_CONST, J.Interceptor, J.ArrayIterator, A._CopyingBytesBuilder, A.Error, A.SentinelValue, A.Iterable, A.ListIterator, A.MappedIterator, A.WhereIterator, A.FixedLengthListMixin, A.Symbol, A.MapView, A.ConstantMap, A._KeysOrValuesOrElementsIterator, A.JSInvocationMirror, A.Closure, A.TypeErrorDecoder, A.NullThrownFromJavaScriptException, A.ExceptionAndStackTrace, A._StackTrace, A._Required, A.MapBase, A.LinkedHashMapCell, A.LinkedHashMapKeyIterator, A._UnmodifiableNativeByteBufferView, A.Rti, A._FunctionParameters, A._Type, A._TimerImpl, A._AsyncAwaitCompleter, A.AsyncError, A.Stream, A._BufferingStreamSubscription, A._BroadcastStreamController, A._Completer, A._FutureListener, A._Future, A._AsyncCallbackEntry, A._DelayedEvent, A._PendingEvents, A._DoneStreamSubscription, A._StreamIterator, A._Zone, A._HashMapKeyIterator, A.ListBase, A._UnmodifiableMapMixin, A.Codec, A.Converter, A._Base64Encoder, A._Base64Decoder, A.DateTime, A._Enum, A.OutOfMemoryError, A.StackOverflowError, A._Exception, A.FormatException, A.Null, A._StringStackTrace, A.StringBuffer, A.NullRejectionException, A._JSSecureRandom, A.FrameInfo, A.FrameCryptor, A.KeyOptions, A.KeyProvider, A.KeySet, A.ParticipantKeyHandler, A.SifGuard, A.Level, A.LogRecord, A.Logger]); + _inheritMany(J.Interceptor, [J.JSBool, J.JSNull, J.JavaScriptObject, J.JavaScriptBigInt, J.JavaScriptSymbol, J.JSNumber, J.JSString]); + _inheritMany(J.JavaScriptObject, [J.LegacyJavaScriptObject, J.JSArray, A.NativeByteBuffer, A.NativeTypedData]); + _inheritMany(J.LegacyJavaScriptObject, [J.PlainJavaScriptObject, J.UnknownJavaScriptObject, J.JavaScriptFunction]); + _inherit(J.JSUnmodifiableArray, J.JSArray); + _inheritMany(J.JSNumber, [J.JSInt, J.JSNumNotInt]); + _inheritMany(A.Error, [A.LateError, A.TypeError, A.JsNoSuchMethodError, A.UnknownJsTypeError, A._CyclicInitializationError, A.RuntimeError, A.AssertionError, A._Error, A.ArgumentError, 
A.NoSuchMethodError, A.UnsupportedError, A.UnimplementedError, A.StateError, A.ConcurrentModificationError]); + _inheritMany(A.Iterable, [A.EfficientLengthIterable, A.MappedIterable, A.WhereIterable, A._KeysOrValues]); + _inheritMany(A.EfficientLengthIterable, [A.ListIterable, A.LinkedHashMapKeysIterable, A._HashMapKeyIterable]); + _inherit(A.EfficientLengthMappedIterable, A.MappedIterable); + _inherit(A.MappedListIterable, A.ListIterable); + _inherit(A._UnmodifiableMapView_MapView__UnmodifiableMapMixin, A.MapView); + _inherit(A.UnmodifiableMapView, A._UnmodifiableMapView_MapView__UnmodifiableMapMixin); + _inherit(A.ConstantMapView, A.UnmodifiableMapView); + _inherit(A.ConstantStringMap, A.ConstantMap); + _inheritMany(A.Closure, [A.Closure2Args, A.Closure0Args, A.TearOffClosure, A.initHooks_closure, A.initHooks_closure1, A._AsyncRun__initializeScheduleImmediate_internalCallback, A._AsyncRun__initializeScheduleImmediate_closure, A._awaitOnObject_closure, A._SyncBroadcastStreamController__sendData_closure, A._Future__chainForeignFuture_closure, A._Future__propagateToListeners_handleWhenCompleteCallback_closure, A.Stream_length_closure, A.jsify__convert, A.promiseToFuture_closure, A.promiseToFuture_closure0, A.dartify_convert, A.getTrackCryptor_closure, A.unsetCryptorParticipant_closure, A.main_closure, A.main_closure0, A.main_closure2, A.main__closure, A.main__closure0, A.main__closure1, A.main__closure2, A.main_closure1]); + _inheritMany(A.Closure2Args, [A.Primitives_functionNoSuchMethod_closure, A.initHooks_closure0, A._awaitOnObject_closure0, A._wrapJsFunctionForAsync_closure, A._Future__chainForeignFuture_closure0, A._Future__propagateToListeners_handleWhenCompleteCallback_closure0, A.MapBase_mapToString_closure, A.NoSuchMethodError_toString_closure]); + _inherit(A.NullError, A.TypeError); + _inheritMany(A.TearOffClosure, [A.StaticClosure, A.BoundClosure]); + _inherit(A._AssertionError, A.AssertionError); + _inheritMany(A.MapBase, [A.JsLinkedHashMap, 
A._HashMap]); + _inheritMany(A.NativeTypedData, [A.NativeByteData, A.NativeTypedArray]); + _inheritMany(A.NativeTypedArray, [A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin, A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin]); + _inherit(A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin, A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin); + _inherit(A.NativeTypedArrayOfDouble, A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin); + _inherit(A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin, A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin); + _inherit(A.NativeTypedArrayOfInt, A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin); + _inheritMany(A.NativeTypedArrayOfDouble, [A.NativeFloat32List, A.NativeFloat64List]); + _inheritMany(A.NativeTypedArrayOfInt, [A.NativeInt16List, A.NativeInt32List, A.NativeInt8List, A.NativeUint16List, A.NativeUint32List, A.NativeUint8ClampedList, A.NativeUint8List]); + _inherit(A._TypeError, A._Error); + _inheritMany(A.Closure0Args, [A._AsyncRun__scheduleImmediateJsOverride_internalCallback, A._AsyncRun__scheduleImmediateWithSetImmediate_internalCallback, A._TimerImpl_internalCallback, A._Future__addListener_closure, A._Future__prependListeners_closure, A._Future__chainForeignFuture_closure1, A._Future__chainCoreFuture_closure, A._Future__asyncCompleteWithValue_closure, A._Future__asyncCompleteError_closure, A._Future__propagateToListeners_handleWhenCompleteCallback, A._Future__propagateToListeners_handleValueCallback, A._Future__propagateToListeners_handleError, A.Stream_length_closure0, A._PendingEvents_schedule_closure, A._rootHandleError_closure, A._RootZone_bindCallbackGuarded_closure, A.FrameCryptor_decodeFunction_decryptFrameInternal, A.FrameCryptor_decodeFunction_ratchedKeyInternal, A.Logger_Logger_closure]); + _inherit(A._StreamImpl, A.Stream); + _inherit(A._ControllerStream, A._StreamImpl); + 
_inherit(A._BroadcastStream, A._ControllerStream); + _inherit(A._ControllerSubscription, A._BufferingStreamSubscription); + _inherit(A._BroadcastSubscription, A._ControllerSubscription); + _inherit(A._SyncBroadcastStreamController, A._BroadcastStreamController); + _inherit(A._AsyncCompleter, A._Completer); + _inherit(A._DelayedData, A._DelayedEvent); + _inherit(A._RootZone, A._Zone); + _inherit(A._IdentityHashMap, A._HashMap); + _inherit(A.Base64Codec, A.Codec); + _inheritMany(A.Converter, [A.Base64Encoder, A.Base64Decoder]); + _inheritMany(A.ArgumentError, [A.RangeError, A.IndexError]); + _inherit(A.CryptorError, A._Enum); + _mixin(A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin, A.ListBase); + _mixin(A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin, A.FixedLengthListMixin); + _mixin(A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin, A.ListBase); + _mixin(A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin, A.FixedLengthListMixin); + _mixin(A._UnmodifiableMapView_MapView__UnmodifiableMapMixin, A._UnmodifiableMapMixin); + })(); + var init = { + typeUniverse: {eC: new Map(), tR: {}, eT: {}, tPV: {}, sEA: []}, + mangledGlobalNames: {int: "int", double: "double", num: "num", String: "String", bool: "bool", Null: "Null", List: "List", Object: "Object", Map: "Map"}, + mangledNames: {}, + types: ["~()", "bool(FrameCryptor)", "Null(@)", "~(@)", "~(~())", "Null()", "~(Object,StackTrace)", "Null(Object,StackTrace)", "Object?(Object?)", "Future<~>(JSObject,JSObject)", "Future<~>()", "Null(JSObject)", "~(String,@)", "@(@)", "@(@,String)", "@(String)", "Null(~())", "Null(@,StackTrace)", "~(int,@)", "~(Object?,Object?)", "~(Symbol0,@)", "~(LogRecord)", "Future(JSObject)", "Logger()"], + interceptorsByTag: null, + leafTags: null, + arrayRti: Symbol("$ti") + }; + A._Universe_addRules(init.typeUniverse, 
JSON.parse('{"JavaScriptFunction":"LegacyJavaScriptObject","PlainJavaScriptObject":"LegacyJavaScriptObject","UnknownJavaScriptObject":"LegacyJavaScriptObject","JSBool":{"bool":[],"TrustedGetRuntimeType":[]},"JSNull":{"Null":[],"TrustedGetRuntimeType":[]},"JavaScriptObject":{"JSObject":[]},"LegacyJavaScriptObject":{"JSObject":[]},"JSArray":{"List":["1"],"EfficientLengthIterable":["1"],"JSObject":[],"Iterable":["1"]},"JSUnmodifiableArray":{"JSArray":["1"],"List":["1"],"EfficientLengthIterable":["1"],"JSObject":[],"Iterable":["1"]},"ArrayIterator":{"Iterator":["1"]},"JSNumber":{"double":[],"num":[]},"JSInt":{"double":[],"int":[],"num":[],"TrustedGetRuntimeType":[]},"JSNumNotInt":{"double":[],"num":[],"TrustedGetRuntimeType":[]},"JSString":{"String":[],"Pattern":[],"TrustedGetRuntimeType":[]},"_CopyingBytesBuilder":{"BytesBuilder":[]},"LateError":{"Error":[]},"EfficientLengthIterable":{"Iterable":["1"]},"ListIterable":{"EfficientLengthIterable":["1"],"Iterable":["1"]},"ListIterator":{"Iterator":["1"]},"MappedIterable":{"Iterable":["2"],"Iterable.E":"2"},"EfficientLengthMappedIterable":{"MappedIterable":["1","2"],"EfficientLengthIterable":["2"],"Iterable":["2"],"Iterable.E":"2"},"MappedIterator":{"Iterator":["2"]},"MappedListIterable":{"ListIterable":["2"],"EfficientLengthIterable":["2"],"Iterable":["2"],"Iterable.E":"2","ListIterable.E":"2"},"WhereIterable":{"Iterable":["1"],"Iterable.E":"1"},"WhereIterator":{"Iterator":["1"]},"Symbol":{"Symbol0":[]},"ConstantMapView":{"UnmodifiableMapView":["1","2"],"_UnmodifiableMapView_MapView__UnmodifiableMapMixin":["1","2"],"MapView":["1","2"],"_UnmodifiableMapMixin":["1","2"],"Map":["1","2"]},"ConstantMap":{"Map":["1","2"]},"ConstantStringMap":{"ConstantMap":["1","2"],"Map":["1","2"]},"_KeysOrValues":{"Iterable":["1"],"Iterable.E":"1"},"_KeysOrValuesOrElementsIterator":{"Iterator":["1"]},"JSInvocationMirror":{"Invocation":[]},"NullError":{"TypeError":[],"Error":[]},"JsNoSuchMethodError":{"Error":[]},"UnknownJsTypeError":{"Error":[
]},"_StackTrace":{"StackTrace":[]},"Closure":{"Function":[]},"Closure0Args":{"Function":[]},"Closure2Args":{"Function":[]},"TearOffClosure":{"Function":[]},"StaticClosure":{"Function":[]},"BoundClosure":{"Function":[]},"_CyclicInitializationError":{"Error":[]},"RuntimeError":{"Error":[]},"_AssertionError":{"Error":[]},"JsLinkedHashMap":{"MapBase":["1","2"],"LinkedHashMap":["1","2"],"Map":["1","2"]},"LinkedHashMapKeysIterable":{"EfficientLengthIterable":["1"],"Iterable":["1"],"Iterable.E":"1"},"LinkedHashMapKeyIterator":{"Iterator":["1"]},"NativeByteBuffer":{"JSObject":[],"ByteBuffer":[],"TrustedGetRuntimeType":[]},"NativeTypedData":{"JSObject":[]},"_UnmodifiableNativeByteBufferView":{"ByteBuffer":[]},"NativeByteData":{"ByteData":[],"JSObject":[],"TrustedGetRuntimeType":[]},"NativeTypedArray":{"JavaScriptIndexingBehavior":["1"],"JSObject":[]},"NativeTypedArrayOfDouble":{"ListBase":["double"],"NativeTypedArray":["double"],"List":["double"],"JavaScriptIndexingBehavior":["double"],"EfficientLengthIterable":["double"],"JSObject":[],"Iterable":["double"],"FixedLengthListMixin":["double"]},"NativeTypedArrayOfInt":{"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"]},"NativeFloat32List":{"Float32List":[],"ListBase":["double"],"NativeTypedArray":["double"],"List":["double"],"JavaScriptIndexingBehavior":["double"],"EfficientLengthIterable":["double"],"JSObject":[],"Iterable":["double"],"FixedLengthListMixin":["double"],"TrustedGetRuntimeType":[],"ListBase.E":"double"},"NativeFloat64List":{"Float64List":[],"ListBase":["double"],"NativeTypedArray":["double"],"List":["double"],"JavaScriptIndexingBehavior":["double"],"EfficientLengthIterable":["double"],"JSObject":[],"Iterable":["double"],"FixedLengthListMixin":["double"],"TrustedGetRuntimeType":[],"ListBase.E":"double"},"NativeInt16List":{"Int16List":[],"ListBase":["int"],"NativeTypedArr
ay":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeInt32List":{"Int32List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeInt8List":{"Int8List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint16List":{"Uint16List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint32List":{"Uint32List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint8ClampedList":{"Uint8ClampedList":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint8List":{"Uint8List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"_Error":{"Error":[]},"_TypeError":{"TypeError":[],"Error":[]},"_BufferingStreamSubscription":{"StreamSubscriptio
n":["1"],"_EventDispatch":["1"]},"AsyncError":{"Error":[]},"_BroadcastStream":{"_ControllerStream":["1"],"_StreamImpl":["1"],"Stream":["1"]},"_BroadcastSubscription":{"_ControllerSubscription":["1"],"_BufferingStreamSubscription":["1"],"StreamSubscription":["1"],"_EventDispatch":["1"]},"_BroadcastStreamController":{"StreamController":["1"],"_StreamControllerLifecycle":["1"],"_EventDispatch":["1"]},"_SyncBroadcastStreamController":{"_BroadcastStreamController":["1"],"StreamController":["1"],"_StreamControllerLifecycle":["1"],"_EventDispatch":["1"]},"_AsyncCompleter":{"_Completer":["1"]},"_Future":{"Future":["1"]},"_ControllerStream":{"_StreamImpl":["1"],"Stream":["1"]},"_ControllerSubscription":{"_BufferingStreamSubscription":["1"],"StreamSubscription":["1"],"_EventDispatch":["1"]},"_StreamImpl":{"Stream":["1"]},"_DelayedData":{"_DelayedEvent":["1"]},"_DoneStreamSubscription":{"StreamSubscription":["1"]},"_Zone":{"Zone":[]},"_RootZone":{"_Zone":[],"Zone":[]},"_HashMap":{"MapBase":["1","2"],"Map":["1","2"]},"_IdentityHashMap":{"_HashMap":["1","2"],"MapBase":["1","2"],"Map":["1","2"]},"_HashMapKeyIterable":{"EfficientLengthIterable":["1"],"Iterable":["1"],"Iterable.E":"1"},"_HashMapKeyIterator":{"Iterator":["1"]},"MapBase":{"Map":["1","2"]},"MapView":{"Map":["1","2"]},"UnmodifiableMapView":{"_UnmodifiableMapView_MapView__UnmodifiableMapMixin":["1","2"],"MapView":["1","2"],"_UnmodifiableMapMixin":["1","2"],"Map":["1","2"]},"Base64Codec":{"Codec":["List","String"],"Codec.S":"List"},"double":{"num":[]},"int":{"num":[]},"List":{"EfficientLengthIterable":["1"],"Iterable":["1"]},"String":{"Pattern":[]},"AssertionError":{"Error":[]},"TypeError":{"Error":[]},"ArgumentError":{"Error":[]},"RangeError":{"Error":[]},"IndexError":{"Error":[]},"NoSuchMethodError":{"Error":[]},"UnsupportedError":{"Error":[]},"UnimplementedError":{"Error":[]},"StateError":{"Error":[]},"ConcurrentModificationError":{"Error":[]},"OutOfMemoryError":{"Error":[]},"StackOverflowError":{"Error":[]},"_StringS
tackTrace":{"StackTrace":[]},"Int8List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint8List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint8ClampedList":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Int16List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint16List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Int32List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint32List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Float32List":{"List":["double"],"EfficientLengthIterable":["double"],"Iterable":["double"]},"Float64List":{"List":["double"],"EfficientLengthIterable":["double"],"Iterable":["double"]}}')); + A._Universe_addErasedTypes(init.typeUniverse, JSON.parse('{"EfficientLengthIterable":1,"NativeTypedArray":1,"_DelayedEvent":1,"Converter":2}')); + var string$ = { + Cannot: "Cannot fire new event. Controller is already firing an event", + Error_: "Error handler must accept one Object or one Object and a StackTrace as arguments, and return a value of the returned future's type", + ___las: "]: lastError != CryptorError.kOk, reset state to kNew", + decode: "decodeFunction::decryptFrameInternal: decrypted: " + }; + var type$ = (function rtii() { + var findType = A.findType; + return { + $env_1_1_void: findType("@<~>"), + AsyncError: findType("AsyncError"), + Base64Codec: findType("Base64Codec"), + ByteBuffer: findType("ByteBuffer"), + ByteData: findType("ByteData"), + ConstantMapView_Symbol_dynamic: findType("ConstantMapView"), + EfficientLengthIterable_dynamic: findType("EfficientLengthIterable<@>"), + Error: findType("Error"), + Float32List: findType("Float32List"), + Float64List: findType("Float64List"), + FrameCryptor: findType("FrameCryptor"), + Function: findType("Function"), + Future_dynamic: findType("Future<@>"), + Future_void_Function_JSObject_JSObject: 
findType("Future<~>(JSObject,JSObject)"), + Int16List: findType("Int16List"), + Int32List: findType("Int32List"), + Int8List: findType("Int8List"), + Invocation: findType("Invocation"), + Iterable_dynamic: findType("Iterable<@>"), + Iterable_int: findType("Iterable"), + Iterable_nullable_Object: findType("Iterable"), + JSArray_String: findType("JSArray"), + JSArray_dynamic: findType("JSArray<@>"), + JSArray_int: findType("JSArray"), + JSArray_nullable_Object: findType("JSArray"), + JSNull: findType("JSNull"), + JSObject: findType("JSObject"), + JavaScriptFunction: findType("JavaScriptFunction"), + JavaScriptIndexingBehavior_dynamic: findType("JavaScriptIndexingBehavior<@>"), + JsLinkedHashMap_Symbol_dynamic: findType("JsLinkedHashMap"), + KeySet: findType("KeySet"), + List_dynamic: findType("List<@>"), + List_int: findType("List"), + List_nullable_KeySet: findType("List"), + LogRecord: findType("LogRecord"), + Logger: findType("Logger"), + Map_dynamic_dynamic: findType("Map<@,@>"), + Map_of_nullable_Object_and_nullable_Object: findType("Map"), + NativeByteBuffer: findType("NativeByteBuffer"), + Null: findType("Null"), + Object: findType("Object"), + ParticipantKeyHandler: findType("ParticipantKeyHandler"), + Record: findType("Record"), + StackTrace: findType("StackTrace"), + String: findType("String"), + Symbol: findType("Symbol0"), + TrustedGetRuntimeType: findType("TrustedGetRuntimeType"), + TypeError: findType("TypeError"), + Uint16List: findType("Uint16List"), + Uint32List: findType("Uint32List"), + Uint8ClampedList: findType("Uint8ClampedList"), + Uint8List: findType("Uint8List"), + UnknownJavaScriptObject: findType("UnknownJavaScriptObject"), + _Future_dynamic: findType("_Future<@>"), + _Future_int: findType("_Future"), + _IdentityHashMap_of_nullable_Object_and_nullable_Object: findType("_IdentityHashMap"), + _SyncBroadcastStreamController_LogRecord: findType("_SyncBroadcastStreamController"), + bool: findType("bool"), + bool_Function_Object: 
findType("bool(Object)"), + double: findType("double"), + dynamic: findType("@"), + dynamic_Function: findType("@()"), + dynamic_Function_Object: findType("@(Object)"), + dynamic_Function_Object_StackTrace: findType("@(Object,StackTrace)"), + int: findType("int"), + legacy_Never: findType("0&*"), + legacy_Object: findType("Object*"), + nullable_Future_Null: findType("Future?"), + nullable_KeySet: findType("KeySet?"), + nullable_Object: findType("Object?"), + nullable_StreamController_LogRecord: findType("StreamController?"), + nullable_String: findType("String?"), + nullable_Uint8List: findType("Uint8List?"), + nullable__FutureListener_dynamic_dynamic: findType("_FutureListener<@,@>?"), + nullable_void_Function: findType("~()?"), + num: findType("num"), + void: findType("~"), + void_Function: findType("~()"), + void_Function_Object: findType("~(Object)"), + void_Function_Object_StackTrace: findType("~(Object,StackTrace)") + }; + })(); + (function constants() { + var makeConstList = hunkHelpers.makeConstList; + B.Interceptor_methods = J.Interceptor.prototype; + B.JSArray_methods = J.JSArray.prototype; + B.JSInt_methods = J.JSInt.prototype; + B.JSString_methods = J.JSString.prototype; + B.JavaScriptFunction_methods = J.JavaScriptFunction.prototype; + B.JavaScriptObject_methods = J.JavaScriptObject.prototype; + B.NativeByteData_methods = A.NativeByteData.prototype; + B.NativeUint8List_methods = A.NativeUint8List.prototype; + B.PlainJavaScriptObject_methods = J.PlainJavaScriptObject.prototype; + B.UnknownJavaScriptObject_methods = J.UnknownJavaScriptObject.prototype; + B.C_Base64Decoder = new A.Base64Decoder(); + B.C_Base64Encoder = new A.Base64Encoder(); + B.C_JS_CONST = function getTagFallback(o) { + var s = Object.prototype.toString.call(o); + return s.substring(8, s.length - 1); +}; + B.C_JS_CONST0 = function() { + var toStringFunction = Object.prototype.toString; + function getTag(o) { + var s = toStringFunction.call(o); + return s.substring(8, s.length - 1); + } 
+ function getUnknownTag(object, tag) { + if (/^HTML[A-Z].*Element$/.test(tag)) { + var name = toStringFunction.call(object); + if (name == "[object Object]") return null; + return "HTMLElement"; + } + } + function getUnknownTagGenericBrowser(object, tag) { + if (object instanceof HTMLElement) return "HTMLElement"; + return getUnknownTag(object, tag); + } + function prototypeForTag(tag) { + if (typeof window == "undefined") return null; + if (typeof window[tag] == "undefined") return null; + var constructor = window[tag]; + if (typeof constructor != "function") return null; + return constructor.prototype; + } + function discriminator(tag) { return null; } + var isBrowser = typeof HTMLElement == "function"; + return { + getTag: getTag, + getUnknownTag: isBrowser ? getUnknownTagGenericBrowser : getUnknownTag, + prototypeForTag: prototypeForTag, + discriminator: discriminator }; +}; + B.C_JS_CONST6 = function(getTagFallback) { + return function(hooks) { + if (typeof navigator != "object") return hooks; + var userAgent = navigator.userAgent; + if (typeof userAgent != "string") return hooks; + if (userAgent.indexOf("DumpRenderTree") >= 0) return hooks; + if (userAgent.indexOf("Chrome") >= 0) { + function confirm(p) { + return typeof window == "object" && window[p] && window[p].name == p; + } + if (confirm("Window") && confirm("HTMLElement")) return hooks; + } + hooks.getTag = getTagFallback; + }; +}; + B.C_JS_CONST1 = function(hooks) { + if (typeof dartExperimentalFixupGetTag != "function") return hooks; + hooks.getTag = dartExperimentalFixupGetTag(hooks.getTag); +}; + B.C_JS_CONST5 = function(hooks) { + if (typeof navigator != "object") return hooks; + var userAgent = navigator.userAgent; + if (typeof userAgent != "string") return hooks; + if (userAgent.indexOf("Firefox") == -1) return hooks; + var getTag = hooks.getTag; + var quickMap = { + "BeforeUnloadEvent": "Event", + "DataTransfer": "Clipboard", + "GeoGeolocation": "Geolocation", + "Location": "!Location", + 
"WorkerMessageEvent": "MessageEvent", + "XMLDocument": "!Document"}; + function getTagFirefox(o) { + var tag = getTag(o); + return quickMap[tag] || tag; + } + hooks.getTag = getTagFirefox; +}; + B.C_JS_CONST4 = function(hooks) { + if (typeof navigator != "object") return hooks; + var userAgent = navigator.userAgent; + if (typeof userAgent != "string") return hooks; + if (userAgent.indexOf("Trident/") == -1) return hooks; + var getTag = hooks.getTag; + var quickMap = { + "BeforeUnloadEvent": "Event", + "DataTransfer": "Clipboard", + "HTMLDDElement": "HTMLElement", + "HTMLDTElement": "HTMLElement", + "HTMLPhraseElement": "HTMLElement", + "Position": "Geoposition" + }; + function getTagIE(o) { + var tag = getTag(o); + var newTag = quickMap[tag]; + if (newTag) return newTag; + if (tag == "Object") { + if (window.DataView && (o instanceof window.DataView)) return "DataView"; + } + return tag; + } + function prototypeForTagIE(tag) { + var constructor = window[tag]; + if (constructor == null) return null; + return constructor.prototype; + } + hooks.getTag = getTagIE; + hooks.prototypeForTag = prototypeForTagIE; +}; + B.C_JS_CONST2 = function(hooks) { + var getTag = hooks.getTag; + var prototypeForTag = hooks.prototypeForTag; + function getTagFixed(o) { + var tag = getTag(o); + if (tag == "Document") { + if (!!o.xmlVersion) return "!Document"; + return "!HTMLDocument"; + } + return tag; + } + function prototypeForTagFixed(tag) { + if (tag == "Document") return null; + return prototypeForTag(tag); + } + hooks.getTag = getTagFixed; + hooks.prototypeForTag = prototypeForTagFixed; +}; + B.C_JS_CONST3 = function(hooks) { return hooks; } +; + B.C_OutOfMemoryError = new A.OutOfMemoryError(); + B.C_SentinelValue = new A.SentinelValue(); + B.C__Required = new A._Required(); + B.C__RootZone = new A._RootZone(); + B.C__StringStackTrace = new A._StringStackTrace(); + B.CryptorError_0 = new A.CryptorError("kNew"); + B.CryptorError_1 = new A.CryptorError("kOk"); + B.CryptorError_2 = new 
A.CryptorError("kDecryptError"); + B.CryptorError_3 = new A.CryptorError("kEncryptError"); + B.CryptorError_5 = new A.CryptorError("kMissingKey"); + B.CryptorError_6 = new A.CryptorError("kKeyRatcheted"); + B.CryptorError_7 = new A.CryptorError("kInternalError"); + B.CryptorError_8 = new A.CryptorError("kDisposed"); + B.Level_CONFIG_700 = new A.Level("CONFIG", 700); + B.Level_FINER_400 = new A.Level("FINER", 400); + B.Level_FINE_500 = new A.Level("FINE", 500); + B.Level_INFO_800 = new A.Level("INFO", 800); + B.Level_WARNING_900 = new A.Level("WARNING", 900); + B.List_empty = A._setArrayType(makeConstList([]), type$.JSArray_dynamic); + B.Object_empty = {}; + B.Map_empty = new A.ConstantStringMap(B.Object_empty, [], A.findType("ConstantStringMap")); + B.Symbol_call = new A.Symbol("call"); + B.Type_ByteBuffer_rqD = A.typeLiteral("ByteBuffer"); + B.Type_ByteData_9dB = A.typeLiteral("ByteData"); + B.Type_Float32List_9Kz = A.typeLiteral("Float32List"); + B.Type_Float64List_9Kz = A.typeLiteral("Float64List"); + B.Type_Int16List_s5h = A.typeLiteral("Int16List"); + B.Type_Int32List_O8Z = A.typeLiteral("Int32List"); + B.Type_Int8List_rFV = A.typeLiteral("Int8List"); + B.Type_JSObject_ttY = A.typeLiteral("JSObject"); + B.Type_Object_A4p = A.typeLiteral("Object"); + B.Type_Uint16List_kmP = A.typeLiteral("Uint16List"); + B.Type_Uint32List_kmP = A.typeLiteral("Uint32List"); + B.Type_Uint8ClampedList_04U = A.typeLiteral("Uint8ClampedList"); + B.Type_Uint8List_8Eb = A.typeLiteral("Uint8List"); + })(); + (function staticFields() { + $._JS_INTEROP_INTERCEPTOR_TAG = null; + $.toStringVisiting = A._setArrayType([], A.findType("JSArray")); + $.Primitives__identityHashCodeProperty = null; + $.BoundClosure__receiverFieldNameCache = null; + $.BoundClosure__interceptorFieldNameCache = null; + $.getTagFunction = null; + $.alternateTagFunction = null; + $.prototypeForTagFunction = null; + $.dispatchRecordsForInstanceTags = null; + $.interceptorsForUncacheableTags = null; + 
$.initNativeDispatchFlag = null; + $._nextCallback = null; + $._lastCallback = null; + $._lastPriorityCallback = null; + $._isInCallbackLoop = false; + $.Zone__current = B.C__RootZone; + $.participantCryptors = A._setArrayType([], A.findType("JSArray")); + $.keyProviders = A.LinkedHashMap_LinkedHashMap$_empty(type$.String, A.findType("KeyProvider")); + $.LogRecord__nextNumber = 0; + $.Logger__loggers = A.LinkedHashMap_LinkedHashMap$_empty(type$.String, type$.Logger); + })(); + (function lazyInitializers() { + var _lazyFinal = hunkHelpers.lazyFinal, + _lazy = hunkHelpers.lazy; + _lazyFinal($, "DART_CLOSURE_PROPERTY_NAME", "$get$DART_CLOSURE_PROPERTY_NAME", () => A.getIsolateAffinityTag("_$dart_dartClosure")); + _lazyFinal($, "_CopyingBytesBuilder__emptyList", "$get$_CopyingBytesBuilder__emptyList", () => A.NativeUint8List_NativeUint8List(0)); + _lazyFinal($, "TypeErrorDecoder_noSuchMethodPattern", "$get$TypeErrorDecoder_noSuchMethodPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn({ + toString: function() { + return "$receiver$"; + } + }))); + _lazyFinal($, "TypeErrorDecoder_notClosurePattern", "$get$TypeErrorDecoder_notClosurePattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn({$method$: null, + toString: function() { + return "$receiver$"; + } + }))); + _lazyFinal($, "TypeErrorDecoder_nullCallPattern", "$get$TypeErrorDecoder_nullCallPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn(null))); + _lazyFinal($, "TypeErrorDecoder_nullLiteralCallPattern", "$get$TypeErrorDecoder_nullLiteralCallPattern", () => A.TypeErrorDecoder_extractPattern(function() { + var $argumentsExpr$ = "$arguments$"; + try { + null.$method$($argumentsExpr$); + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "TypeErrorDecoder_undefinedCallPattern", "$get$TypeErrorDecoder_undefinedCallPattern", () => 
A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn(void 0))); + _lazyFinal($, "TypeErrorDecoder_undefinedLiteralCallPattern", "$get$TypeErrorDecoder_undefinedLiteralCallPattern", () => A.TypeErrorDecoder_extractPattern(function() { + var $argumentsExpr$ = "$arguments$"; + try { + (void 0).$method$($argumentsExpr$); + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "TypeErrorDecoder_nullPropertyPattern", "$get$TypeErrorDecoder_nullPropertyPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokePropertyErrorOn(null))); + _lazyFinal($, "TypeErrorDecoder_nullLiteralPropertyPattern", "$get$TypeErrorDecoder_nullLiteralPropertyPattern", () => A.TypeErrorDecoder_extractPattern(function() { + try { + null.$method$; + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "TypeErrorDecoder_undefinedPropertyPattern", "$get$TypeErrorDecoder_undefinedPropertyPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokePropertyErrorOn(void 0))); + _lazyFinal($, "TypeErrorDecoder_undefinedLiteralPropertyPattern", "$get$TypeErrorDecoder_undefinedLiteralPropertyPattern", () => A.TypeErrorDecoder_extractPattern(function() { + try { + (void 0).$method$; + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "_AsyncRun__scheduleImmediateClosure", "$get$_AsyncRun__scheduleImmediateClosure", () => A._AsyncRun__initializeScheduleImmediate()); + _lazyFinal($, "_Base64Decoder__inverseAlphabet", "$get$_Base64Decoder__inverseAlphabet", () => new Int8Array(A._ensureNativeList(A._setArrayType([-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -1, -2, -2, -2, -2, -2, 62, -2, 62, -2, 63, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -2, -2, -2, -1, -2, -2, -2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -2, -2, -2, -2, 63, -2, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -2, -2, -2, -2, -2], type$.JSArray_int)))); + _lazy($, "_Base64Decoder__emptyBuffer", "$get$_Base64Decoder__emptyBuffer", () => A.NativeUint8List_NativeUint8List(0)); + _lazyFinal($, "_hashSeed", "$get$_hashSeed", () => A.objectHashCode(B.Type_Object_A4p)); + _lazyFinal($, "Random__secureRandom", "$get$Random__secureRandom", () => { + var t1 = new A._JSSecureRandom(A.NativeByteData_NativeByteData(8)); + t1._JSSecureRandom$0(); + return t1; + }); + _lazyFinal($, "logger", "$get$logger", () => A.Logger_Logger("E2EE.Worker")); + _lazyFinal($, "Logger_root", "$get$Logger_root", () => A.Logger_Logger("")); + })(); + (function nativeSupport() { + !function() { + var intern = function(s) { + var o = {}; + o[s] = 1; + return Object.keys(hunkHelpers.convertToFastObject(o))[0]; + }; + init.getIsolateTag = function(name) { + return intern("___dart_" + name + init.isolateTag); + }; + var tableProperty = "___dart_isolate_tags_"; + var usedProperties = Object[tableProperty] || (Object[tableProperty] = Object.create(null)); + var rootProperty = "_ZxYxX"; + for (var i = 0;; i++) { + var property = intern(rootProperty + "_" + i + "_"); + if (!(property in usedProperties)) { + usedProperties[property] = 1; + init.isolateTag = property; + break; + } + } + init.dispatchPropertyName = init.getIsolateTag("dispatch_record"); + }(); + hunkHelpers.setOrUpdateInterceptorsByTag({ArrayBuffer: A.NativeByteBuffer, ArrayBufferView: A.NativeTypedData, DataView: A.NativeByteData, Float32Array: A.NativeFloat32List, Float64Array: A.NativeFloat64List, Int16Array: A.NativeInt16List, Int32Array: A.NativeInt32List, Int8Array: A.NativeInt8List, Uint16Array: A.NativeUint16List, Uint32Array: A.NativeUint32List, Uint8ClampedArray: A.NativeUint8ClampedList, CanvasPixelArray: A.NativeUint8ClampedList, Uint8Array: A.NativeUint8List}); + hunkHelpers.setOrUpdateLeafTags({ArrayBuffer: true, ArrayBufferView: false, DataView: true, Float32Array: true, 
Float64Array: true, Int16Array: true, Int32Array: true, Int8Array: true, Uint16Array: true, Uint32Array: true, Uint8ClampedArray: true, CanvasPixelArray: true, Uint8Array: false}); + A.NativeTypedArray.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A.NativeTypedArrayOfDouble.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A.NativeTypedArrayOfInt.$nativeSuperclassTag = "ArrayBufferView"; + })(); + Function.prototype.call$1 = function(a) { + return this(a); + }; + Function.prototype.call$0 = function() { + return this(); + }; + Function.prototype.call$2 = function(a, b) { + return this(a, b); + }; + Function.prototype.call$3 = function(a, b, c) { + return this(a, b, c); + }; + Function.prototype.call$4 = function(a, b, c, d) { + return this(a, b, c, d); + }; + Function.prototype.call$1$1 = function(a) { + return this(a); + }; + convertAllToFastObject(holders); + convertToFastObject($); + (function(callback) { + if (typeof document === "undefined") { + callback(null); + return; + } + if (typeof document.currentScript != "undefined") { + callback(document.currentScript); + return; + } + var scripts = document.scripts; + function onLoad(event) { + for (var i = 0; i < scripts.length; ++i) { + scripts[i].removeEventListener("load", onLoad, false); + } + callback(event.target); + } + for (var i = 0; i < scripts.length; ++i) { + scripts[i].addEventListener("load", onLoad, false); + } + })(function(currentScript) { + init.currentScript = currentScript; + var callMain = A.main; + if (typeof dartMainRunner === "function") { + dartMainRunner(callMain, []); + } 
else { + callMain([]); + } + }); +})(); + +//# sourceMappingURL=e2ee.worker.dart.js.map diff --git a/example/web/e2ee.worker.dart.js.deps b/example/web/e2ee.worker.dart.js.deps new file mode 100644 index 0000000000..45a7692f9d --- /dev/null +++ b/example/web/e2ee.worker.dart.js.deps @@ -0,0 +1,439 @@ +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/collection.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/algorithms.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/boollist.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/canonicalized_map.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_iterable.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_iterator.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_list.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_map.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/comparators.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/empty_unmodifiable_set.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/equality.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/equality_map.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/equality_set.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/functions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/iterable_extensions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/iterable_zip.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/list_extensions.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/priority_queue.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/queue_list.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/union_set.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/union_set_controller.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/unmodifiable_wrappers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/utils.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/wrappers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/js-0.7.1/lib/js.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/js-0.7.1/lib/js_util.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/logging.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/level.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/log_record.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/logger.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/accelerometer.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/angle_instanced_arrays.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/attribution_reporting_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/background_sync.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/battery_status.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/clipboard_apis.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/compression.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/console.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/cookie_store.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/credential_management.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/csp.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_animations.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_animations_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_cascade.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_cascade_6.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_conditional.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_conditional_5.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_contain.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_counter_styles.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_font_loading.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_fonts.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_highlight_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_masking.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_paint_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_properties_values_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_transitions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_transitions_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_typed_om.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_view_transitions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_view_transitions_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/cssom.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/cssom_view.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/digital_identities.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/dom.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/dom_parsing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/encoding.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/encrypted_media.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/entries_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/event_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_blend_minmax.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_color_buffer_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_color_buffer_half_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_disjoint_timer_query.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_disjoint_timer_query_webgl2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_float_blend.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_frag_depth.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_shader_texture_lod.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_srgb.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_compression_bptc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_compression_rgtc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_filter_anisotropic.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_norm16.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fedcm.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fetch.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fido.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fileapi.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/filter_effects.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fs.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fullscreen.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/gamepad.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/generic_sensor.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/geolocation.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/geometry.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/gyroscope.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/hr_time.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/html.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/image_capture.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/indexeddb.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/intersection_observer.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/khr_parallel_shader_compile.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/largest_contentful_paint.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mathml_core.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/media_capabilities.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/media_playback_quality.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/media_source.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediacapture_fromelement.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediacapture_streams.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediacapture_transform.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediasession.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediastream_recording.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mst_content_hint.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/navigation_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/netinfo.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/notifications.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_draw_buffers_indexed.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_element_index_uint.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_fbo_render_mipmap.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_standard_derivatives.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_float_linear.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_half_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_half_float_linear.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_vertex_array_object.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/orientation_event.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/orientation_sensor.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ovr_multiview2.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/paint_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/payment_request.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/performance_timeline.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/permissions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/picture_in_picture.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/pointerevents.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/pointerlock.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/private_network_access.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/push_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/referrer_policy.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/remote_playback.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/reporting.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/requestidlecallback.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/resize_observer.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/resource_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/saa_non_cookie_storage.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/sanitizer_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/scheduling_apis.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/screen_capture.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/screen_orientation.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/screen_wake_lock.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/secure_payment_confirmation.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/selection_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/server_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/service_workers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/speech_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/storage.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/streams.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/svg.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/svg_animations.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/touch_events.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/trust_token_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/trusted_types.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/uievents.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/url.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/user_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/vibration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/video_rvfc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/wasm_js_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_animations.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_animations_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_bluetooth.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_locks.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_otp.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_share.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webaudio.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webauthn.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_av1_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_avc_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_hevc_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_vp9_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcryptoapi.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl1.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_color_buffer_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_astc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_etc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_etc1.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_pvrtc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_s3tc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_s3tc_srgb.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_debug_renderer_info.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_debug_shaders.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_depth_texture.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_draw_buffers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_lose_context.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_multi_draw.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgpu.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webidl.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webmidi.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc_encoded_transform.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc_identity.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc_priority.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/websockets.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webtransport.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webvtt.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webxr.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webxr_hand_input.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/xhr.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/cross_origin.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/enums.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/events/events.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/events/providers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/events/streams.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/extensions.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/http.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/lists.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/renames.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/web.dart +file:///Users/duan/Desktop/dart-webrtc/.dart_tool/package_config.json +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.cryptor.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.keyhandler.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.logger.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.sfi_guard.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.utils.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.worker.dart +file:///Users/duan/bin/flutter/bin/cache/dart-sdk/lib/_internal/dart2js_platform.dill +file:///Users/duan/bin/flutter/bin/cache/dart-sdk/lib/libraries.json +org-dartlang-sdk:///lib/_http/crypto.dart +org-dartlang-sdk:///lib/_http/embedder_config.dart +org-dartlang-sdk:///lib/_http/http.dart +org-dartlang-sdk:///lib/_http/http_date.dart +org-dartlang-sdk:///lib/_http/http_headers.dart +org-dartlang-sdk:///lib/_http/http_impl.dart +org-dartlang-sdk:///lib/_http/http_parser.dart +org-dartlang-sdk:///lib/_http/http_session.dart +org-dartlang-sdk:///lib/_http/http_testing.dart +org-dartlang-sdk:///lib/_http/overrides.dart +org-dartlang-sdk:///lib/_http/websocket.dart +org-dartlang-sdk:///lib/_http/websocket_impl.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/annotations.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/async_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/bigint_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/collection_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/constant_map.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/convert_patch.dart 
+org-dartlang-sdk:///lib/_internal/js_runtime/lib/core_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/dart2js_only.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/dart2js_runtime_metrics.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/developer_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/foreign_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/instantiation.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/interceptors.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/internal_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/io_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/isolate_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_allow_interop_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_array.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_names.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_number.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_primitives.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_string.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/late_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/linked_hash_map.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/math_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_typed_data.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/records.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/regexp_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/string_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/synced/array_flags.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/synced/embedded_names.dart 
+org-dartlang-sdk:///lib/_internal/js_runtime/lib/synced/invocation_mirror_constants.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/typed_data_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/convert_utf_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/date_time_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_unsafe_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_types.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_util_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/rti.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/async_status_codes.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/embedded_names.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/recipe_syntax.dart +org-dartlang-sdk:///lib/async/async.dart +org-dartlang-sdk:///lib/async/async_error.dart +org-dartlang-sdk:///lib/async/broadcast_stream_controller.dart +org-dartlang-sdk:///lib/async/deferred_load.dart +org-dartlang-sdk:///lib/async/future.dart +org-dartlang-sdk:///lib/async/future_extensions.dart +org-dartlang-sdk:///lib/async/future_impl.dart +org-dartlang-sdk:///lib/async/schedule_microtask.dart +org-dartlang-sdk:///lib/async/stream.dart +org-dartlang-sdk:///lib/async/stream_controller.dart +org-dartlang-sdk:///lib/async/stream_impl.dart +org-dartlang-sdk:///lib/async/stream_pipe.dart +org-dartlang-sdk:///lib/async/stream_transformers.dart +org-dartlang-sdk:///lib/async/timer.dart +org-dartlang-sdk:///lib/async/zone.dart +org-dartlang-sdk:///lib/collection/collection.dart +org-dartlang-sdk:///lib/collection/collections.dart +org-dartlang-sdk:///lib/collection/hash_map.dart +org-dartlang-sdk:///lib/collection/hash_set.dart +org-dartlang-sdk:///lib/collection/iterable.dart +org-dartlang-sdk:///lib/collection/iterator.dart +org-dartlang-sdk:///lib/collection/linked_hash_map.dart 
+org-dartlang-sdk:///lib/collection/linked_hash_set.dart +org-dartlang-sdk:///lib/collection/linked_list.dart +org-dartlang-sdk:///lib/collection/list.dart +org-dartlang-sdk:///lib/collection/maps.dart +org-dartlang-sdk:///lib/collection/queue.dart +org-dartlang-sdk:///lib/collection/set.dart +org-dartlang-sdk:///lib/collection/splay_tree.dart +org-dartlang-sdk:///lib/convert/ascii.dart +org-dartlang-sdk:///lib/convert/base64.dart +org-dartlang-sdk:///lib/convert/byte_conversion.dart +org-dartlang-sdk:///lib/convert/chunked_conversion.dart +org-dartlang-sdk:///lib/convert/codec.dart +org-dartlang-sdk:///lib/convert/convert.dart +org-dartlang-sdk:///lib/convert/converter.dart +org-dartlang-sdk:///lib/convert/encoding.dart +org-dartlang-sdk:///lib/convert/html_escape.dart +org-dartlang-sdk:///lib/convert/json.dart +org-dartlang-sdk:///lib/convert/latin1.dart +org-dartlang-sdk:///lib/convert/line_splitter.dart +org-dartlang-sdk:///lib/convert/string_conversion.dart +org-dartlang-sdk:///lib/convert/utf.dart +org-dartlang-sdk:///lib/core/annotations.dart +org-dartlang-sdk:///lib/core/bigint.dart +org-dartlang-sdk:///lib/core/bool.dart +org-dartlang-sdk:///lib/core/comparable.dart +org-dartlang-sdk:///lib/core/core.dart +org-dartlang-sdk:///lib/core/date_time.dart +org-dartlang-sdk:///lib/core/double.dart +org-dartlang-sdk:///lib/core/duration.dart +org-dartlang-sdk:///lib/core/enum.dart +org-dartlang-sdk:///lib/core/errors.dart +org-dartlang-sdk:///lib/core/exceptions.dart +org-dartlang-sdk:///lib/core/function.dart +org-dartlang-sdk:///lib/core/identical.dart +org-dartlang-sdk:///lib/core/int.dart +org-dartlang-sdk:///lib/core/invocation.dart +org-dartlang-sdk:///lib/core/iterable.dart +org-dartlang-sdk:///lib/core/iterator.dart +org-dartlang-sdk:///lib/core/list.dart +org-dartlang-sdk:///lib/core/map.dart +org-dartlang-sdk:///lib/core/null.dart +org-dartlang-sdk:///lib/core/num.dart +org-dartlang-sdk:///lib/core/object.dart +org-dartlang-sdk:///lib/core/pattern.dart 
+org-dartlang-sdk:///lib/core/print.dart +org-dartlang-sdk:///lib/core/record.dart +org-dartlang-sdk:///lib/core/regexp.dart +org-dartlang-sdk:///lib/core/set.dart +org-dartlang-sdk:///lib/core/sink.dart +org-dartlang-sdk:///lib/core/stacktrace.dart +org-dartlang-sdk:///lib/core/stopwatch.dart +org-dartlang-sdk:///lib/core/string.dart +org-dartlang-sdk:///lib/core/string_buffer.dart +org-dartlang-sdk:///lib/core/string_sink.dart +org-dartlang-sdk:///lib/core/symbol.dart +org-dartlang-sdk:///lib/core/type.dart +org-dartlang-sdk:///lib/core/uri.dart +org-dartlang-sdk:///lib/core/weak.dart +org-dartlang-sdk:///lib/developer/developer.dart +org-dartlang-sdk:///lib/developer/extension.dart +org-dartlang-sdk:///lib/developer/http_profiling.dart +org-dartlang-sdk:///lib/developer/profiler.dart +org-dartlang-sdk:///lib/developer/service.dart +org-dartlang-sdk:///lib/developer/timeline.dart +org-dartlang-sdk:///lib/html/dart2js/html_dart2js.dart +org-dartlang-sdk:///lib/html/html_common/conversions.dart +org-dartlang-sdk:///lib/html/html_common/conversions_dart2js.dart +org-dartlang-sdk:///lib/html/html_common/css_class_set.dart +org-dartlang-sdk:///lib/html/html_common/device.dart +org-dartlang-sdk:///lib/html/html_common/filtered_element_list.dart +org-dartlang-sdk:///lib/html/html_common/html_common_dart2js.dart +org-dartlang-sdk:///lib/html/html_common/lists.dart +org-dartlang-sdk:///lib/html/html_common/metadata.dart +org-dartlang-sdk:///lib/indexed_db/dart2js/indexed_db_dart2js.dart +org-dartlang-sdk:///lib/internal/async_cast.dart +org-dartlang-sdk:///lib/internal/bytes_builder.dart +org-dartlang-sdk:///lib/internal/cast.dart +org-dartlang-sdk:///lib/internal/errors.dart +org-dartlang-sdk:///lib/internal/internal.dart +org-dartlang-sdk:///lib/internal/iterable.dart +org-dartlang-sdk:///lib/internal/linked_list.dart +org-dartlang-sdk:///lib/internal/list.dart +org-dartlang-sdk:///lib/internal/patch.dart +org-dartlang-sdk:///lib/internal/print.dart 
+org-dartlang-sdk:///lib/internal/sort.dart +org-dartlang-sdk:///lib/internal/symbol.dart +org-dartlang-sdk:///lib/io/common.dart +org-dartlang-sdk:///lib/io/data_transformer.dart +org-dartlang-sdk:///lib/io/directory.dart +org-dartlang-sdk:///lib/io/directory_impl.dart +org-dartlang-sdk:///lib/io/embedder_config.dart +org-dartlang-sdk:///lib/io/eventhandler.dart +org-dartlang-sdk:///lib/io/file.dart +org-dartlang-sdk:///lib/io/file_impl.dart +org-dartlang-sdk:///lib/io/file_system_entity.dart +org-dartlang-sdk:///lib/io/io.dart +org-dartlang-sdk:///lib/io/io_resource_info.dart +org-dartlang-sdk:///lib/io/io_service.dart +org-dartlang-sdk:///lib/io/io_sink.dart +org-dartlang-sdk:///lib/io/link.dart +org-dartlang-sdk:///lib/io/namespace_impl.dart +org-dartlang-sdk:///lib/io/network_profiling.dart +org-dartlang-sdk:///lib/io/overrides.dart +org-dartlang-sdk:///lib/io/platform.dart +org-dartlang-sdk:///lib/io/platform_impl.dart +org-dartlang-sdk:///lib/io/process.dart +org-dartlang-sdk:///lib/io/secure_server_socket.dart +org-dartlang-sdk:///lib/io/secure_socket.dart +org-dartlang-sdk:///lib/io/security_context.dart +org-dartlang-sdk:///lib/io/service_object.dart +org-dartlang-sdk:///lib/io/socket.dart +org-dartlang-sdk:///lib/io/stdio.dart +org-dartlang-sdk:///lib/io/string_transformer.dart +org-dartlang-sdk:///lib/io/sync_socket.dart +org-dartlang-sdk:///lib/isolate/capability.dart +org-dartlang-sdk:///lib/isolate/isolate.dart +org-dartlang-sdk:///lib/js/_js.dart +org-dartlang-sdk:///lib/js/_js_annotations.dart +org-dartlang-sdk:///lib/js/_js_client.dart +org-dartlang-sdk:///lib/js/js.dart +org-dartlang-sdk:///lib/js_interop/js_interop.dart +org-dartlang-sdk:///lib/js_interop_unsafe/js_interop_unsafe.dart +org-dartlang-sdk:///lib/js_util/js_util.dart +org-dartlang-sdk:///lib/math/math.dart +org-dartlang-sdk:///lib/math/point.dart +org-dartlang-sdk:///lib/math/random.dart +org-dartlang-sdk:///lib/math/rectangle.dart 
+org-dartlang-sdk:///lib/svg/dart2js/svg_dart2js.dart +org-dartlang-sdk:///lib/typed_data/typed_data.dart +org-dartlang-sdk:///lib/web_audio/dart2js/web_audio_dart2js.dart +org-dartlang-sdk:///lib/web_gl/dart2js/web_gl_dart2js.dart \ No newline at end of file diff --git a/example/web/e2ee.worker.dart.js.map b/example/web/e2ee.worker.dart.js.map new file mode 100644 index 0000000000..8288579a95 --- /dev/null +++ b/example/web/e2ee.worker.dart.js.map @@ -0,0 +1,16 @@ +{ + "version": 3, + "engine": "v2", + "file": "e2ee.worker.dart.js", + "sourceRoot": "", + "sources": ["org-dartlang-sdk:///lib/_internal/js_runtime/lib/interceptors.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_helper.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_helper.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_array.dart","org-dartlang-sdk:///lib/internal/internal.dart","org-dartlang-sdk:///lib/internal/iterable.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_names.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/rti.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/date_time_patch.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/linked_hash_map.dart","org-dartlang-sdk:///lib/core/errors.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/records.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/string_helper.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_typed_data.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/recipe_syntax.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/async_patch.dart","org-dartlang-sdk:///lib/async/future_impl.dart","org-dartlang-sdk:///lib/async/zone.dart","org-dartlang-sdk:///lib/async/async_error.dart","org-dartlang-sdk:///lib/async/schedule_microtask.dart","org-dartlang-sdk:///lib/async/stream.dart","org-dartlang-sdk:///lib/async/stream_impl.dart","org-dartlang-sdk:///lib/async/stream_controller.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/co
llection_patch.dart","org-dartlang-sdk:///lib/collection/maps.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/core_patch.dart","org-dartlang-sdk:///lib/convert/base64.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_string.dart","org-dartlang-sdk:///lib/core/date_time.dart","org-dartlang-sdk:///lib/core/exceptions.dart","org-dartlang-sdk:///lib/core/iterable.dart","org-dartlang-sdk:///lib/core/object.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_allow_interop_patch.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/js_util_patch.dart","../lib/src/e2ee.worker/e2ee.cryptor.dart","../lib/src/e2ee.worker/e2ee.keyhandler.dart","../lib/src/e2ee.worker/e2ee.worker.dart","../../../.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/logger.dart","../lib/src/e2ee.worker/e2ee.sfi_guard.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_primitives.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/late_helper.dart","org-dartlang-sdk:///lib/internal/errors.dart","../../../.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/iterable_extensions.dart","../lib/src/e2ee.worker/e2ee.utils.dart","org-dartlang-sdk:///lib/collection/list.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_number.dart","org-dartlang-sdk:///lib/internal/bytes_builder.dart","org-dartlang-sdk:///lib/typed_data/typed_data.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/internal_patch.dart","org-dartlang-sdk:///lib/internal/symbol.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/constant_map.dart","org-dartlang-sdk:///lib/async/broadcast_stream_controller.dart","org-dartlang-sdk:///lib/core/enum.dart","org-dartlang-sdk:///lib/core/null.dart","org-dartlang-sdk:///lib/core/stacktrace.dart","org-dartlang-sdk:///lib/js_util/js_util.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/math_patch.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_patch.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_unsafe_patch.dar
t","org-dartlang-sdk:///lib/convert/codec.dart","../../../.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/level.dart","../../../.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/log_record.dart","../lib/src/e2ee.worker/e2ee.logger.dart","org-dartlang-sdk:///lib/async/future.dart","org-dartlang-sdk:///lib/core/print.dart"], + "names": ["makeDispatchRecord","getNativeInterceptor","lookupInterceptorByConstructor","JS_INTEROP_INTERCEPTOR_TAG","cacheInterceptorOnConstructor","JSArray.fixed","JSArray.markFixed","SystemHash.combine","SystemHash.finish","checkNotNullable","isToStringVisiting","MappedIterable","unminifyOrTag","isJsIndexable","S","Primitives.objectHashCode","Primitives.objectTypeName","Primitives._objectTypeNameNewRti","Primitives.safeToString","Primitives.stringSafeToString","Primitives.stringFromNativeUint8List","Primitives.lazyAsJsDate","Primitives.getYear","Primitives.getMonth","Primitives.getDay","Primitives.getHours","Primitives.getMinutes","Primitives.getSeconds","Primitives.getMilliseconds","Primitives.functionNoSuchMethod","createUnmangledInvocationMirror","Primitives.applyFunction","Primitives._generalApplyFunction","JsLinkedHashMap.isNotEmpty","Primitives.extractStackTrace","Primitives.trySetStackTrace","iae","ioore","diagnoseIndexError","diagnoseRangeError","argumentErrorValue","wrapException","initializeExceptionWrapper","toStringWrapper","throwExpression","throwExpressionWithWrapper","throwUnsupportedOperation","_diagnoseUnsupportedOperation","throwConcurrentModificationError","TypeErrorDecoder.extractPattern","TypeErrorDecoder.provokeCallErrorOn","TypeErrorDecoder.provokePropertyErrorOn","JsNoSuchMethodError","unwrapException","saveStackTrace","_unwrapNonDartException","getTraceFromException","objectHashCode","fillLiteralMap","_invokeClosure","convertDartClosureToJS","convertDartClosureToJSUncached","Closure.fromTearOff","Closure._computeSignatureFunctionNewRti","Closure.cspForwardCall","Closure.forwardCallTo","Closure.cspForwardInterceptedCall",
"Closure.forwardInterceptedCallTo","closureFromTearOff","BoundClosure.evalRecipe","evalInInstance","_rtiEval","BoundClosure.receiverOf","BoundClosure.interceptorOf","BoundClosure._computeFieldNamed","boolConversionCheck","assertThrow","throwCyclicInit","getIsolateAffinityTag","defineProperty","lookupAndCacheInterceptor","setDispatchProperty","patchInstance","lookupInterceptor","patchProto","patchInteriorProto","makeLeafDispatchRecord","makeDefaultDispatchRecord","initNativeDispatch","initNativeDispatchContinue","initHooks","applyHooksTransformer","createRecordTypePredicate","quoteStringForRegExp","NativeByteData","_ensureNativeList","NativeUint8List","NativeUint8List.view","_checkValidIndex","_checkValidRange","Rti._getQuestionFromStar","Rti._getStarArgument","Rti._getFutureFromFutureOr","Rti._getFutureOrArgument","Rti._isUnionOfFunctionType","Rti._getKind","Rti._getCanonicalRecipe","findType","_substitute","Rti._getInterfaceName","Rti._getBindingBase","Rti._getRecordPartialShapeTag","Rti._getReturnType","Rti._getGenericFunctionBase","Rti._getGenericFunctionParameterIndex","_substituteArray","_substituteNamed","_substituteFunctionParameters","_FunctionParameters.allocate","_setArrayType","closureFunctionType","instanceOrFunctionType","instanceType","_arrayInstanceType","_instanceType","_instanceTypeFromConstructor","_instanceTypeFromConstructorMiss","getTypeFromTypesTable","getRuntimeTypeOfDartObject","_structuralTypeOf","_instanceFunctionType","createRuntimeType","_createAndCacheRuntimeType","_createRuntimeType","_Type","typeLiteral","_installSpecializedIsTest","isDefinitelyTopType","_recordSpecializedIsTest","_finishIsFn","_installSpecializedAsCheck","_nullIs","_generalIsTestImplementation","_generalNullableIsTestImplementation","Rti._getQuestionArgument","_isTestViaProperty","_isListTestViaProperty","_generalAsCheckImplementation","_generalNullableAsCheckImplementation","_failedAsCheck","_Error.compose","_TypeError.fromMessage","_TypeError.forType","_isFutureOr",
"_isObject","_asObject","_isTop","_asTop","_isNever","_isBool","_asBool","_asBoolS","_asBoolQ","_asDouble","_asDoubleS","_asDoubleQ","_isInt","_asInt","_asIntS","_asIntQ","_isNum","_asNum","_asNumS","_asNumQ","_isString","_asString","_asStringS","_asStringQ","_rtiArrayToString","_recordRtiToString","_functionRtiToString","isLegacyObjectType","_rtiToString","_unminifyOrTag","_Universe.findRule","_Universe._findRule","_Universe.findErasedType","_Universe.addRules","_Universe.addErasedTypes","_Universe.eval","_Universe.evalInEnvironment","_Universe.bind","_Universe._installTypeTests","_Universe._lookupTerminalRti","Rti.allocate","_Universe._createTerminalRti","_Universe._installRti","_Universe._lookupStarRti","_Universe._createStarRti","_Universe._lookupQuestionRti","_Universe._createQuestionRti","_Universe._lookupFutureOrRti","_Universe._createFutureOrRti","_Universe._lookupGenericFunctionParameterRti","_Universe._createGenericFunctionParameterRti","_Universe._canonicalRecipeJoin","_Universe._canonicalRecipeJoinNamed","_Universe._lookupInterfaceRti","_Universe._canonicalRecipeOfInterface","_Universe._createInterfaceRti","_Universe._lookupBindingRti","_Universe._createBindingRti","_Universe._lookupRecordRti","_Universe._createRecordRti","_Universe._lookupFunctionRti","_Universe._canonicalRecipeOfFunction","_Universe._canonicalRecipeOfFunctionParameters","_Universe._createFunctionRti","_Universe._lookupGenericFunctionRti","_Universe._createGenericFunctionRti","_Parser.create","_Parser.parse","_Parser.toGenericFunctionParameter","_Parser.pushStackFrame","_Parser.collectArray","_Parser.handleOptionalGroup","_Parser.collectNamed","_Parser.handleNamedGroup","_Parser.handleStartRecord","_Parser.handleDigit","_Parser.handleIdentifier","_Universe.evalTypeVariable","_Parser.handleTypeArguments","_Parser.handleArguments","_Parser.handleExtendedOperations","_Parser.toType","_Parser.toTypes","_Parser.toTypesNamed","_Parser.indexToType","isSubtype","_isSubtype","isBottomType","_isF
unctionSubtype","_isInterfaceSubtype","_Utils.newArrayOrEmpty","_areArgumentsSubtypes","_isRecordSubtype","isNullable","isSoundTopType","_Utils.objectAssign","_AsyncRun._initializeScheduleImmediate","_AsyncRun._scheduleImmediateJsOverride","_AsyncRun._scheduleImmediateWithSetImmediate","_AsyncRun._scheduleImmediateWithTimer","_TimerImpl","_makeAsyncAwaitCompleter","_AsyncAwaitCompleter._future","_asyncStartSync","_asyncAwait","_asyncReturn","_asyncRethrow","_awaitOnObject","_wrapJsFunctionForAsync","AsyncError.defaultStackTrace","_interceptError","_interceptUserError","_Future._chainCoreFuture","_Future._propagateToListeners","_registerErrorHandler","_microtaskLoop","_startMicrotaskLoop","_scheduleAsyncCallback","_schedulePriorityAsyncCallback","scheduleMicrotask","StreamIterator","_runGuarded","_BufferingStreamSubscription._registerErrorHandler","_nullErrorHandler","_nullDoneHandler","_rootHandleError","_rootRun","_rootRunUnary","_rootRunBinary","_rootScheduleMicrotask","_HashMap._getTableEntry","_HashMap._setTableEntry","_HashMap._newHashTable","LinkedHashMap._literal","LinkedHashMap._empty","MapBase.mapToString","_Base64Encoder.encodeChunk","_Base64Decoder.decodeChunk","_Base64Decoder._allocateBuffer","_Base64Decoder._trimPaddingChars","_Base64Decoder._checkPadding","Error._throw","List.filled","List.of","List._of","List._ofArray","JSArray.markGrowable","String.fromCharCodes","String._stringFromUint8List","StringBuffer._writeAll","NoSuchMethodError.withInvocation","StackTrace.current","DateTime._fourDigits","DateTime._threeDigits","DateTime._twoDigits","Error.safeToString","Error.throwWithStackTrace","AssertionError","ArgumentError","ArgumentError.value","RangeError.value","RangeError.range","RangeError.checkValidRange","RangeError.checkNotNegative","IndexError.withLength","UnsupportedError","UnimplementedError","StateError","ConcurrentModificationError","Exception","FormatException","Iterable.iterableToShortString","Iterable.iterableToFullString","_iterableParts
ToStrings","Object.hash","_convertDartFunctionFast","_callDartFunctionFast","allowInterop","_functionToJS1","_callDartFunctionFast1","_noJsifyRequired","jsify","callMethod","promiseToFuture","_Completer.future","Completer","_noDartifyRequired","dartify","findNALUIndices","ParticipantKeyHandler","getTrackCryptor","FrameCryptor","FrameCryptor.sifGuard","FrameCryptor.setParticipant","unsetCryptorParticipant","main","Logger","printString","throwLateFieldNI","throwLateFieldADI","IterableExtension.firstWhereOrNull","getAlgoOptions","Interceptor.hashCode","Interceptor.==","Interceptor.toString","Interceptor.noSuchMethod","Interceptor.runtimeType","JSBool.toString","JSBool.hashCode","JSBool.runtimeType","JSNull.==","JSNull.toString","JSNull.hashCode","LegacyJavaScriptObject.toString","LegacyJavaScriptObject.hashCode","LegacyJavaScriptObject.runtimeType","JavaScriptFunction.toString","JavaScriptBigInt.toString","JavaScriptBigInt.hashCode","JavaScriptSymbol.toString","JavaScriptSymbol.hashCode","JSArray.add","JSArray.addAll","JSArray._addAllFromArray","JSArray.map","JSArray.elementAt","JSArray.toString","JSArray.iterator","JSArray.hashCode","JSArray.length","JSArray.[]","JSArray.[]=","JSArray.runtimeType","getRuntimeTypeOfArray","ArrayIterator.current","ArrayIterator.moveNext","ArrayIterator._current","JSNumber.toInt","JSNumber.truncateToDouble","JSNumber.toRadixString","JSNumber.toString","JSNumber.hashCode","JSNumber.%","JSNumber._tdivFast","JSNumber._tdivSlow","JSNumber._shrOtherPositive","JSNumber._shrBothPositive","JSNumber.runtimeType","JSInt.runtimeType","JSNumNotInt.runtimeType","JSString.endsWith","JSString.startsWith","JSString.substring","JSString.substring[function-entry$1]","JSString.*","JSString.lastIndexOf","JSString.toString","JSString.hashCode","JSString.runtimeType","JSString.length","JSString.[]","_CopyingBytesBuilder.add","_CopyingBytesBuilder._grow","_CopyingBytesBuilder.toBytes","NativeUint8List.fromList","_CopyingBytesBuilder.length","LateError.toString
","ListIterable.iterator","ListIterable.map","ListIterator.current","ListIterator.moveNext","ListIterator._current","MappedIterable.iterator","MappedIterable.length","MappedIterator.moveNext","MappedIterator.current","MappedIterator._current","MappedListIterable.length","MappedListIterable.elementAt","WhereIterable.iterator","WhereIterable.map","WhereIterator.moveNext","WhereIterator.current","Symbol.hashCode","Symbol.toString","Symbol.==","ConstantMap.toString","ConstantStringMap.length","ConstantStringMap._keys","ConstantStringMap.containsKey","ConstantStringMap.[]","ConstantStringMap.forEach","ConstantStringMap.keys","_KeysOrValues.length","_KeysOrValues.iterator","_KeysOrValuesOrElementsIterator.current","_KeysOrValuesOrElementsIterator.moveNext","_KeysOrValuesOrElementsIterator._current","JSInvocationMirror.memberName","JSInvocationMirror.positionalArguments","JSInvocationMirror.namedArguments","Primitives.functionNoSuchMethod.","TypeErrorDecoder.matchTypeError","NullError.toString","JsNoSuchMethodError.toString","UnknownJsTypeError.toString","NullThrownFromJavaScriptException.toString","_StackTrace.toString","Closure.toString","StaticClosure.toString","BoundClosure.==","BoundClosure.hashCode","BoundClosure.toString","_CyclicInitializationError.toString","RuntimeError.toString","_AssertionError.toString","JsLinkedHashMap.keys","JsLinkedHashMap.length","JsLinkedHashMap.containsKey","JsLinkedHashMap._containsTableEntry","JsLinkedHashMap.[]","JsLinkedHashMap.internalGet","JsLinkedHashMap._getBucket","JsLinkedHashMap.[]=","JsLinkedHashMap.internalSet","JsLinkedHashMap.putIfAbsent","JsLinkedHashMap.remove","JsLinkedHashMap.forEach","JsLinkedHashMap._addHashTableEntry","JsLinkedHashMap._removeHashTableEntry","JsLinkedHashMap._modified","JsLinkedHashMap._newLinkedCell","JsLinkedHashMap._unlinkCell","JsLinkedHashMap.internalComputeHashCode","JsLinkedHashMap.internalFindBucketIndex","JsLinkedHashMap.toString","JsLinkedHashMap._newHashTable","LinkedHashMapKeysIterable.le
ngth","LinkedHashMapKeysIterable.iterator","LinkedHashMapKeyIterator.current","LinkedHashMapKeyIterator.moveNext","LinkedHashMapKeyIterator._current","initHooks.","NativeByteBuffer.runtimeType","NativeByteBuffer.asUint8List","NativeByteBuffer.asUint8List[function-entry$0]","NativeTypedData.buffer","NativeTypedData._invalidPosition","NativeTypedData._checkPosition","_UnmodifiableNativeByteBufferView.asUint8List","_UnmodifiableNativeByteBufferView.asUint8List[function-entry$0]","NativeByteData.runtimeType","NativeByteData._setInt8","NativeTypedArray.length","NativeTypedArrayOfDouble.[]","NativeTypedArrayOfInt.setRange","NativeFloat32List.runtimeType","NativeFloat64List.runtimeType","NativeInt16List.runtimeType","NativeInt16List.[]","NativeInt32List.runtimeType","NativeInt32List.[]","NativeInt8List.runtimeType","NativeInt8List.[]","NativeUint16List.runtimeType","NativeUint16List.[]","NativeUint32List.runtimeType","NativeUint32List.[]","NativeUint8ClampedList.runtimeType","NativeUint8ClampedList.length","NativeUint8ClampedList.[]","NativeUint8List.runtimeType","NativeUint8List.length","NativeUint8List.[]","NativeUint8List.sublist","NativeUint8List.sublist[function-entry$1]","Rti._eval","Rti._bind","_rtiBind","_Type.toString","_Error.toString","_AsyncRun._initializeScheduleImmediate.internalCallback","_AsyncRun._initializeScheduleImmediate.","_AsyncRun._scheduleImmediateJsOverride.internalCallback","_AsyncRun._scheduleImmediateWithSetImmediate.internalCallback","_TimerImpl.internalCallback","_AsyncAwaitCompleter.complete","_AsyncAwaitCompleter.completeError","_awaitOnObject.","_wrapJsFunctionForAsync.","AsyncError.toString","_BroadcastSubscription._onPause","_BroadcastSubscription._onResume","_BroadcastSubscription._next","_BroadcastSubscription._previous","_BroadcastStreamController._mayAddEvent","_BroadcastStreamController._subscribe","_DoneStreamSubscription","_BufferingStreamSubscription","_BufferingStreamSubscription._registerDataHandler","_BufferingStreamSubscripti
on.zoned","_BufferingStreamSubscription._registerDoneHandler","_BroadcastSubscription","_BroadcastStreamController._addEventError","_BroadcastStreamController._forEachListener","_BroadcastStreamController._callOnCancel","_BroadcastStreamController._firstSubscription","_BroadcastStreamController._lastSubscription","_SyncBroadcastStreamController._mayAddEvent","_SyncBroadcastStreamController._addEventError","_SyncBroadcastStreamController._sendData","_SyncBroadcastStreamController._sendData.","_SyncBroadcastStreamController__sendData_closure","_Completer.completeError","_Completer.completeError[function-entry$1]","_AsyncCompleter.complete","_FutureListener.matchesErrorTest","_FutureListener._errorTest","_FutureListener.handleError","_Future.then","_Future._thenAwait","_Future._setErrorObject","_Future._cloneResult","_Future._addListener","_Future._prependListeners","_Future._removeListeners","_Future._reverseListeners","_Future._chainForeignFuture","_Future._completeWithValue","_Future._completeWithResultOf","_Future._completeError","_Future._setError","_Future._asyncComplete","_Future._asyncCompleteWithValue","_Future._chainFuture","_Future._asyncCompleteError","_Future._addListener.","_Future._prependListeners.","_Future._chainForeignFuture.","_Future._chainCoreFuture.","_Future._asyncCompleteWithValue.","_Future._asyncCompleteError.","_Future._propagateToListeners.handleWhenCompleteCallback","_FutureListener.handleWhenComplete","_FutureListener._whenCompleteAction","_Future._newFutureWithSameType","_Future._propagateToListeners.handleWhenCompleteCallback.","_Future._propagateToListeners.handleValueCallback","_FutureListener.handleValue","_FutureListener._onValue","_Future._propagateToListeners.handleError","_FutureListener.hasErrorCallback","Stream.length","Stream.length.","Stream_length_closure","_Future._complete","_ControllerStream.hashCode","_ControllerStream.==","_ControllerSubscription._onPause","_ControllerSubscription._onResume","_BufferingStreamSubscriptio
n._add","_BufferingStreamSubscription._onPause","_BufferingStreamSubscription._onResume","_BufferingStreamSubscription._addPending","_BufferingStreamSubscription._sendData","_BufferingStreamSubscription._checkState","_BufferingStreamSubscription._mayResumeInput","_BufferingStreamSubscription._pending","_StreamImpl.listen","_StreamImpl.listen[function-entry$1]","_PendingEvents.schedule","_PendingEvents.schedule.","_PendingEvents.handleNext","_DoneStreamSubscription._onMicrotask","_DoneStreamSubscription._onDone","_rootHandleError.","_RootZone.runGuarded","_RootZone.runUnaryGuarded","_RootZone.bindCallbackGuarded","_RootZone.[]","_RootZone.run","_RootZone.runUnary","_RootZone.runBinary","_RootZone.registerBinaryCallback","_RootZone.bindCallbackGuarded.","_HashMap.keys","_HashMap.length","_HashMap.containsKey","_HashMap._containsKey","_HashMap.[]","_HashMap._get","_HashMap.[]=","_IdentityHashMap._computeHashCode","_HashMap.forEach","_HashMap._computeKeys","_HashMap._addHashTableEntry","_HashMap._getBucket","_IdentityHashMap._findBucketIndex","_HashMapKeyIterable.length","_HashMapKeyIterable.iterator","_HashMapKeyIterator.current","_HashMapKeyIterator.moveNext","_HashMapKeyIterator._current","ListBase.iterator","ListBase.elementAt","ListBase.map","ListBase.toString","MapBase.forEach","MapBase.length","MapBase.toString","MapBase.mapToString.","StringBuffer.write","MapView.[]","MapView.forEach","MapView.length","MapView.keys","MapView.toString","Base64Encoder.convert","_Base64Encoder.encode","Base64Decoder.convert","_Base64Decoder.decode","NoSuchMethodError.toString.","_symbolToString","DateTime.==","DateTime.hashCode","DateTime.toString","_Enum.toString","Error.stackTrace","AssertionError.toString","ArgumentError._errorName","ArgumentError._errorExplanation","ArgumentError.toString","RangeError.invalidValue","RangeError._errorName","RangeError._errorExplanation","IndexError.invalidValue","IndexError._errorName","IndexError._errorExplanation","NoSuchMethodError.toString",
"UnsupportedError.toString","UnimplementedError.toString","StateError.toString","ConcurrentModificationError.toString","OutOfMemoryError.toString","OutOfMemoryError.stackTrace","StackOverflowError.toString","StackOverflowError.stackTrace","_Exception.toString","FormatException.toString","Iterable.map","Iterable.length","Iterable.elementAt","Iterable.toString","Null.hashCode","Null.toString","Object.hashCode","Object.==","Object.toString","Object.noSuchMethod","Object.runtimeType","_StringStackTrace.toString","StringBuffer.length","StringBuffer.toString","jsify._convert","promiseToFuture.","dartify.convert","DateTime._withValueChecked","_dateToDateTime","NullRejectionException.toString","_JSSecureRandom","_JSSecureRandom.nextInt","NativeByteData.setUint32","CryptorError._enumToString","FrameCryptor.enabled","FrameCryptor.setupTransform","FrameCryptor.setupTransform[function-entry$0$kind$operation$readable$trackId$writable]","FrameCryptor.postMessage","FrameCryptor.getUnencryptedBytes","FrameCryptor.readFrameInfo","FrameCryptor.enqueueFrame","FrameCryptor.encodeFunction","FrameCryptor.makeIv","NativeByteData.setInt8","BytesBuilder","FrameCryptor.decodeFunction","DateTime._now","DateTime.now","ParticipantKeyHandler.decryptionSuccess","FrameCryptor.decodeFunction.decryptFrameInternal","FrameCryptor.decodeFunction.ratchedKeyInternal","KeyOptions.toString","KeyProvider.getParticipantKeyHandler","ListBase.isNotEmpty","KeyProvider.getSharedKeyHandler","ParticipantKeyHandler.decryptionFailure","ParticipantKeyHandler.exportKey","ParticipantKeyHandler.ratchetKey","ParticipantKeyHandler.ratchetMaterial","ParticipantKeyHandler.getKeySet","ParticipantKeyHandler.setKey","ParticipantKeyHandler.setKey[function-entry$1]","ParticipantKeyHandler.setKeySetFromMaterial","ParticipantKeyHandler.deriveKeys","ParticipantKeyHandler.ratchet","ParticipantKeyHandler._#ParticipantKeyHandler#cryptoKeyRing#A","SifGuard.recordUserFrame","SifGuard.reset","getTrackCryptor.","unsetCryptorParticipant.",
"main.","print","base64Decode","Base64Codec.decode","KeyProvider","JSArray.where","FrameCryptor.setEnabled","KeyProvider.setSharedKey","base64Encode","Codec.encode","FrameCryptor.setKeyIndex","FrameCryptor.setSifTrailer","FrameCryptor.updateCodec","main..","Level.==","Level.hashCode","Level.toString","LogRecord.toString","Logger.fullName","JSString.isNotEmpty","Logger.level","Logger.log","Logger.isLoggable","Logger._getStream","StreamController.broadcast","_BroadcastStreamController.stream","Logger._publish","Logger._controller","Logger.","Logger._named","Logger._internal","DART_CLOSURE_PROPERTY_NAME","_CopyingBytesBuilder._emptyList","TypeErrorDecoder.noSuchMethodPattern","TypeErrorDecoder.notClosurePattern","TypeErrorDecoder.nullCallPattern","TypeErrorDecoder.nullLiteralCallPattern","TypeErrorDecoder.undefinedCallPattern","TypeErrorDecoder.undefinedLiteralCallPattern","TypeErrorDecoder.nullPropertyPattern","TypeErrorDecoder.nullLiteralPropertyPattern","TypeErrorDecoder.undefinedPropertyPattern","TypeErrorDecoder.undefinedLiteralPropertyPattern","_AsyncRun._scheduleImmediateClosure","_Base64Decoder._inverseAlphabet","NativeInt8List.fromList","_Base64Decoder._emptyBuffer","_hashSeed","Random._secureRandom","logger","Logger.root","","ArrayIterator","AsyncError","Base64Codec","Base64Decoder","Base64Encoder","BoundClosure","ByteBuffer","ByteData","Closure","Closure0Args","Closure2Args","Codec","ConstantMap","ConstantMapView","ConstantStringMap","Converter","CryptorError","DateTime","EfficientLengthIterable","EfficientLengthMappedIterable","Error","ExceptionAndStackTrace","FixedLengthListMixin","Float32List","Float64List","FrameCryptor_decodeFunction_decryptFrameInternal","FrameCryptor_decodeFunction_ratchedKeyInternal","FrameInfo","Function","Future","IndexError","Int16List","Int32List","Int8List","Interceptor","Invocation","Iterable","IterableExtension|firstWhereOrNull","Iterator","JSArray","JSBool","JSInt","JSInvocationMirror","JSNull","JSNumNotInt","JSNumber","JSObj
ect","JSString","JSUnmodifiableArray","JS_CONST","JavaScriptBigInt","JavaScriptFunction","JavaScriptIndexingBehavior","JavaScriptObject","JavaScriptSymbol","JsLinkedHashMap","KeyOptions","KeySet","LateError","LegacyJavaScriptObject","Level","LinkedHashMap","LinkedHashMapCell","LinkedHashMapKeyIterator","LinkedHashMapKeysIterable","List","ListBase","ListIterable","ListIterator","LogRecord","Logger_Logger_closure","Map","MapBase","MapBase_mapToString_closure","MapView","MappedIterator","MappedListIterable","NativeByteBuffer","NativeFloat32List","NativeFloat64List","NativeInt16List","NativeInt32List","NativeInt8List","NativeTypedArray","NativeTypedArrayOfDouble","NativeTypedArrayOfInt","NativeTypedData","NativeUint16List","NativeUint32List","NativeUint8ClampedList","NoSuchMethodError","NoSuchMethodError_toString_closure","Null","NullError","NullRejectionException","NullThrownFromJavaScriptException","Object","OutOfMemoryError","Pattern","PlainJavaScriptObject","Primitives_functionNoSuchMethod_closure","RangeError","Record","Rti","RuntimeError","SentinelValue","SifGuard","StackOverflowError","StackTrace","StaticClosure","Stream","StreamController","StreamSubscription","String","StringBuffer","Symbol","TearOffClosure","TrustedGetRuntimeType","TypeError","TypeErrorDecoder","Uint16List","Uint32List","Uint8ClampedList","Uint8List","UnknownJavaScriptObject","UnknownJsTypeError","UnmodifiableMapView","WhereIterable","WhereIterator","Zone","_AddStreamState","_AssertionError","_AsyncAwaitCompleter","_AsyncCallbackEntry","_AsyncCompleter","_AsyncRun__initializeScheduleImmediate_closure","_AsyncRun__initializeScheduleImmediate_internalCallback","_AsyncRun__scheduleImmediateJsOverride_internalCallback","_AsyncRun__scheduleImmediateWithSetImmediate_internalCallback","_Base64Decoder","_Base64Encoder","_BroadcastStream","_BroadcastStreamController","_Completer","_ControllerStream","_ControllerSubscription","_CopyingBytesBuilder","_CyclicInitializationError","_DelayedData","_DelayedEv
ent","_Enum","_Error","_EventDispatch","_Exception","_FunctionParameters","_Future","_FutureListener","_Future__addListener_closure","_Future__asyncCompleteError_closure","_Future__asyncCompleteWithValue_closure","_Future__chainCoreFuture_closure","_Future__chainForeignFuture_closure","_Future__prependListeners_closure","_Future__propagateToListeners_handleError","_Future__propagateToListeners_handleValueCallback","_Future__propagateToListeners_handleWhenCompleteCallback","_Future__propagateToListeners_handleWhenCompleteCallback_closure","_HashMap","_HashMapKeyIterable","_HashMapKeyIterator","_IdentityHashMap","_JS_INTEROP_INTERCEPTOR_TAG","_KeysOrValues","_KeysOrValuesOrElementsIterator","_NativeTypedArrayOfDouble&NativeTypedArray&ListMixin","_NativeTypedArrayOfDouble&NativeTypedArray&ListMixin&FixedLengthListMixin","_NativeTypedArrayOfInt&NativeTypedArray&ListMixin","_NativeTypedArrayOfInt&NativeTypedArray&ListMixin&FixedLengthListMixin","_PendingEvents","_PendingEvents_schedule_closure","_Required","_RootZone","_RootZone_bindCallbackGuarded_closure","_StackTrace","_StreamControllerLifecycle","_StreamImpl","_StreamIterator","_StringStackTrace","_SyncBroadcastStreamController","_TimerImpl_internalCallback","_TypeError","_UnmodifiableMapMixin","_UnmodifiableMapView&MapView&_UnmodifiableMapMixin","_UnmodifiableNativeByteBufferView","_Zone","_allocateBuffer","_awaitOnObject_closure","_canonicalRecipeJoin","_canonicalRecipeJoinNamed","_chainCoreFuture","_checkPadding","_computeFieldNamed","_computeSignatureFunctionNewRti","_createFutureOrRti","_createGenericFunctionRti","_createQuestionRti","_createStarRti","_current","_empty","_emptyBuffer","_emptyList","_fourDigits","_generalApplyFunction","_getCanonicalRecipe","_getFutureFromFutureOr","_getQuestionFromStar","_getTableEntry","_identityHashCodeProperty","_initializeScheduleImmediate","_installTypeTests","_interceptorFieldNameCache","_inverseAlphabet","_isInCallbackLoop","_isUnionOfFunctionType","_lastCallback","_lastP
riorityCallback","_literal","_loggers","_lookupBindingRti","_lookupFunctionRti","_lookupFutureOrRti","_lookupGenericFunctionParameterRti","_lookupGenericFunctionRti","_lookupInterfaceRti","_lookupQuestionRti","_lookupRecordRti","_lookupStarRti","_lookupTerminalRti","_newHashTable","_nextCallback","_nextNumber","_objectTypeNameNewRti","_of","_propagateToListeners","_receiverFieldNameCache","_rootHandleError_closure","_scheduleImmediateClosure","_scheduleImmediateJsOverride","_scheduleImmediateWithSetImmediate","_scheduleImmediateWithTimer","_secureRandom","_setTableEntry","_stringFromUint8List","_threeDigits","_throw","_trimPaddingChars","_twoDigits","_wrapJsFunctionForAsync_closure","_writeAll","addErasedTypes","addRules","alternateTagFunction","applyFunction","async__AsyncRun__scheduleImmediateJsOverride$closure","async__AsyncRun__scheduleImmediateWithSetImmediate$closure","async__AsyncRun__scheduleImmediateWithTimer$closure","async___nullDoneHandler$closure","async___nullErrorHandler$closure","async___startMicrotaskLoop$closure","bind","bool","checkNotNegative","checkValidRange","collectArray","combine","compose","create","cspForwardCall","cspForwardInterceptedCall","current","dartify_convert","decodeChunk","defaultStackTrace","dispatchRecordsForInstanceTags","double","encodeChunk","eval","evalInEnvironment","evalRecipe","extractPattern","extractStackTrace","filled","findErasedType","findRule","finish","fixed","forType","forwardCallTo","forwardInterceptedCallTo","fromCharCodes","fromMessage","fromTearOff","functionNoSuchMethod","getDay","getHours","getInterceptor$","getInterceptor$asx","getInterceptor$ax","getInterceptor$x","getMilliseconds","getMinutes","getMonth","getSeconds","getTagFunction","getTrackCryptor_closure","getYear","handleArguments","handleDigit","handleExtendedOperations","handleIdentifier","handleTypeArguments","hash","indexToType","initHooks_closure","initNativeDispatchFlag","int","interceptorOf","interceptorsForUncacheableTags","iterableToFullSt
ring","iterableToShortString","jsify__convert","keyProviders","lazyAsJsDate","main__closure","main_closure","mapToString","markFixed","newArrayOrEmpty","noSuchMethodPattern","notClosurePattern","nullCallPattern","nullLiteralCallPattern","nullLiteralPropertyPattern","nullPropertyPattern","num","objectAssign","objectTypeName","of","parse","participantCryptors","promiseToFuture_closure","prototypeForTagFunction","provokeCallErrorOn","provokePropertyErrorOn","range","receiverOf","root","safeToString","stringFromNativeUint8List","throwWithStackTrace","toStringVisiting","toType","toTypes","toTypesNamed","trySetStackTrace","undefinedCallPattern","undefinedLiteralCallPattern","undefinedLiteralPropertyPattern","undefinedPropertyPattern","unsetCryptorParticipant_closure","value","view","withInvocation","withLength","$add","$and","$div","$eq","$ge","$gt","$index","$indexSet","$le","$lt","$mod","$mul","$negate","$or","$shl","$shr","$sub","$tdiv","$xor","%","*","==","[]","[]=","_addEventError","_captured_T_1","_captured__convertedObjects_0","_captured_arguments_2","_captured_bodyFunction_0","_captured_completer_0","_captured_data_1","_captured_decryptFrameInternal_3","_captured_dispatch_1","_captured_div_1","_captured_e_1","_captured_f_1","_captured_getTag_0","_captured_getUnknownTag_0","_captured_handleMessage_0","_captured_hasError_2","_captured_headerLength_5","_captured_ivLength_6","_captured_iv_3","_captured_joinedResult_0","_captured_namedArgumentList_1","_captured_originalSource_1","_captured_protected_0","_captured_prototypeForTag_0","_captured_s_2","_captured_sb_1","_captured_sourceResult_1","_captured_span_2","_captured_srcFrame_4","_captured_target_1","_captured_this_0","abs","add","addAll","argumentCount","asUint8List","bindCallback","bindCallbackGuarded","buffer","call","callback","ceilToDouble","checkGrowable","children","close","code","codeUnitAt","codec","comma","complete","completeError","config","consecutiveSifCount","contains","containsKey","convert","count","
createBuffer","cryptoKeyRing","currentKeyIndex","currentkeySet","dart:_interceptors#_addAllFromArray","dart:_interceptors#_codeUnitAt","dart:_interceptors#_current=","dart:_interceptors#_index","dart:_interceptors#_isInt32","dart:_interceptors#_iterable","dart:_interceptors#_length","dart:_interceptors#_shlPositive","dart:_interceptors#_shrBothPositive","dart:_interceptors#_shrOtherPositive","dart:_interceptors#_shrReceiverPositive","dart:_interceptors#_tdivFast","dart:_interceptors#_tdivSlow","dart:_interceptors#_toListFixed","dart:_interceptors#_toListGrowable","dart:_internal#_buffer","dart:_internal#_current=","dart:_internal#_f","dart:_internal#_grow","dart:_internal#_index","dart:_internal#_iterable","dart:_internal#_iterator","dart:_internal#_length","dart:_internal#_message","dart:_internal#_name","dart:_internal#_source","dart:_js_helper#_addHashTableEntry","dart:_js_helper#_arguments","dart:_js_helper#_argumentsExpr","dart:_js_helper#_box_0","dart:_js_helper#_captured_arguments_2","dart:_js_helper#_captured_getTag_0","dart:_js_helper#_captured_getUnknownTag_0","dart:_js_helper#_captured_namedArgumentList_1","dart:_js_helper#_captured_prototypeForTag_0","dart:_js_helper#_cell","dart:_js_helper#_containsTableEntry","dart:_js_helper#_current=","dart:_js_helper#_deleteTableEntry","dart:_js_helper#_elements","dart:_js_helper#_exception","dart:_js_helper#_expr","dart:_js_helper#_first","dart:_js_helper#_getBucket","dart:_js_helper#_getTableBucket","dart:_js_helper#_getTableCell","dart:_js_helper#_index","dart:_js_helper#_interceptor","dart:_js_helper#_internalName","dart:_js_helper#_irritant","dart:_js_helper#_jsIndex","dart:_js_helper#_keys","dart:_js_helper#_kind","dart:_js_helper#_last","dart:_js_helper#_length","dart:_js_helper#_map","dart:_js_helper#_memberName","dart:_js_helper#_message","dart:_js_helper#_method","dart:_js_helper#_modifications","dart:_js_helper#_modified","dart:_js_helper#_name","dart:_js_helper#_namedArgumentNames","dart:_js_helper#_newH
ashTable","dart:_js_helper#_newLinkedCell","dart:_js_helper#_next","dart:_js_helper#_nums","dart:_js_helper#_pattern","dart:_js_helper#_previous","dart:_js_helper#_receiver","dart:_js_helper#_removeHashTableEntry","dart:_js_helper#_rest","dart:_js_helper#_setKeys","dart:_js_helper#_setTableEntry","dart:_js_helper#_strings","dart:_js_helper#_target","dart:_js_helper#_trace","dart:_js_helper#_typeArgumentCount","dart:_js_helper#_unlinkCell","dart:_js_helper#_values","dart:_native_typed_data#_checkMutable","dart:_native_typed_data#_checkPosition","dart:_native_typed_data#_data","dart:_native_typed_data#_getUint32","dart:_native_typed_data#_invalidPosition","dart:_native_typed_data#_isUnmodifiable","dart:_native_typed_data#_nativeBuffer","dart:_native_typed_data#_setInt8","dart:_native_typed_data#_setRangeFast","dart:_native_typed_data#_setUint32","dart:_rti#_as","dart:_rti#_bind","dart:_rti#_bindCache","dart:_rti#_cachedRuntimeType","dart:_rti#_canonicalRecipe","dart:_rti#_dynamicCheckData","dart:_rti#_eval","dart:_rti#_evalCache","dart:_rti#_is","dart:_rti#_isSubtypeCache","dart:_rti#_kind","dart:_rti#_message","dart:_rti#_named","dart:_rti#_optionalPositional","dart:_rti#_precomputed1","dart:_rti#_primary","dart:_rti#_requiredPositional","dart:_rti#_rest","dart:_rti#_rti","dart:_rti#_specializedTestResource","dart:async#_add","dart:async#_addEventError","dart:async#_addListener","dart:async#_addPending","dart:async#_addStreamState","dart:async#_asyncComplete","dart:async#_asyncCompleteError","dart:async#_asyncCompleteWithValue","dart:async#_box_0","dart:async#_box_1","dart:async#_callOnCancel","dart:async#_canFire","dart:async#_cancelFuture","dart:async#_captured_bodyFunction_0","dart:async#_captured_callback_0","dart:async#_captured_callback_1","dart:async#_captured_data_1","dart:async#_captured_dispatch_1","dart:async#_captured_div_1","dart:async#_captured_e_1","dart:async#_captured_error_0","dart:async#_captured_error_1","dart:async#_captured_f_1","dart:async#_cap
tured_future_1","dart:async#_captured_hasError_2","dart:async#_captured_joinedResult_0","dart:async#_captured_listener_1","dart:async#_captured_originalSource_1","dart:async#_captured_protected_0","dart:async#_captured_s_2","dart:async#_captured_sourceResult_1","dart:async#_captured_span_2","dart:async#_captured_stackTrace_1","dart:async#_captured_stackTrace_2","dart:async#_captured_target_1","dart:async#_captured_this_0","dart:async#_captured_this_1","dart:async#_captured_value_1","dart:async#_chainForeignFuture","dart:async#_chainFuture","dart:async#_chainSource","dart:async#_checkState","dart:async#_clearPendingComplete","dart:async#_cloneResult","dart:async#_complete","dart:async#_completeError","dart:async#_completeWithResultOf","dart:async#_completeWithValue","dart:async#_controller","dart:async#_createSubscription","dart:async#_doneFuture","dart:async#_error","dart:async#_errorTest","dart:async#_eventScheduled","dart:async#_eventState","dart:async#_expectsEvent","dart:async#_firstSubscription=","dart:async#_forEachListener","dart:async#_future","dart:async#_handle","dart:async#_hasError","dart:async#_hasOneListener","dart:async#_hasPending","dart:async#_hasValue","dart:async#_ignoreError","dart:async#_isCanceled","dart:async#_isChained","dart:async#_isComplete","dart:async#_isEmpty","dart:async#_isFiring","dart:async#_isInputPaused","dart:async#_isPaused","dart:async#_lastSubscription","dart:async#_mayAddEvent","dart:async#_mayAddListener","dart:async#_mayComplete","dart:async#_mayResumeInput","dart:async#_newFutureWithSameType","dart:async#_next=","dart:async#_nextListener","dart:async#_onData","dart:async#_onDone=","dart:async#_onError","dart:async#_onListen","dart:async#_onMicrotask","dart:async#_onPause","dart:async#_onResume","dart:async#_onValue","dart:async#_once","dart:async#_pending","dart:async#_prependListeners","dart:async#_previous=","dart:async#_recordPause","dart:async#_recordResume","dart:async#_removeAfterFiring","dart:async#_removeListener",
"dart:async#_removeListeners","dart:async#_resultOrListeners","dart:async#_reverseListeners","dart:async#_scheduleMicrotask","dart:async#_sendData","dart:async#_setChained","dart:async#_setError","dart:async#_setErrorObject","dart:async#_setPendingComplete","dart:async#_setValue","dart:async#_state","dart:async#_stateData","dart:async#_subscribe","dart:async#_subscription","dart:async#_thenAwait","dart:async#_tick","dart:async#_toggleEventId","dart:async#_whenCompleteAction","dart:async#_zone","dart:collection#_addHashTableEntry","dart:collection#_box_0","dart:collection#_captured_result_1","dart:collection#_computeHashCode","dart:collection#_computeKeys","dart:collection#_containsKey","dart:collection#_current=","dart:collection#_findBucketIndex","dart:collection#_get","dart:collection#_getBucket","dart:collection#_keys","dart:collection#_length","dart:collection#_map","dart:collection#_nums","dart:collection#_offset","dart:collection#_remove","dart:collection#_removeHashTableEntry","dart:collection#_rest","dart:collection#_set","dart:collection#_strings","dart:convert#_alphabet","dart:convert#_encoder","dart:convert#_state","dart:convert#_urlSafe","dart:core#_arguments","dart:core#_box_0","dart:core#_captured_sb_1","dart:core#_contents","dart:core#_enumToString","dart:core#_errorExplanation","dart:core#_errorName","dart:core#_existingArgumentNames","dart:core#_hasValue","dart:core#_memberName","dart:core#_microsecond","dart:core#_name","dart:core#_namedArguments","dart:core#_receiver","dart:core#_stackTrace","dart:core#_value","dart:core#_writeString","dart:js_util#_captured_T_1","dart:js_util#_captured__convertedObjects_0","dart:js_util#_captured_completer_0","dart:math#_buffer","dart:math#_getRandomBytes","dartException","day","decode","decodeFunction","decoder","decrypted","decryptionFailure","decryptionSuccess","deriveKeys","discardFrameWhenCryptorNotReady","elementAt","enabled","encode","encodeFunction","encoder","encryptionKey","end","endsWith","enqueueFrame
","error","errorCallback","errorZone","exportKey","failureTolerance","fine","finer","first","firstPendingEvent","floorToDouble","forEach","frameType","fullName","future","getKeySet","getParticipantKeyHandler","getRange","getSharedKeyHandler","getUint32","getUnencryptedBytes","handleError","handleNext","handleUncaughtError","handleValue","handleWhenComplete","handlesComplete","handlesError","handlesValue","hasErrorCallback","hasErrorTest","hasValidKey","hashCode","hashMapCellKey","hashMapCellValue","hour","id","inSameErrorZone","index","indexable","info","initialKeyIndex","initialKeySet","internalComputeHashCode","internalContainsKey","internalFindBucketIndex","internalGet","internalRemove","internalSet","invalidValue","isAccessor","isClosed","isEmpty","isGetter","isLoggable","isNotEmpty","isScheduled","isSifAllowed","isSync","isUndefined","isUtc","iterator","join","keyHandler","keyOptions","keyProviderOptions","keyRingSze","keys","kind","lastError","lastIndexOf","lastPendingEvent","lastSifReceivedAt","length","lengthInBytes","level","listen","listener","listenerHasError","listenerValueOrError","listeners","log","loggerName","makeIv","map","matchAsPrefix","matchTypeError","matchesErrorTest","material","memberName","message","microsecond","millisecond","millisecondsSinceEpoch","minute","modifiedObject","month","moveNext","name","namedArguments","names","next","nextInt","noSuchMethod","object","offset","offsetInBytes","onCancel","onListen","onRecord","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_#FrameCryptor#kind#A","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_box_0","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_box_1","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_decryptFrameInternal_3","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_headerLength_5","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_ivLength_6","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_iv_3","packa
ge:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_srcFrame_4","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_this_2","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_enabled","package:dart_webrtc/src/e2ee.worker/e2ee.keyhandler.dart#_#ParticipantKeyHandler#cryptoKeyRing#A","package:dart_webrtc/src/e2ee.worker/e2ee.keyhandler.dart#_decryptionFailureCount","package:dart_webrtc/src/e2ee.worker/e2ee.keyhandler.dart#_hasValidKey","package:dart_webrtc/src/e2ee.worker/e2ee.worker.dart#_captured_handleMessage_0","package:dart_webrtc/src/e2ee.worker/e2ee.worker.dart#_captured_trackId_0","package:logging/src/logger.dart#_captured_name_0","package:logging/src/logger.dart#_children","package:logging/src/logger.dart#_controller","package:logging/src/logger.dart#_getStream","package:logging/src/logger.dart#_level","package:logging/src/logger.dart#_levelChangedController","package:logging/src/logger.dart#_publish","padLeft","parent","participantIdentity","participantKeys","perform","positionalArguments","postMessage","putIfAbsent","ratchet","ratchetCount","ratchetKey","ratchetMaterial","ratchetSalt","ratchetWindowSize","readFrameInfo","recordSif","recordUserFrame","registerBinaryCallback","registerCallback","registerUnaryCallback","remainder","remove","removeLast","reset","resetKeyStatus","result","run","runBinary","runGuarded","runUnary","runUnaryGuarded","runtimeType","schedule","second","sendCounts","sequenceNumber","setEnabled","setInt8","setKey","setKeyIndex","setKeySetFromMaterial","setParticipant","setRange","setSharedKey","setSifTrailer","setUint32","setupTransform","sharedKey","sharedKeyHandler","shouldChain","sifGuard","sifSequenceStartedAt","skip","source","ssrc","stackTrace","start","startsWith","state","storedCallback","stream","sublist","substring","take","then","time","timestamp","toBytes","toInt","toList","toLowerCase","toRadixString","toString","trackId","truncateToDouble","uncryptedMagicBytes","unsetParticipant","updateCodec","us
erFramesSinceSif","variableName","warning","where","worker","write","writeAll","year","zone","Rti._unstar","isTopType","_Universe._canonicalRecipeOfStar","_Universe._canonicalRecipeOfQuestion","_Universe._canonicalRecipeOfFutureOr","_Universe._canonicalRecipeOfBinding","_Universe._canonicalRecipeOfGenericFunction","Error._stringToSafeString","_HashMap._set","_Base64Encoder.createBuffer","DateTime.fromMillisecondsSinceEpoch","SifGuard.recordSif","SifGuard.isSifAllowed",">=","ByteBufferToJSArrayBuffer|get#toJS","JSAnyUtilityExtension|dartify","JSNumberToNumber|get#toDartInt","JSObjectUnsafeUtilExtension|getProperty","JSPromiseToFuture|get#toDart","NullableObjectUtilExtension|jsify","_","_addListener","_asCheck","_buffer","_callConstructorUnchecked1","_callMethodUnchecked0","_callMethodUnchecked1","_callMethodUnchecked2","_callMethodUnchecked3","_canonicalRecipeOfBinding","_canonicalRecipeOfFunction","_canonicalRecipeOfFunctionParameters","_canonicalRecipeOfFutureOr","_canonicalRecipeOfGenericFunction","_canonicalRecipeOfInterface","_canonicalRecipeOfQuestion","_canonicalRecipeOfRecord","_canonicalRecipeOfStar","_chainSource","_checkMutable","_cloneResult","_complete","_completeError","_computeHashCode","_computeIdentityHashCodeProperty","_containsTableEntry","_create1","_create2","_create3","_createBindingRti","_createFunctionRti","_createGenericFunctionParameterRti","_createInterfaceRti","_createLength","_createRecordRti","_createSubscription","_createTerminalRti","_createTimer","_error","_errorTest","_expectsEvent","_failedAsCheckError","_findRule","_future","_getBindCache","_getBindingArguments","_getBindingBase","_getBucket","_getCachedRuntimeType","_getEvalCache","_getFunctionParameters","_getFutureOrArgument","_getGenericFunctionBase","_getGenericFunctionBounds","_getGenericFunctionParameterIndex","_getInterfaceName","_getInterfaceTypeArguments","_getIsSubtypeCache","_getKind","_getNamed","_getOptionalPositional","_getPrimary","_getPropertyTrustType","_getQuesti
onArgument","_getRandomBytes","_getRecordFields","_getRecordPartialShapeTag","_getRequiredPositional","_getReturnType","_getRuntimeTypeOfArrayAsRti","_getSpecializedTestResource","_getStarArgument","_getTableBucket","_getTableCell","_grow","_handleIEtoString","_hasError","_hasOneListener","_hasPending","_hasTableEntry","_hasTimer","_installRti","_internal","_isCanceled","_isChained","_isCheck","_isClosure","_isComplete","_isDartObject","_isEmpty","_isFiring","_isInputPaused","_isSubtypeUncached","_keysFromIndex","_lookupAnyRti","_lookupDynamicRti","_lookupErasedRti","_lookupFutureRti","_lookupNeverRti","_lookupVoidRti","_mayAddListener","_mayComplete","_mayResumeInput","_name","_named","_newFutureWithSameType","_now","_objectToString","_ofArray","_onError","_onValue","_parseRecipe","_pow2roundup","_recipeJoin","_registerDataHandler","_registerDoneHandler","_removeListener","_removeListeners","_scheduleImmediate","_set","_setAsCheckFunction","_setBindCache","_setCachedRuntimeType","_setCanonicalRecipe","_setChained","_setError","_setErrorObject","_setEvalCache","_setIsTestFunction","_setKind","_setNamed","_setOptionalPositional","_setPrecomputed1","_setPrimary","_setRangeFast","_setRequiredPositional","_setRest","_setSpecializedTestResource","_setValue","_stateBits","_statePadding","_stringToSafeString","_target","_theUniverse","_trySetStackTrace","_unstar","_validate","_whenCompleteAction","_withValueChecked","_writeOne","_writeString","_zone","allocate","apply","arrayAt","arrayConcat","arrayLength","arraySplice","asBool","asInt","asRti","asRtiOrNull","asString","as_Type","broadcast","charCodeAt","collectNamed","constructorNameFallback","dateNow","dispatchRecordExtension","dispatchRecordIndexability","dispatchRecordInterceptor","dispatchRecordProto","environment","erasedTypes","evalCache","evalTypeVariable","fieldADI","fieldNI","fromList","fromMillisecondsSinceEpoch","getDispatchProperty","getIndex","getLegacyErasedRecipe","getLength","getName","getProperty","getRun
timeTypeOfInterceptorNotArray","handleNamedGroup","handleOptionalGroup","handleStartRecord","hash2","identityHashCode","instanceTypeName","interceptorFieldName","interceptorsByTag","isArray","isDigit","isIdentical","isJavaScriptSimpleObject","isRequired","jsHasOwnProperty","jsonEncodeNative","leafTags","listToString","lookupSupertype","lookupTypeVariable","mapGet","mapSet","markFixedList","markGrowable","markUnmodifiableList","normalize","now","objectKeys","objectToHumanReadableString","pop","position","pow","printToConsole","propertyGet","provokeCallErrorOnNull","provokeCallErrorOnUndefined","provokePropertyErrorOnNull","provokePropertyErrorOnUndefined","push","pushStackFrame","receiverFieldName","recipe","secure","sharedEmptyArray","stack","staticInteropGlobalContext","stringIndexOf","stringLastIndexOfUnchecked","stringSafeToString","stringSplit","thenAwait","toGenericFunctionParameter","tryStringifyException","typeRules","typed","universe","unmangleGlobalNameIfPreservedAnyways","unsafeCast","unvalidated","writeFinalChunk","zoned"], + "mappings": 
"A;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;sBA4FAA;MA6BEA,gEAEFA;K;wBASAC;;uBApDSA,KACiBA;MAsDxBA;aACMA;UACFA;yBAzDGA,KACiBA;;MA6DxBA;sBAhB6BA;QAkB3BA;UAAoBA,aAnBaA,EA0ErCA;QAtDIA;UAAmBA,aAsDvBA;QArDsBA;QAClBA;UACEA,aAvB+BA,EA0ErCA;kBAxEmCA;UA8B7BA,sBAAMA,kDAA4CA,IAD3BA;;2BAOTA;;QAEdA;;cAuCGC;;UCqpFAC,yCADgBA;kCD9oFjBF;;MA7CNA;QAAyBA,kBAkC3BA;MA9BgBA;MACdA;QAAyBA,kBA6B3BA;MAvBEA;QAIEA,QAHcA,2BAsBlBA;MAjBcA;MACZA;QAEEA,QAIcA,8BAUlBA;;QAPIA,QAHcA,8BAUlBA;MALEA;cAUOG;;UCqpFAD,yCADgBA;QCzxFvBC,iDF+HOH;QAFLA,QAEKA,gCACTA;;MADEA,QAAOA,gCACTA;K;yBG/KUI;MAWNA;QACEA,sBAAiBA;MAEnBA,OAAOA,4BAAqBA,uBAC9BA;K;6BA2EQC;MACkCA;;MAAtCA,SAAoEA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;sBCmE7DC;MACFA;MACAA;MACPA,wBACFA;K;qBAEWC;MACFA;MACAA;MACPA,gDACFA;K;oBA+oBAC;MAIAA,YACFA;K;sBA0SKC;MACHA;iBAAoBA,iBAAiBA,gBAArCA;wBAAoBA,iBACIA;UAAsBA,WAGhDA;MADEA,YACFA;K;iCCrwBUC;MACOA;QACXA,OAsBJA,sIAnBAA;MADEA,OAGFA,wGAFAA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBJ9RKC;0BKhFOA,mBACLA;MLiFPA;QAAuBA,gBAGzBA;MADEA,mBACFA;K;iBAuBKC;MACHA;;uBDF0CA;QCIxCA;UAAoBA,aAGxBA;;MADEA,OAAcA,oDAChBA;K;KAEOC;MACLA;;QAAqBA,YAmBvBA;MAlBEA;QACEA;UAEEA,iBAeNA;aAbSA;QACLA,aAYJA;WAXSA;QACLA,cAUJA;WATSA;QACLA,aAQJA;MANeA;MAKbA,aACFA;K;6BA2HaC;;oBAELA;;QAUFA;mBATUA;MACZA;;QAgJOC;;MA5IPD,WACFA;K;6BA0IcC;MACZA,iDACFA;K;oCAOcC;MACRA;MMgnBCA,uBNhnBuBA;QAG1BA,sBM4mBMA,6BNxkBVA;MAjCoBA;MAGPA,qBAFgBA,yCACAA;QCvLtBA,gBACHA;QDyMAA;UAAwCA,mBAY5CA;6BAXsBA;QAClBA;wCACwBA;UACtBA;YAEEA,sBAMRA;;;MADEA,OM0kBKA,eADGA,6BNxkBVA;K;2BAecC;MACkCA;QAC5CA,OAAOA,qBAcXA;MAZEA;QACEA,OAs2EGC,sBA31EPD;MAPWA;QAAPA,2BAOJA;MADEA,yBAvBcA,yCAwBhBA;K;wCAyFcE;MAGZA;MACSA,kDAD8CA;QACrDA,iDAcJA;MAXEA;QACkBA;QAOZA;;;MAENA,
aACFA;K;2BA4HOC;;yCOljB2BA;MPujBhCA,eAAOA,KACTA;K;sBAmBWC;MACTA,eAAiBA,SAC4BA,2DACHA,qDAC5CA;K;uBAKWC;MACTA,eAAiBA,SAC4BA,wDACHA,kDAC5CA;K;qBAKWC;MACTA,eAAiBA,SAC6BA,uDACHA,iDAC7CA;K;uBAKWC;MACTA,eAAiBA,SAC8BA,wDACHA,kDAC9CA;K;yBAKWC;MACTA,eAAiBA,SACgCA,0DACHA,oDAChDA;K;yBAKWC;MACTA,eAAiBA,SACgCA,0DACHA,oDAChDA;K;8BAKWC;MACTA,eAAiBA,SAEoCA,+DACFA,yDACrDA;K;mCA2BOC;MAEDA;;MAMFA;MAiBkDA;QAlBlDA,oCAAqCA;MACrCA;QAGKA;kDQjzBWA;QRmzBhBA,4BAAuBA;MAWzBA,OAAOA,6BAvoBTC,0BAwoBMD,mDACNA;K;4BAiCOE;MAGLA;MAAwBA;qDQp2BNA;;QRo2BiBA;MAAnCA;2CAGgCA;QAC9BA;UAGWA;YAAPA,yBAiDRA;eA/CWA;UAGIA;YAAPA,+CA4CRA;eA1CWA;UAGIA;YAAPA,uEAuCRA;eApCWA;UAGIA;YAAPA,+FAiCRA;eA9BWA;UAGIA;YAAPA,uHA2BRA;eAxBWA;UAGIA;YAAPA,+IAqBRA;0BAPiBA;QACbA;UACEA,OAAOA,4CAKbA;;MADEA,OAAOA,kFACTA;K;oCAEOC;MAIqBA;gFAGLA;kCAMSA;0CAEDA;MAG7BA;QACEA,OAAOA,wEAuGXA;sCApG6BA;MAGKA;MAKDA;MAEbA;8BAEdA;MACJA;gCAGeA;MAGfA;QAIWA,4CQ38BOC;UR28BdD,+EA6ENA;QA3EIA;UACEA,OAAOA,uCA0EbA;QAxEIA,OAAOA,wEAwEXA;;MArEkDA;QAMrCA,4CQz9BOC;URy9BdD,+EA+DNA;6DA5DyBA;QAErBA;UAEEA,OAAOA,8DAwDbA;QAtDIA;UACyBA;UAEvBA;YAEmBA;UAEnBA;;QAEFA,OAAOA,uCA6CXA;;QAzCIA;UAGEA,OAAOA,wEAsCbA;QAnCIA;UAEmBA;QAGPA;QACZA;wBACEA;wCACqBA,iBADrBA;YAGWA,KAk4EyBA;cAl4EhCA,+EAyBVA;YAvBQA;;;wBAIFA;;YACMA;cACFA;cACAA,oCAAcA;;0CAEKA;cAEVA,KAq3EuBA;gBAr3E9BA,+EAYZA;cAVUA;;;UAKKA,2BQphCGA;YRohCVA,+EAKRA;;QAFIA,OAAOA,uCAEXA;;K;gCAEmBE;yBACHA;MACdA;QAAqBA,WAEvBA;MADEA,OAAOA,gCACTA;K;+BAEYC;MACNA;eAAUA;QAEFA;;QAEyBA;;IAEvCA,C;OAOFC;MACEA,sBAAMA;IACRA,C;SAQAC;MACEA;QAA+BA;MAC/BA,sBAAMA;IACRA,C;sBAKMC;MACJA;;QAAmBA,OSj6BnBA,4CT46BFA;MAVMA,mBAAmBA;MAIvBA;QACEA,OAAkBA,wDAKtBA;MADEA,OAAkBA,+BACpBA;K;sBAKMC;MAIJA;QACEA,OAAkBA,oDAYtBA;MAVEA;QAIEA;UACEA,OAAkBA,oDAKxBA;MADEA,OSj8BAA,2CTk8BFA;K;sBAOcC;MACZA,OS18BAA,6CT28BFA;K;iBAiCAC;MAEEA,OAAOA,6BADSA,gBAElBA;K;8BAGAC;MACEA;;QS/iCIA;;;MTmjCJA;QAKEA;;;QAgBKC;MAPPD,cACFA;K;mBAGAC;MAGEA,yBAAOA,eACTA;K;mBAOMC;MAEJA,MAAyBA;IAC3BA,C;8BAEMC;MACJA,MAAyBA;IAC3BA,C;6BAYMC;MAKMA;;QAAIA;;;MAEEA;MAChBA,6BACIA;IACNA,C;iCAGMC;MAEGA;MAGPA;QA8CkBA;;oJA3CFA;2
BACIA;QACNA;QACZA;UAIgBA;UACNA;;yBAGEA;;wFAMEA,UAEPA;MAMHA;;MAFWA;MASjBA;QAEcA;WACPA;QAEOA;QADFA;;;MAQZA,OS5uBAA,kGT6uBFA;K;oCAuBAC;MACEA,sBAAMA;IACRA,C;mCAqJSC;MAULA;MAIUA,iCAJAA;MAUNA;MACJA;QAA2BA;MAKXA;MACIA;MACTA;MACEA;MACEA;MAiBfA,OArHFA,+SAyGmBA,uHAcnBA;K;uCAMcC;MAmDZA,OAReA;;;;;;;OAQRA,YACTA;K;2CAkCcC;MASZA,OAPeA;;;;;;OAORA,YACTA;K;wBA8CAC;;8BACuCA;MADvCA,gEAGiCA,UAHjCA;IAGuEA,C;mBA+ClEC;MAGLA;;QACEA,OA7BFA,2CA2CFA;;QAVWA,OAAsBA;QAA7BA,yCAA6BA,0BAUjCA;;MANEA;QAA6CA,SAM/CA;MAJEA;QACEA,OAAOA,uBAAmBA,eAG9BA;MADEA,OAAOA,6BACTA;K;kBAKOC;MACKA;iBACeA;;MAKzBA,YACFA;K;2BAEOC;MACLA;;QACEA,SAqGJA;kBAjGgBA;;mBAMCA;QAKKA;QACMA;UAKtBA;;cAEIA,OAAOA,qBACCA,uBAAsBA,qDA6ExCA;;;cA1EgDA;cAAtCA,OAAOA,qBA5HfA,kBAsMFA;;;MArEEA;QAE8BA;QACMA;QACFA;QACOA;QACNA;QACOA;QACJA;QACOA;QACNA;QACOA;QAC/BA;QAAbA;UACEA,OAAOA,qBAAmBA,uBAAoBA,6BAwDpDA;;UAvDwBA;UAAbA;YAMEA;YAAPA,4BAA0BA,uBAAoBA,6BAiDpDA;iBAhDwBA,kDACPA,qDACAA,+CACAA,sDACAA,kDACAA,qDACAA,mDACAA;YACyBA;YAApCA,OAAOA,qBA9JXA,kBAsMFA;;;QAlCIA,OAAOA,qBAtITA,oEAwKFA;;MA9BEA;QC1zDOA;UD4zDHA,OSnrCEA,0BT+sCRA;;;;;;;SAMSA;QAvBLA,OAAOA,qBSrpDTA,oETmpDcA,kDAmBhBA;;MAbEA;QAIEA;UACEA,OSvsCEA,0BT+sCRA;MADEA,SACFA;K;yBAqBWC;MACTA;;QACEA,gBAAiBA,WAiBrBA;MAfEA;QAAuBA,OAoBvBA,4BALFA;uBAduBA;MACrBA;QAAmBA,YAarBA;MAKEA;MAVAA;;MAIAA,YACFA;K;kBAwBIC;MAEFA;QAAoBA,OAAcA,uBAMpCA;MALEA;QACEA,OAAkBA,mCAItBA;MADEA,OAAcA,uBAChBA;K;kBAsBAC;;+BA+CSA;MA1CPA;QACoCA;QACEA;QACpCA,iCAkCKA;;MAhCPA,aACFA;K;kBAuCAC;MAIaA;MAFHA;;UAEJA,OAAOA,gBAWbA;;UATMA,OAAOA,oBASbA;;UAPMA,OAAOA,0BAObA;;UALMA,OAAOA,gCAKbA;;UAHMA,OAAOA,sCAGbA;;MADEA,sBAAMA;IACRA,C;0BAIAC;6BAEiBA;MACfA;QAAkCA,gBAIpCA;MAHaA;;MAEXA,gBACFA;K;kCAEAC;MAOUA;MACRA;;yBAEYA;UADVA;;yBAGUA;UADVA;;yBAGUA;UADVA;;yBAGUA;UADVA;;yBAGUA;UAVZA;;UAYIA;;MAAJA;QACEA,OAAOA,mBA0BXA;MAXEA;;;;OAAOA,kCAWTA;K;uBA4BSC;;8BAcDA;6BAGAA;kCAEAA;sCACqBA;yCAGrBA;gCAGAA;8BAEAA;2BAKUA;4BACKA;6BACAA;uBAOfA;QAAiEA;MA6B/DA,sCAoZEA,+CAlZFA,cAkbRA;yCA/a0CA;MAkBDA,0BAZjCA;;UAEIA;;;;;;;MAmBNA;MAAJA;QAEMA;;;QAWgBA;;MAJlBA;;MAOJA,yDAAgCA,SAAhCA;0BACiB
A;QAGfA;2BAESA;UASaA;UAAUA;;UAZdA;gCAMKA;QAGvBA;UACEA;YAEMA;;;QAIRA;;;;+CAS+BA;4CAKQA;MAKzCA,mBACFA;K;2CAEOC;MAELA;QAEEA,mBAoBJA;MAlBEA;QAEEA;UAEEA;QAGFA;;;;SAAOA,yCAWXA;;MADEA;IACFA,C;0BAEOC;;MAiBLA;;UAEIA;;;;WAAOA,uBAuEbA;;UA7DMA;;;;WAAOA,uBA6DbA;;UAnDMA;;;;WAAOA,uBAmDbA;;UAzCMA;;;;WAAOA,uBAyCbA;;UA/BMA;;;;WAAOA,uBA+BbA;;UArBMA;;;;WAAOA,uBAqBbA;;UAVMA;;;;WAAOA,wBAUbA;;K;yBAIOC;MAELA;QACEA,OAAOA,0EA4BXA;MAxBIA,OAAOA,kCAHGA,gDA2BdA;K;qCAEOC;;;MAMLA;;UAIIA,sBAwZNA;;UAtZMA;;;;WAAOA,uCA+EbA;;UApEMA;;;;WAAOA,uCAoEbA;;UAzDMA;;;;WAAOA,uCAyDbA;;UA9CMA;;;;WAAOA,uCA8CbA;;UAnCMA;;;;WAAOA,uCAmCbA;;UAxBMA;;;;WAAOA,uCAwBbA;;UAbMA;;;;;;WAAOA,wCAabA;;K;oCAEOC;MAEEA;WA8ILA;QAA+BA;WAJ/BA;QAA4BA;uBAxIlBA;MAIHA;MAAPA,SAwBJA;K;sBAwBFC;MACEA,OAAeA,iCACjBA;K;2BAoESC;MACLA,OM5jEeC,kCAHOC,eAgDRF,sBN+gEuBA,oBACvCA;K;2BAIOG;MAAoCA,cAAQA,UAASA;K;8BAIrDC;MAAuCA,cAAQA,aAAYA;K;mCAYpDC;MA/CdA;;aAkDMA;;ME3gFGA;qBF4gFmBA,gBAA1BA;qBACaA;;UAETA,YAINA;;MADEA,sBAAMA;IACRA,C;uBA4FGC;MAEHA;QAAmBA;MACnBA,YACFA;K;eA+BKC;MACHA,sBAinBAA;IAhnBFA,C;mBAWKC;MACHA,sBAaAA;IAZFA,C;yBAoEOC;MAELA,OAAOA,IADgBA,qBAEzBA;K;kBC5xFKC;MACHA;IAOFA,C;6BAoEAC;MAESA;0BAAoBA,CAAdA;kBAIYA,+BA/HlBA;MAgIPA;QAlFAC,+BFeYC;QEmEQF,aF5BeE,EE+FrCF;;qBAlEgCA,+BAjIvBA;MAkIPA;QAAyBA,kBAiE3BA;6BA7HyBG,kBAtEhBA;MAuIPH;QACUA,sBAA6BA,CAApBA;QACjBA;oBAGuBA,+BA5IlBA;UA6IHA;YA/FJC,+BFeYC;YEgFYF,aFzCWE,EE+FrCF;;yBArDgCA,+BA9IvBA;UA+IHA;YAAyBA,kBAoD/BA;iCA7HyBG,kBAtEhBA;;;;MAqJPH;QAQEA,WAsCJA;oCAnCgBA;gBAEHA;MAEXA;QACWA;SACGA;QAxHdC,+BFeYC;QE0GVF,aFnEiCE,EE+FrCF;;MAzBEA;SACcA;QACZA,kBAuBJA;;MApBEA;QACyBA;QAlIzBC,sBA6JoBD,gCF9IRI;QEmHVJ,SF5EiCI,EE+FrCJ;;MAhBEA;QACEA,OAAOA,sCAeXA;MAZEA;QAEEA,sBAAMA;cA7GMA;QAmHWA;QAjJzBC,sBA6JoBD,gCF9IRI;QEkIVJ,SF3FiCI,EE+FrCJ;;QAFIA,OAAOA,sCAEXA;K;sBAYAK;MACcA;MAlKZJ,iCFeYI,+BEoJCA;MAEbA,kBACFA;K;0BAEAC;MAGEA,OAAOA,2FACTA;K;6BAEAC;wCACoBA;MAGTA,QApJKA;QAoJZA,4CAIJA;;QAFIA,OAAOA,oDAEXA;K;sBAgBKC;oBACSA;QAAwBA,MAGtCA;;MADEA;IACFA,C;8BAGKC;MACHA;MAAiCA;MACAA;MAEjCA;gBAzLuBA;MA+LRA;MAEfA;QACgBA;QACJA;;QACVA,oBAAyBA,SAAzBA
;oBACYA;UACyBA,SAAvBA;UACZA;YAEeA,6CADUA;YAEvBA;cAlONR,iCFeYQ;;;;;;ME+NZA,oBAAyBA,SAAzBA;kBACYA;yBACNA;gCA9RCA;;;;;;;;IAuSTA,C;aAmCKC;MAESA;iBAAcA;MAiBlBA,iCACJA,cALIA,yBAAsBA,cAFtBA,yBADsBA,cAAtBA,yBAAsBA,cADtBA,yBAAsBA,cADtBA,yBAAsBA,cAHtBA,wBAFmCA,CACvCA,cAA+CA;MAqBnDA;QACqBA;QACnBA;UAGmCA;QAA/BA;UACFA,4BAAoBA,SAApBA;sCACoBA;YAClBA;cAmBSA;;;oBAZFA;2BACOA;6BACEA;MAELA;MAEbA;MAEAA;IACNA,C;yBAEAC;MAEEA,OADeA,2BAEjBA;K;6BShJQC;6BAGeA;wBAEPA,KAGGA;MAEjBA;QAGEA,WAsBJA;MAnBEA;QACEA,gBAkBJA;MANWA,yBAFWA;QAElBA,uCAMJA;MADEA,OAAOA,oBACTA;K;wBCpJAC;+BAGMA;QACFA,OAAOA,6CAGXA;MADEA,aACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;MCgsByCC;;qBAlVpCC;MACsBA,WAM3BA;K;iCAOUD;MAA8BA,6CAA8BA;K;mCA6xB5DE;MAA+BA,OAkCUA,uBAlCyBA;K;wCAKlEC;MAGNA,yBAmCEA,wCAGAA,8CAnCJA;K;oBAgvBGC;MACHA;QACEA,sBAAMA;IAEVA,C;oBASIC;MACFA;MAAgCA;;UAEtBA;;UAC0CA;;QAHpBA;MAAhCA;QAIEA,sBAAMA;MAERA;QAAiBA,cAEnBA;MADEA,UACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;4BNz5DaC;MAIcA,kBA0kIaA;MAnkIpCA,6BAjBIA,6DAmlIyBC,2BAjkI/BD;K;8BAEWE;MA4jFPA,gBAogDkCA;MAzjIpCA,2BA3BIA,yEAmlIyBC,oBAvjI/BD;K;8BA0EYE;oBAy+HmBC;MAv+H7BD;QACEA,OAAOA,gCA0+HoBA,UAv+H/BA;MADEA,iCACFA;K;2BAqJcE;MAGZA,UA60HmCA,iBA50HrCA;K;YA+JEC;MASFA,OAAiBA,qBA3COA,6BA4C1BA;K;eA+EIC;;kBAklH6BH;MAhlH/BG;;;;;;UAMIA,UAyINA;;wBAq8GiCA;UA3kHDA;UAM1BA;YAAuDA,UAgI7DA;UA/HMA,OAAiBA,+DA+HvBA;;wBAq8GiCA;UAjkHDA;UAM1BA;YAAuDA,UAsH7DA;UArHMA,OAAiBA,mEAqHvBA;;wBAq8GiCA;UAvjHDA;UAM1BA;YAAuDA,UA4G7DA;UA3GMA,OAAiBA,mEA2GvBA;;sCAtfWA;UA8YmCA;UAMxCA;YAIEA,UA8FRA;UA7FMA,OAAiBA,6CAgiHgBC,6CAn8GvCD;;oBAq8GiCE;UA3hHLF;0BAtZjBA;UAwZsBA;UAM3BA;YAEEA,UA4ERA;UA3EMA,OAAiBA,8EA2EvBA;;kBAnhB6CG;sBAiDlCH;UA+ZmBA;UAMxBA;YAAmDA,UA6DzDA;UA5DMA,OAAiBA,6DA4DvBA;;0BAq8GiCI;UA9/GCJ;kCAhZvBA;UAwZDA;UAMJA;YAEEA,UAyCRA;UAxCMA,OAAiBA,8FAwCvBA;;sBA/bWA;yBA26HgCA;UA5gHbA;oBAq+GGK;UA99GLL;UACtBA;YAEEA,UAsBRA;UArBMA,OAAiBA,yFAqBvBA;;qBAi8GiCM;UA58G3BN;YAAmBA,UAWzBA;kCA8+GkDA;UAn/G5CA;YAAsBA,UAK5BA;UAJMA,eAINA;;UAFM
A,sBAAMA;;IAEZA,C;oBAEQO;MAQkBA;0BAk+GiBA;;MAj+GzCA;sBA07G+BA;QAx7GRA;QACrBA;UACYA;;;MAIdA,kCACFA;K;oBAEQC;MASkBA;4BA68GiBA;;MA58GzCA;uBA88GgDA;;wBAzCjBA;QAj6GRA;QACrBA;UACYA;QAEZA;;MAWFA,oCACFA;K;iCAEoBC;MASkBA;+CAhXhCA;;+CAUAA;wCA+WgCA;kCA3VhCA;2BAkWmBA;MAMvBA;QAGEA,yBAYJA;MA1ZMC;YAUSD;YAUAA;YAiBAA;MAoXbA,aACFA;K;iBAkBQE;iBAEYA;MAElBA,aACFA;K;uBAKKC;6BAEaA;MAChBA;QACEA;UACEA,OAAOA,kCAabA;QAJMA,OAuzG2BA,oBAnzGjCA;;MADEA,WACFA;K;0BAOIC;MACFA;MAAQA;+BA7CRA;UAkDeA;UACXA;YAAiBA,UAIvBA;;MADEA,OAAOA,sBACTA;K;gBAKIC;MAUOA,uBA3ETA;QA2EEA,8BASJA;MA8yGoCA;QAnzGhCA,OAAOA,4BAKXA;MADEA,OAAOA,+BADWA,0BAEpBA;K;sBAIIC;sBAiBQA,KAAwBA;;MAIlCA;QAAiBA,iBAUnBA;;QALIA,iBAKJA;MADEA,UACFA;K;iBAKIC;MAEuCA,gBAD/BA;MACVA,iEACFA;K;gCAOIC;iCACgBA;4BACNA;MACZA;QAAmBA,YAErBA;MADEA,OAAOA,0DACTA;K;oCAGIC;sDAzIFA,iEA8JYA;cAMMA,+BA7hBMA,mCA+hBpBA;;MAIJA,UACFA;K;yBASIC;;oBACUA;oBA4sGoCA;MA1sGhDA;QArgBiBA,2BA3COpB;QA+jBjBqB;QAZLD,UAGJA;;MADEA,WACFA;K;8BAOKC;MAEHA,2BADUA,wBAEZA;K;qBAyDIC;MAhFqBA,qCAhLvBC;MAmQAD;QAAyBA,kBAO3BA;MANaA;QAETA,OAolGiCA,0BAplGLA,KAIhCA;MAmnGoCA;QArnGNA,OAxDlBA,4BA0DZA;MADEA,OAAOA,sBACTA;K;qBAIKE;MAKUA,YA78BTA;MAy8BJA,uBAv8BMC,oDAw8BRD;K;sBAQME;MApwBKA;eAbKA;;MAuxBdA;QACEA,UAv9BIC,sBA2/BND,gBA/BFA;MAHgCA,qCA1pBNA;MA4oBXA,kBA78BTA;MA49BJA,iCA19BMD,8DA49BRC;K;eAuBKE;MACHA,OAAOA,oBA1oBUA,qBA3CO5B,8BAsrB1B4B;K;6BAuDKC;MAGCA;MAGKA;QAAPA,kDA6EJA;MA+2FIC;;;QAA2CA;MA17F7CD;QACEA,OAAOA,wCA0EXA;kBApnCmDA;MA4iCjDA;QACEA,OAAOA,sEAuEXA;MA3DEA;QACEA,OAAOA,0CA0DXA;oCAi5FiCnC;+BAJAI;MAj8F/B+B;QACEA,OAAOA,6CAmDXA;;;;;;;;;MA/CEA;QACEA,OAAOA,oCA8CXA;MA3CEA;yBA07FqC3B;QAn7F/B2B,aAtgCGA;iBA7FHA;UAgnCFA;YACEA,OAAOA,wDAsBfA;UAhBMA,OAAOA,oDAgBbA;;aATSA;QAoCmBA,iDAo3FWzB,oBAr6H5B2B;QA+gCPF,OAAOA,0EAOXA;;MALEA,OAAOA,8DAKTA;K;eAGKG;MA9uCMA,OAVHA;MA0vCNA,0BACFA;K;8BAgCQC;;;MAy0FJH;;;QA9zF+CG;MALjDA;;;;;QAMIA;QAFGA;;;MAhyCEA,OATHA;MAgzCNA,0BACFA;K;WAEKC;wBAm1F4BpC;;MAj1FxBoC;;;YAGEA;cACmBA,qCAi1FGxC;gBAh1FCwC,oCAg1FDtC;MAr1F/BsC,SAOFA;K;gCAGKC;MAGCA;MACJA;QAAoBA,OAAOA,kBAG7BA;MADEA,OAAOA,gBA54BiBA,eA24BRA
,mDAElBA;K;wCAQKC;MACHA;QAAoBA,WAMtBA;MADEA,OAt0CSA,IA4nIsBC,qBArzFjCD;K;sBAGKE;MAGCA;MACJA;QAAoBA,OAAOA,kBAY7BA;mBA1vCeA;MAwvCKA,uBAzjBlBA;QAsjBEA,oBAKJA;MADEA,uCACFA;K;0BAIKC;MAGCA;MACJA;QAAoBA,OAAOA,kBAoB7BA;MAdEA;QAAgDA,YAclDA;MA2yFoCA;QAvzFNA,WAY9BA;mBAtxCeA;MAoxCKA,uBArlBlBA;QAklBEA,oBAKJA;MADEA,uCACFA;K;iCAIQC;MAGFA;MACJA;QAEMA;UACFA,aAcNA;aA/4CWA;QA64CPA,aAEJA;MADEA;IACFA,C;yCAIQC;MAGFA;MACJA;QACEA,aAIJA;WA55CWA;QA05CPA,aAEJA;MADEA;IACFA,C;kBAQMC;MACJA,sBALkBA,yBADMA,yBAAgBA;IAO1CA,C;kBAsBgBC;MAIZA,OAHiCA,4CAEFA,eADfA,kGAKlBA;K;0BAOAC;;IAAqEA,C;iCAE7DC;MACNA,OAHFA,iCAGuCA,+BACvCA;K;eAaGC;MA39CMA;2BAwnIsBhD,sBAIAJ;MA9pF/BoD,gBA8pF+BlD,yBA5pFrBkD,iCArjCcA,0BA3afA,WAm+CXA;K;aAIKC;MACHA,qBACFA;K;aAIQC;MACNA;QAAoBA,aAWtBA;MADEA,sBAAiBA;IACnBA,C;UAIKC;MACHA,WACFA;K;UAIQC;MACNA,aACFA;K;YAIKC;MACHA,YACFA;K;WAIKC;MACHA,0CACFA;K;WAMKC;MACHA;QAAoBA,WAGtBA;MAFEA;QAAqBA,YAEvBA;MADEA,sBAAiBA;IACnBA,C;YAIMC;MACJA;QAAoBA,WAYtBA;MAXEA;QAAqBA,YAWvBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;YAIMC;MACJA;QAAoBA,WAItBA;MAHEA;QAAqBA,YAGvBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;aAIOC;MACLA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAoBA,aAWtBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAoBA,aAGtBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;UAIKC;MACHA,iEAEFA;K;UAIIC;;QACkBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;WAIKC;;QACiBA,aAWtBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;WAIKC;;QACiBA,aAGtBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;UAIKC;MACHA,gCACFA;K;UAIIC;MACFA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;WAIKC;MACHA;QAAoBA,aAWtBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;WAIKC;MACHA;QAAoBA,aAGtBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;aAIKC;MACHA,gCACFA;K;aAIOC;MACLA;QAAuBA,aAEzBA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAuBA,aAWzBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAuBA,aAGzBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;qBAEOC;MACEA;MACPA,uCA29EyCA,SA39EzCA;QAGMA,+BAi7EyBA;MA96E/BA,QACFA;K;sBAEOC;;iCAy6EgCpE;2BAr6H5BoE;MAogDTA;QAEEA,aAAaA,iDAmBjB
A;yBAq7E2CA;MAkBrCA;wBAlBqCA;MAh8EzCA;QACEA;QAEAA;UAAqBA;QAChBA,0BAq5EwBA;QAp5E7BA;0BAk5EmCA;QA/4EnCA;;MAEFA,eACFA;K;wBAEOC;MAKEA;MAGPA;6BA26EyCA;QAz6EvCA;UAC2BA;;6CAEWA;+BAEVA;QAC5BA;UACEA;kHAKFA;6BAEsDA;UAAOA;UAArCA;8CAAcA;4EAAdA;2BAo3EKA;yBAJA3E;UA9BcgC;YA4CI4C;;YA5CJ5C;UAh1EzC2C;YAEoBA;;QAItBA;;QA3B0BA;uBAjlDepE;+BA0ElCoE;qCAwJLA;mDA6xHqCA;qCAnxHrCA;mDAmxHqCA;wBA/vHrCA;yBA+vHqCA;MAl4EjBA;MAIxBA;QAGMA,wDAo1EyBA;MA70E/BA;QACEA;QAEAA;UAGMA,wDAu0EuBA;QAj0E7BA;;MAGFA;QACEA;QAEAA;UACEA;mBAozE6BA;YAlzE3BA;UAGEA,qCAqzEuBA,sCAFMA;;QA3yEnCA;;MAGFA;sBAEuCA;;;MAOvCA,0EACFA;K;gBAYOE;;kBAgxE0B7E;MA7wE/B6E;QAA4BA,eA+E9BA;MA9EEA;QAA6BA,gBA8E/BA;MA7EEA;QAA0BA,aA6E5BA;MA5EEA;QAA2BA,cA4E7BA;MA3EEA;QAAyBA,YA2E3BA;MAzEEA;QAWIA,OATSA,kBAywEkBjF,0BAlsEjCiF;MA1DEA;8BA4vE+BtC;QA1vElBsC;uCAsvEkB7E;QAhvE7B6E,6EAkDJA;;MA/CEA;QAEEA,qBAAmBA,kBA+uEU/E,gCAlsEjC+E;MA1CEA;QAESA,4BAwuE4BzE;QA9tEnByE,gBA3tDTA;QA6tDPA,iBAHcA,2FA+BlBA;;MAzBEA;QACEA,OAAOA,yCAwBXA;MArBEA;QACEA,OAAOA,iDAoBXA;MAjBEA;QAGEA,OAAOA,0BAgtEsBrE,8BAp4HtBqE,OAksDXA;MAPEA;gBA/wD2CpE;2BAgxDboE;QAEEA;QAAvBA;4CAAOA;QAAdA,qBAAOA,IAIXA;;MADEA,UACFA;K;kBAEOC;0BD1iEOA,mBACLA;MC2iEPA;QAAuBA,gBAEzBA;MADEA,mBACFA;K;sBAuLiBC;yBAXXC,GASAD;aAIFA;uBAbEC,GASAD;MAOFA,WACFA;K;4BAEWE;;qBAhBPA;kBAkBUA;MACZA;QACEA,OAAOA,sCAcXA;WAbSA;QAo/DsBA;QAnxDtBA;QA9NsBA;QAC3BA;;QAGgBA;QAYTC;QAVPD,iBAIJA;;QAFIA,YAEJA;K;sBAKYC;MACRA,qCA3CAA,WA2C+CA;K;4BAoCvCC;MACRA,OAAOA,8BA7EPA,WA6EiDA;K;kBAa1CC;MA6/DPA;qBA9lEAA;;MAoGFA;QAAmBA,YAIrBA;MAkEoBA,sBADGA;MAw7DrBA;MA1/DAA,UACFA;K;+BAEWC;;2BAv8DkCA;MA68D3CA;QACUA,mBA58DNA;MAy7HFA;MAz+DFA;QAAmBA,YAIrBA;MAiDoBA,sBADGA;MAw7DrBA;MAz+DAA,UACFA;K;kBAEWC;;2BAp8DkCA;MAs8D3CA;QACUA,mBAr8DNA;oCAy1H+BrF;MA4EjCqF;MA39DFA;QAAmBA,YAUrBA;MAHYA,uEAs4DmBtF,4BA76HtBsF;MA8/HPA;MAr9DAA,UACFA;K;+BAiCWC;SApyELA;SAIAA;MAwyEJA,UACFA;K;gCAqGWC;MAk0DPA;wBA9lEAA;MA+RFA;QAAmBA,YAErBA;MAn6EIC;SAmJEC;SAwLAA;MA8lEGF;MA0zDPG,QAjmEEA;MAgSFH,SACFA;K;4BASWI;MAozDPA;sBA5EiC3F;wBAlhEjC2F;MA8SFA;QAAmBA,YAMrBA;MAFIA;MA+yDFD,QAjmEEA;MA+SFC,SAKFA;K;4BAEWC;MAM
TA;;2BAotD6B7F;QAltDvB6F;;;UAE6BA;QAFjCA;UAIEA,eAQNA;;MA58EIJ;SAmJEI;SA6CAA;MA2wEGA,GAhoEHA;MAgoEJA,mDACFA;K;gCAEWC;MAkxDPA;sBA5EiC7F;wBAlhEjC6F;MAoVFA;QAAmBA,YAMrBA;MAFIA;MAywDFH,QAjmEEA;MAqVFG,SAKFA;K;gCAEWC;MAMTA;;2BA8qD6B/F;;QA5qDvB+F;;YAESA;cAELA,4CA4qDmBjG;QAhrD3BiG;UAKEA,eAoBNA;;UAjBMA,iBAiBNA;aAhBWA;iCAuqDoBnG;UAnqDrBmG,gBA+pDqB/F,yCAIAF;YAlqDvBiG,mBAWRA;;YATQA,OAAWA,8CASnBA;;;MA//EIN;SAmJEM;SA6CAA;MA8zEGA,GAnrEHA;MAmrEJA,mDACFA;K;gCAEWC;MA+tDPA;sBA5EiC/F;wBAlhEjC+F;MAuYFA;QAAmBA,YAMrBA;MAFIA;MAstDFL,QAjmEEA;MAwYFK,SAKFA;K;gCAEWC;MAMTA;;qBAt4E+CA;QAw4EzCA;UAGFA,eAYNA;aAXWA;UACLA,OAqHFA,+DA3GJA;;UARMA,iCAQNA;;MAxiFIR;SAmJEQ;SA6CAA;MAu2EGA,GA5tEHA;MA4tEJA,mDACFA;K;gDAEWC;MAsrDPA;;wBA9lEAA;MA4aFA;QAAmBA,YAMrBA;MApjFIT;SAmJEU;SA6CAA;SA2IAA;MAovEGD;MAoqDPP,QAjmEEA;MA6aFO,SAKFA;K;kCAccE;;4BA2nD2BA;MAxnDvCA;6BAilD6BA,GAFMnG;MAzkDnCmG,QACFA;K;uCAEcC;;4BA+mD2BA;MA3mDvCA;uBA6mD8CA;4BA/CfA;4CAMFA,OAFMpG;;MAtjDnCoG,QACFA;K;iCAiBWC;MAKFA;;oBAwkDgCC;QAplDnCD;MAunDFA,gBA9lEAA;MAsfFA;QAAmBA,YAMrBA;MA9nFIb;SAmJEe;SA6CAA;SAeAA;oBA8+HmCA;WA1nInCA,2BA4nI0CA;SAp3H1CA;MAo0EGF;MAolDPX,QAjmEEA;MAufFW,SAKFA;K;+BAuCWG;MACLA;cA4+CyBzG;sBAIAK;QAsD3BoG,mBAv+HKA;;QA08EyCA;QAATA;;MAhBrCA,aAq/CiCxG;MA4EjCwG,gBA9lEAA;MAgjBFA;QAAmBA,YAMrBA;MAxrFIhB;SAmJEiB;SA6CAA;SAeAA;SA4HAA;MA03EGD;MA8hDPd,QAjmEEA;MAijBFc,SAKFA;K;8BA6BWE;MALPA;;gBAghDAA,QA9lEAA;MA2lBFA;QAAmBA,YAMrBA;MAnuFIlB;SAmJEmB;SA6CAA;SAeAA;SA4HAA;MAq6EGD;MAm/CPhB,QAjmEEA;MA4lBFgB,SAKFA;K;gCAqEWE;MA5BPC;sBAv8EUA;uCA2FVC;qDA6xHqCA;uCAnxHrCA;qDAmxHqCA;0BA/vHrCA;2BA+vHqCA;;MA96CvCD;QAIIA;QAEAA;;MAKJA;QAIIA;QAEAA;;MArd6CA;MAq5D/CD,gBA9lEAA;MA8qBFA;QAAmBA,YAMrBA;MAtzFIpB;SAmJEuB;SA6CAA;SAeAA;SA4HAA;MAw/EGH;MAg6CPlB,QAjmEEA;MA+qBFkB,SAKFA;K;uCA0BWI;MAJTA;8BAw0CmChH;gBA4EjCgH,QA9lEAA;MAutBFA;QAAmBA,YAYrBA;MARIA;MAs4CFtB,QAjmEEA;MAwtBFsB,SAWFA;K;uCAEWC;MAOTA;;wBA+0CuCA;QA50CNA;QAC/BA;wBAoyC2BA;mBAJAlH;;YA5xCvBkH;;;QAGJA;UACwBA;UAMEA;UAMxBA,OAAOA,iHAcbA;;;MAp5FIzB;SAmJEyB;SA6CAA;SAeAA;MAosFGA,GAxkFHA;MAwkFJA,mDACFA;K;kBA6HcC;MAMZA,0EAeFA;K;iBA
qBWC;;uBAhB6BA;mBACDA;sBAmBnBA,gBAAlBA;QAXwCA;QAatCA;UACMA;aACCA;UACDA;aACCA;UACDA;;UAEJA;UACAA;;cAEIA;;cArBRA;cAyBQA;;cAzBRA;cA6BQA;;cA7BRA,QAkCUA,uBA/C8BA,UACCA,IAeNA;cAiC3BA;;cApCRA,QAmbiBA,qDAhcuBA,IAymCXC;cAjjCrBD;;cA3CRA,QAvrBOA,qCA0qBiCA;cA4DhCA;;cA/CRA,QA/qBOA,qCAkqBiCA;cAgEhCA;;cAnDRA,QAvqBOA,qCA0pBiCA;cAoEhCA;;cAvDRE,cATqCA;2BAgpCEA;cA5kC/BF;;cAGAA;cACAA;;cAGAA;cACAA;;yBAhFgCA;cAaxCA,QAyEoBA,+BAERA,2BAvF6BA,IAeNA,iBAPIA;cAmF/BA;;yBA5FgCA;cAaxCA,QAqFoBA,mCAERA,2BAnG6BA,IAeNA,iBAPIA;cA+F/BA;;yBAxGgCA;cAaxCA,QAiGoBA,mCAERA,2BA/G6BA,IAeNA,iBAPIA;cA2G/BA;;cAvGRA;cAAAE,cATqCA;2BAgpCEA;cA3hC/BF;;cAGAA;cACAA;;cAhHRE,cATqCA;2BAgpCEA;cAnhC/BF;;cA+hCNG,wBA5pCmCA;cA+WrCC,wBAnXwCD,UACCA;cAwmCZA;cA5lC7BC;;cAwHQJ;;cAxHRE,cATqCA;2BAgpCEA;cA3gC/BF;;cAuhCNK,wBA5pCmCA;cAsXrCC,6BA1XwCD,UACCA;cAwmCZA;cA5lC7BC;;cAgIQN;;cA+hCNO;cA/pCFA,QA4pCEA;cA5pCFA;cAAAL,cATqCA;2BAgpCEA;cAnzBhCF;cAhNCA;;cAGAA;;;;MApI2BA;MAyInCA,OAAOA,uBAzJiCA,UACCA,SAyJ3CA;K;uBAOWQ;MACLA;;sBACcA,SAAlBA;QAxJwCA;QA0JtCA;UAAyBA;QACXA;;MAzJhBA;MA4JAA,QACFA;K;4BAEWC;MAOLA;;sBACcA,SAAlBA;QAzKwCA;QA2KtCA;UACEA;YAAeA;UACHA;;UAC0BA;YOhwGKA;;YP+vG/BA;UACPA;YAGLA;;;MA6+BFA;MAz+BFA;mBAhMwCA;4BACCA;uBAwmCZ7H;mCAIAK;QA13DRwH,6CAw3DczH,UA5pBjC0H;QAztCFD;UACEA,+CAA4BA;QAsxB9BA,WApxBiBA;;QAoxBjBA;MA+LAA,QACFA;K;+BAEYE;MAEMA;mBAjNwBA;;eAgBLA;MAmMnCA;QAtMAA,WAwMwBA;;QAEXA,kCAtN4BA;oBAwmCZ/H;;YA5lC7B+H,WA+MkBA,kEAnNqBA;YA0NjCA;;YAtNNA,WAyN4BA;YACtBA;;;IAGRA,C;2BAOYC;MAjOyBA;mBAhBKA;;;MAsQxCA;QAEEA;;YAxPiCA;YA2P7BA;;YA3P6BA;YA+P7BA;;YAlQNA;YAsQMA;;;QAtQNA;MA4Q6BA;MAzQMA;MA4QnCA;;UA5QmCA;;mCA91BgBA;;;UA+mC9BA,wCAhSoBA;UA/oFvCnH;oBAUSmH;oBAUAA;oBAiBAA;UAsnFXA,WAiSgBA;UAEZA,MAoBNA;;UAvTEA,WA4SgBA,iCAkzBmBA;UA5yB/BA,MAKNA;;UAFMA,sBAAMA,oDAA8CA;;IAE1DA,C;oCAgCYC;MApVyBA;MAsVnCA;QAzVAA,WAnqBOA,qCAspBiCA;QAwWtCA,MAOJA;;MALEA;QA7VAA,WA3pBOA,qCA8oBiCA;QA4WtCA,MAGJA;;MADEA,sBAAMA,qDAA+CA;IACvDA,C;wBAEeV;MA+yBXA,+BA5pCmCA;MA+WrCA,wBAnXwCA,UACCA;MAwmCZA;MApvB7BA,YACFA;K;kBAWWW;MACTA;QAEEA,OAAiBA,wDAltCgCA,KA4tCrDA;WALSA;QACUA,WAAiCA;QAAhDA,y
DAIJA;;QAFIA,WAEJA;K;mBAEYC;;uBAowB6BA;MAlwBvCA;QAEaA,wDAkwBiCA;IA/vBhDA,C;wBAEYC;;uBA2vB6BA;MAxvBvCA;QAEaA,wDAwvBiCA;IArvBhDA,C;uBAEWC;;0BAssBoBrI;MApsB7BqI;QACEA;UAAgBA,kBAusBWhI,SAjrB/BgI;mCAhwGSA;2BAw9HgCA;QA3uBrCA;UACEA,oBAmsByBA,WAjrB/BA;QAfIA;iCAgsB2BhI;0BAJAL;aAxrB3BqI;QAAgBA,kBAWpBA;MATEA;QACEA,sBAAMA;iCAlwGDA;gCAk+HgCA;QA3tBrCA,oBAorB2BA,WAjrB/BA;MADEA,sBAAMA,mDAAsCA;IAC9CA,C;aAsDGC;;kBAp7GKA;;QAAoBA,UAApBA;MAynIJA;MAlsBJA;QAuBSA;QA8qBPA;;MAjsBFA;QAAmCA,YASrCA;MAREA;QAAkCA,WAQpCA;MADEA,WACFA;K;cAuCKC;MAiBHA;;QAA8BA,WA2OhCA;MAsSIA;;;QAjhBmCA;MAGrCA;QAA4BA,WAwO9BA;eAoUiCvI;MAziB/BuI;QAA0BA,WAqO5BA;MAlOMA;QAAmBA,YAkOzBA;YA7rHmDC;MA89GjDD;QAA+BA,WA+NjCA;MA5N0BA;MACxBA;QAGMA,+BAgiByBA,EAJA9H;UA5hB6B8H,WAwN9DA;eAoUiCvI;;MAphB/BuI;QACEA;UACEA,OAAOA,iCAshBoBzI,uBAxUjCyI;QArMIA,2EAqMJA;;;QAhMIA;UACEA,OAAOA,wBAugBoBzI,gCAxUjCyI;QAtLIA;UACEA,OAAOA,wBA6foB3I,gCAxUjC2I;QA5KIA,kBA4KJA;;MAxKEA;QACEA,OAAOA,wBA+esB3I,gCAxUjC2I;MA5JEA;QAOcA;QANZA,OAAOA,gDA2JXA;;MA9IEA;QACOA,6BAqdwBzI;UA7c3ByI,YAqINA;QAnIIA,OAAOA,uBAEDA,gEAiIVA;;MAxHEA;QAEUA;QADRA,aAEIA,wBA6byBhG,gCAxUjCgG;;MArGEA;QACMA,qCA4ayBzI;UApa3ByI,WA4FNA;QA1FIA,OAAOA,gCAIDA,uDAsFVA;;MA/EEA;QAEUA;QADRA,aAEIA,iCAoZyBhG,uBAxUjCgG;;MA/DEA;QAAsBA,YA+DxBA;MA5DiCA;;QAE7BA,WA0DJA;MAtDMA;;QAAqDA,WAsD3DA;MAjDEA;;UAC2BA,WAgD7BA;QA/CIA;UAAsCA,YA+C1CA;mBA5jHWA;;yBA26HgCA;;UAxZfA,YAyC5BA;QA8XMA;;QAlaFA;0BA4W6BA;;UAzWtBA,mEACAA;YACHA,YA+BRA;;QA3BIA,OAAOA,gCAmWsB/H,yCAxUjC+H;;MAlBEA;;UAC2BA,WAiB7BA;QAhBIA;UAA+BA,YAgBnCA;QAfIA,OAAOA,uDAeXA;;MAXEA;QACEA;UAAgCA,YAUpCA;QATIA,OAAOA,wDASXA;;MALEA;QACEA,OAAOA,qDAIXA;MADEA,YACFA;K;sBAEKE;MAWCA;MAECA,6BAyT0BlI;QAxT7BkI,YA8FJA;qBAprHWA;;uCAwJLA;;qDA6xHqCA;;MAjVzCA;QAA2DA,YAgF7DA;MA9EMA;uCAp8GAA;;qDAmxHqCA;;MArUzCA;QAEEA,YAkEJA;MAhEEA;gCAmUgDA;QAhUzCA,+CAuRwBA;UAtR3BA,YA4DNA;;MAxDEA;gCA2TgDA;QAtTzCA,+CA6QwBA;UA5Q3BA,YAkDNA;;MA9CEA;gCAiTgDA;QA5SzCA,+CAmQwBA;UAlQ3BA,YAwCNA;;0BA9/GMA;;2BA+vHqCA;;MA/RzCA;sBAsPqCA;eApPnCA;UACEA;YAA4BA,YA2BlCA;wBAwNuCA;UAjPjCA;UACAA;YAAyCA,YAwB/CA;8BAoNmCA;UAzO7BA;Y
ACEA;cAAiBA,YAoBzBA;YAnBQA;;qBAsR0CA;UAlR5CA;YAAiCA,YAevCA;qBAmQkDA;UA/QvCA,kCAsOsBA;YArOzBA,YAWRA;UAVMA;;;aAIFA;kBA0N+BA;UAzN0BA,YAK7DA;QAJMA;;MAGJA,WACFA;K;uBAEKC;;iBAsNkCtI;;aA3MrCsI;uBAn0DI1D,GASA0D;QAu0DFA;UAAkBA,YA4CtBA;QA3CIA;UA6LmCA;UA3LjCA;;sBA9dAA;QAkeFA;UAAqBA,YAqCzBA;yBA2L2CA;QAL/BA,uDAnvHcC,aAsxD6BA;QAqwDnDD;UAE+BA,qEAkLIA;QA9KnCA,OAAOA,8DA3wHAA,oBAuyHXA;;MATEA,OAAOA,mCA9xHEA,yCAuyHXA;K;yBAEKE;;uBAyLsCA;MAvKzCA;QAgCSA,iCAgGsBA;UA/FzBA,YAKRA;MADEA,WACFA;K;oBAEKC;;mBA/0HMA;;wBA88HgCA;;QAjHnBA,YAaxBA;WA2DuCvI;QArEnBuI,YAUpBA;MAREA;QAGOA,mCAkEwBA;UAjE3BA,YAINA;MADEA,WACFA;K;cAEKC;kBAuD4B9I;;;QApD3B8I;UACKA;YACmBA,kCAsDGlJ;cArDCkJ,iCAqDDhJ;MAzD/BgJ,SAKFA;K;uBAWK9G;MAA8BA;MAO/BA;;;QAA2CA;MAPZA,SAGlCA;K;kBAMI+G;kBA4B4B/I;MA1B/B+I,0FAKFA;K;uBA4CcC;MAFRA;;sBAsBqCA;MAhBvCA;kBAzBmCA;QAoC3BL;;IAPVK,C;0BAKeL;MACXA,8CAlvHoBA,aAsxD6BA,IA89DDA;K;;;;;;;;;;;;;;;;;;;;0CQ1vIpCM;MACdA;MAESA,QADLA;QACFA,+DAgCJA;cA9BMA,iCACAA;QAAiCA;QAEzBA;QACCA;;QASIA,0BACXA,yBAPYA,uEAQhBA;QAEAA,OAAOA,mEAaXA;aAJWA,QADEA;QACTA,qEAIJA;MADEA,OAAOA,uDACTA;K;0CAEYC;MAKVA,uBACIA,yBALYA;IAMlBA,C;gDAEYC;MAKVA,kBACIA,yBALYA;IAMlBA,C;yCAEYC;MACwBA;MAU3BA;IATTA,C;eA0BAC;;;;IAaAA,C;4BA0FWC;MACXA,OAjCAA,2BCgJAC,eAAyBA,gBAAzBA,2BDhJAD,sCAkCFA;K;mBAUQE;MAENA;eACUA;MACVA,gBAxBwBA,QAyB1BA;K;eASQC;MACNA;IACFA,C;gBAQQC;MACNA;IACFA,C;iBAOQC;MAENA,0BACIA,2BAAyBA;IAC/BA,C;kBASKC;MAECA;;wBAEqBA;;QASvBA;;;;UAEAA;;UCuCFA,wBAAyBA;gBAuJvBA;gBACAA;UD1LAA;;;IAEJA,C;2BAIkBC;;;;;;;;;;;;;OACAA;MAuBhBA,OAAYA,CEkVeA,0CFlVgBA,wFAG7CA;K;gCGxToBC;MAChBA;MAAUA;QACeA;QACvBA;UAAwBA,iBAG5BA;;MADEA,QAAkBA,oBACpBA;K;mBFdUC;WACMA,oBACIA;QAAYA,WAKlCA;MAH2BA,WAG3BA;K;uBAOWC;WCioBkBA,oBD/nBNA;QACDA;MAGpBA;QAGYA;UACWA;UACnBA;YDjBSA,sCCkBiBA;;;;;WDnBpBA;QACGA;MC2BbA,OElCAA,mCFmCFA;K;4BA2kBcC;;;8CAnRYA,yBAqRtBA;QA/JOA;cAgKLA;;MAEFA;QACEA,6BN3bJA,6EMiciBA;QAEbA,MA6BJA;;0BA3B2BA;aAClBA;MACPA;QAGmBA,qEAAmBA;cAjStCA,gBAA0BA;cAC1BA;QAkSEA;QACAA,MAmBJA;;MAhBWA;kBACGA;UACeA;;UADYA;;QN/czBA;MM8cdA;QAM+BA;QAC7BA,4BAAoBA;QACpBA;QACAA,MAOJA;;;MCiqCEA,
2CDpqCOA,QCoqCkCA,wBDpqCVA;IAGjCA,C;iCA0IYC;;;kHAIVA;QAAaA;eA1cQA;QAAOA;QAAeA;QA6czCA;UACEA;YA/VGA;YCy0CPA,6BDv+BmBA,kBACAA;;UAGfA,MA0KNA;;cArKoBA;gCACyBA;QACzCA;YACWA;UACTA,sCAAsBA;gBACtBA;sCACwBA;;mBAGGA;yBAAOA;cAQ/BA;cACDA;QAKkCA;iBAlrBhBA;UAkrBGA;;UAvCpBA;QAuCLA;mBAprBeA,OAAOA;UAsrBPA;mBAAWA;YAARA;;YAAHA;UAAbA;YApYGA;YCy0CPA,+BDj8BmBA,oBACAA;YAEbA,MAqIRA;;qBAjI0BA;UAApBA;;;YA4FIA;iBAxxBmBA;UA2wBvBA;YAxE+BA,yFAyE7BA;eACKA;YACLA;cA9BsBA,8EA+BpBA;iBAGFA;YAzBcA,gEA0BZA;UAKJA;;qBAIIA;;uBACAA;yCAvsBuCA,YAAsBA;;YAssB9BA;UAAnCA;YAESA;2BAGUA,SAASA;mBApmBTA;cAmNNA,uBAAUA;oBAC3BA;cACOA;oBAtEPA,YACYA,qBAAkCA;oBAC9CA,wBAA4BA;oBAwdlBA;cACAA;;cAEAA;YAKJA,MAeRA;;;uBAXqBA,SAASA;QAlaXA,uBAAUA;cAC3BA;QACOA;mBAkaAA;mBACcA;QADnBA;UA3fmBA;gBADrBA;gBACAA;;UA8feA;gBAzffA,gBAAwBA;gBACxBA;;cA4fEA;;;IAEJA,C;yBAgEOC;MACPA;MAAiBA;QACfA,OAAOA,4FAaXA;;MATmBA;QACfA,OC8wBiEA,oBDtwBrEA;MANEA,sBAAoBA;IAMtBA,C;kBG5hCKC;MACHA;oBAAiBA,gBAAjBA,wBAAuDA;;oBAEpCA;;QAEjBA;;QACOA;;IAEXA,C;uBAEKC;;;QAKDA;;;;aAIIA;UJnBJA,6CAAyBA,OIoBMA;;IAGnCA,C;0BAMKC;MAnDHA;wBAqDoCA;MACpCA;;cAEOA;UJlCLA,6CAAyBA,OImCMA;;sCAGlBA;IAGjBA,C;kCAQKC;;cACCA;MAAJA;QACEA;mCACwBA;QACxBA,MAgBJA;;MA3FEA;8BA8E4CA;MAC5CA;aACQA;;;mCAG0BA;aAC1BA;sDACeA;QAErBA;;;IAIJA,C;qBA0BKC;;uBACsBA;WACXA;QAGZA,wCAHYA;QAIZA,MAcJA;;MFwrDIA,oDAAyCA,wBEzrDPA;IACtCA,C;iCC0/EUC;MCtoDSA;MDyoDjBA,OC1oDAA,uDD0oD8BA;K;eE/0D3BC;MAC8BA,MAMnCA;K;sDD1pBkBC;;QAEEA;MACAA;QACdA,OAAOA,2FAWXA;MAPkBA;QACdA,OJ2oD+DA,8CIroDnEA;MAJEA,sBAAUA;IAIZA,C;qBAyWGC;MJgwCDA;II9vCJA,C;oBAGKC;IAAoBA,C;oBJy7BpBC;MACHA,iCAA+BA;IAGjCA,C;YAEEC;;cACmBA;MAAnBA;QAAoCA,OAAOA,UAY7CA;;MANQA;;QAEGA;QAAPA,SAIJA;;;;K;iBAEEC;;cAOmBA;MAAnBA;QAAoCA,OAAOA,aAY7CA;;MANQA;;QAEGA;QAAPA,SAIJA;;;;K;kBAEEC;;cAQmBA;MAAnBA;QAAoCA,OAAOA,oBAY7CA;;MANQA;;QAEGA;QAAPA,SAIJA;;;;K;0BAqCKC;MAS8BA;WAHlBA;QAGPA;MAKRA;IACFA,C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;2BM7wCSC;uBACOA;MAGZA,qCACFA;K;2BAEYC;MAIVA;;;;IAQFA,C;0BAoBOC;MAIOA;MAIZA;MAoKOC;MAlKPD,YACFA;K;wCAgKQC;MACNA,sCAAOA,kEdrfTA,uFcsfAA;K;sCAMQC;MACNA,Od7fFA,qFc8fAA;K;uBCxbcC;MAEZA;MAAIA;QACFA,cAwBJA;MCyXAA;;QD7YMA;QACFA;;UAEKA;QACLA,eAAUA;;;QAYVA;gDAAiBA;QAAjBA;;iBC4Z0CA;MDzZ5CA,sCACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;8BEoOWC;MAULA;;;mGAQJA;QACaA;kCAAKA;oBAALA;QACXA;QACoBA;QACpBA;QACAA;UACSA;UAA2CA;UAA1BA;wCAASA;UAAjCA;;;+CAAMA;UAAkBA;UACjBA;UAA2CA;UAA1BA;wCAASA;UAAjCA;gDAAMA;UAAkBA;UACjBA;UAA2CA;UAA1BA;wCAASA;UAAjCA;+CAAMA;UAAkBA;UACjBA;UAAqCA;UAApBA;wCAASA;UAAjCA;gDAAMA;UAAkBA;UAVfA;UALOA;;;MAoBpBA;QACEA;UAiCOA;UACAA;UAFTA;YACoDA;YAA1BA;0CAASA;YAAjCA;;;iDAAMA;YAAkBA;YAC0BA;YAA1BA;0CAASA;YAAjCA;kDAAMA;YAAkBA;YACjBA;YAAPA;kDAAMA;;YACNA;iDAAMA;;;YAG4CA;YAA1BA;0CAASA;YAAjCA;;;iDAAMA;YAAkBA;YAC0BA;YAA1BA;0CAASA;YAAjCA;kDAAMA;YAAkBA;YACjBA;YAA2CA;YAA1BA;0CAASA;YAAjCA;kDAAMA;YAAkBA;YACxBA;iDAAMA;;;UAxCJA,QAgBNA;;QAdIA,4CAcJA;;MATEA;QACaA;kCAAKA;oBAALA;QACXA;UAA4BA;QAC5BA;;MAImCA;gCAAKA;MAF1CA,sBAAoBA,yEAEsBA,qCAALA;IAEvCA,C;8BA0RWC;MAzDFA;;;;;0BA8EgCA;0GACvCA;QACaA;kCAAMA;QAANA;QACXA;QAC2BA;QAAhBA;6CAAeA;8BAAfA;QACXA;UACqCA;UACpBA;UACfA;YAESA;YAAPA;;;8CAAMA;;YACCA;YAAPA;+CAAMA;;YACCA;YAAPA;8CAAMA;;;YAbCA;;UAgBTA;eACKA;UACLA;YAAqCA;UACrCA;YACEA;cACEA,sBAAMA;YAEDA;YAAPA;;;8CAAMA;;YACNA;+CAAMA;;;YAENA;cACEA,sBAAMA;YAERA;;8CAAMA;;;UAOiBA;UACzBA;YAA2BA;UAE3BA,OAAOA,uEAcbA;;QAZIA,sBAAMA;;MAERA;QACEA,gCASJA;MALEA;QACaA;kCAAMA;QAANA;UACsBA;;MAEnCA,sBAAMA;IACRA,C;kCAOiBC;MAOIA;;uBAGCA;;MAIpBA;QACEA;MAEFA;QAAsBA,ObikByBpM,4Ba7jBjDoM;MADEA,OAAOA,oCACTA;K;oCAaWC;;;;;;MAMTA;;;;UACEA;UACWA;wCAAMA;UAANA;UACXA;YACEA;;;;UAIFA;YACEA;cAAoBA;YACpBA;YACOA;0CAAMA;YAANA;;UAETA;YACEA;cAAoBA;YACpBA;YACOA;0CAAMA;YAANA;;UAETA;YACEA;;;;UAIFA;;;MAEFA,aACFA;K;gCAoBWC;MAETA;;QAAkBA,YA0CpBA;MA9PSA;8BAwNPA;QACaA;sCAAMA;QAANA;QACXA;UACEA;YACEA;YACAA;YACAA;;UAEFA;YACEA;YACAA;YACAA;cAAkBA;YACXA;0CAAMA;YAANA;;YAEPA;;QAMJA;UAEEA;YAAqBA;UACrBA;UACAA;UACAA;YAAkBA;UACXA;wCAAMA;UAANA;;QAGTA;UAA8BA;QAC9BA
;QACAA;QACAA;UAAkBA;;MAEpBA;QACEA,sBAAMA;MAERA,2BACFA;K;;;;;;;;;;;;;;;;;;gBDznBaC;MACHA;;QAARA;MACiCA;MACjCA;MACAA;IACFA,C;oBAoCQC;MAEuCA;;MAC7CA;QAEEA;;MAMFA,aACFA;K;gBAkBQC;MAC4BA;MAAZA,SAOxBA;K;iBAOQC;MACNA;MAAaA;QAAYA,OtBpPvBC,gBANiCC,4CsBkQrCF;MALoBA;MAClBA;QACEA,8BADFA;MAGAA,WACFA;K;+BAoCQG;MAEKA;;MAkBFA;MAAPA,SAGJA;K;+BAqBcC;yBAEQA;MACpBA;QAAkBA,SAGpBA;MADEA,OAAkBA,6DACpBA;K;0BA+FcC;MACgBA;MACvBA;QAAqBA,aAa5BA;mBEpKoBA;;UFuKgCA,cAbVA;eAC7BA;;QAYuCA,cAVZA;eAC7BA;UASyCA,kCAPVA;;MAGxCA,aACFA;K;sDAgBQC;MAEJA,OASJA,kCAT6CA,6BAC1BA,sCAAgCA,gCAAeA;K;sBAyM5CC;MAAWA,+BAAsBA,YAAsBA;K;wBGvL/DC;MACDA;;MAEXA;QAAkBA,aAIpBA;MAHEA;QAAiBA,wBAGnBA;MAFEA;QAAgBA,yBAElBA;MADEA,0BACFA;K;yBAUcC;MACZA;QAAcA,aAGhBA;MAFEA;QAAaA,cAEfA;MADEA,eACFA;K;uBAEcC;MACZA;QAAaA,aAEfA;MADEA,cACFA;K;sBlBviBcC;MACgBA;QAC1BA,OAAOA,qBAMXA;MAJEA;QACEA,OTmwFGlS,sBShwFPkS;MADEA,OekLkBA,iCfjLpBA;K;6BA8BaC;MACXA;MACAA;MACAA;IACFA,C;mBAYAC;;IAA8BA,C;kBAuD9BC;;IAEqBA,C;uBAcrBC;;IAEoBA,C;oBAyDpBC;;IAG6DA,C;oBAe7DC;;IAQgEA,C;8BAuFrDC;MAUTA;QAEEA,sBAAiBA;MAEnBA;QACEA;UAEEA,sBAAiBA;QAEnBA,UAGJA;;MADEA,cACFA;K;+BAWWC;MACTA;QACEA,sBAAiBA;MAEnBA,YACFA;K;yBAsEAC;;IAMqEA,C;qBA8FrEC;;IAAqCA,C;uBAcrCC;;IAAkCA,C;eAyBlCC;;IAAwBA,C;gCAaxBC;;IAAkDA,C;uBmBpmB1CC;MAA4BA,OAOpCA,yBAPuDA;K;oBAiDjDC;;IAA8DA,C;kCC8vBtDC;MAKZA;MAAIA;QACFA;UAEEA,cAgBNA;QAdIA,6CAcJA;;MAZ+BA;MAC7BA;;QAEEA;;QAGAA,UALFA;UAKEA,gBALFA,sBAKmBA;QAAjBA,CALFA;;MLhUYA,6CAAqBA;MKuUjCA,sCAIFA;K;iCAYcC;MAKZA;MAAIA;QACFA,6CAYJA;ML1XAA;MKiXEA;;QAEEA;QLlWUA,EAAZA,wCAAsBA;;QKqWpBA,UALFA;UAKEA,gBALFA,sBAKmBA;QAAjBA,CALFA;;;iBLlV4CA;MK0V5CA,sCACFA;K;2BA0BGC;MAwB6BA;;;MAGhCA;;;QACOA;UAAeA,MAkFxBA;QAjFwBA;QACpBA;uBACeA;QACfA;;MAQGA;QACHA;UAAoCA,MAqExCA;QApEqBA;mCAAMA;QAANA;QACGA;mCAAMA;QAANA;;QAEKA;QACzBA;QACKA;UACHA;YACEA,+BAAYA;YACZA,MA4DRA;;UA1DyBA;UACCA;qCAAMA;UAANA;mCACKA;;UAEHA;UACtBA;iBAGOA,iBAAPA;YAEgBA;YACdA;YACAA;cAQEA;;;gBAEYA;2CAAMA;gBAANA,sBAAmBA;gBAC7BA;;cAEFA;cACAA,MAgCVA;;;UA7B4BA;UACHA;mCACMA,2BAA2BA;;;uBAOtCA;QAEhBA;QAfgBA;;;MAqBlBA;mCAAqCA;;QACzBA;mCA
AMA;QAANA,sBAAmBA;QAC7BA;UAEEA;UAzBcA;;;MA4BlBA;QACEA;MAEFA;MACAA;IACFA,C;eC90BaC;MAuByBA;MAAkBA;M3BP7CA,8BADAA,qBADAA,qB2BSuDA;MAA5DA,cAySJA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;4BCveFC;;oBACiBA;MACfA;QAAsBA,eAexBA;;;;;OAdYA;MAWaA;;MAEvBA,UACFA;K;yBAqBAC;MACkCA;MAAVA;MAAtBA,OPiCoBA,sDOhCtBA;K;gBAOEC;MACAA;QAEEA,QAIJA;;QAFIA,OAAOA,qCAEXA;K;kBAuDmBC;MACjBA;;QACEA,sBAAMA;;;;;OAEOA;MAWWA;MAC1BA,aACFA;K;0BA4MAC;MAC0BA;MAApBA;QAAaA,OAAOA,qBAE1BA;MADEA,OAAOA,iBACTA;K;oBC5UKC;MACDA,oBACEA,gEAGAA,yBACAA,0BACAA,iCACAA,0BACAA,2BACAA,0BACAA,2BACAA,4BACAA,4BACAA,2BACAA,qBAAWA;K;SAGTC;MACFA;QACFA,aA8BJA;MADEA,OAzBgBA,qBVuVPA,uFU9TFA,cACTA;K;cAyCEC;MAEAA,OAAOA,gCACTA;K;mBAmcUC;MjB/NRC,wBAAyBA,gBAAzBA;oBAlQIC;MiBifJF,eAbgBA,yBAAuBA,kDACzBA,yBAAuBA;MAarCA,SACFA;K;sBAsCKG;MACDA,gZA+BCA;K;WAGGC;MACFA;QACFA,QAgEJA;MADEA,OA1DeA,sBVpRNA,uFU8UFA,SACTA;K;;;;;;;;;;;;;;;;;;;;mBCjoBUC;MACUA;;oBAC4BA;;MAC9CA;QAEEA;UAA0BA;YACpBA;0CAAMA;yBAANA,sBAAoBA,0BAAwBA;;YADxBA;;;UAExBA;;QAEFA;UAEUA;QACHA;QAAPA;UAAmBA;YAAUA;YAAPA;wCAAMA;uBAANA;;YAAHA;;;UACjBA;;QAGFA;UACEA;YAAkBA,sBAAMA;;UAExBA;QAGYA;;MAEhBA,aACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0BCVEC;;uBAKiBA;MAAfA;QACEA,kBAAMA;MAMSA,8EAJIA;MARvBA;IASAA,C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;mBCnFWC;MAGaA;wDAApBA,sBAAqCA;MACzCA;QACEA,gBCkQEA,OAAUA;QJzMZA;;QCsDFA,6BAM2BC,4EE9GXD,8DFsHsBC,qBIjJlCC;QF8BFF;gDACwCA;QAGpCA;mBFqHAA,gBAA0BA;UAC5BA,gBG8HAG,OAAUH;iBH5HVA,cATkCC;;eAWpCD;eACAA;QACAA;;MEzHFA,cACFA;K;2BAEKI;MAEEA,+CADLA,sBACsBA;;UFwHpBA;IEtHJA,C;QAEKC;MAESA;;;kDAFTA;QAESA;;;;;;cAALA;oBCoF8BA;gBACjCA,kBAAMA;cAQeA,mBD7FCA;gBC8FxBA,WD9FwBA;cC0HQA,kBDzHbA;cAIrBA;cCiOIA,UAAUA;cpC2uGZA;;cgCp7GAA;gBIyMEA,UAAUA;gBJzMZA,iCGWEA;;cHXFA,4BG6WAA;;cACJA;;;MA3YcA;IA2YdA,C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;MC5XMC;;iBADIA;MACJA,8CAA2BA,mCAA0BA;K;;;;;;;;;;;;eEjDtDC;MACHA;QAEEA;QACAA,MAoBJA;;;QAdIA;QACAA,MAaJA;;MATEA;QACEA;QACAA,MAOJA;;MADEA;IACFA,C;oBC9BKC;MAEHA,6BCaAA,wEDdgBA;IAElBA,C;qBASKC;MAEHA,6BCRAA,uFD
OgBA;IAElBA,C;sCEmOKC;MACDA;;;kCACMA;UAAeA,cAGvBA;;MADEA,WACFA;K;kBC/NmBC;MACnBA;;UAEIA,OAAOA,iG9BszCsCnQ,kD8BpyCnDmQ;;UAVQA,OAAOA,8IAUfA;;UAFMA,sBAAMA;;IAEZA,C;;;;;;E3CoRiCC;OAFjBC;MAAoBA,yBAAsBA;K;gBAEhDD;MAAYA,4CAA+BA;K;cAE5CE;MAAcA,yBCqJLA,2CDrJiDA;K;kBAgBzDC;MACNA,sBAAwBA,+DAAqBA;IAC/CA,C;mBAESC;MACLA,OOyzBGA,oBADGA,qCPxzByDA;K;;EAQ9CC;cAAdA;MAAcA,uBAAgCA;K;gBAU7CC;MAAYA,iCAAwCA;K;mBAGnDC;MAAeA,sCAAmCA;K;;;;;OAWpCC;MAAEA,oBAAcA;K;cAGhCC;MAAcA,aAAMA;K;gBAEnBC;MAAYA,QAACA;K;;;;;EAmDAC;gBALbC;MAAYA,QAACA;K;mBAEZC;MAAeA,0BAAQA;K;cAGzBF;MAAcA,uBAA+BA;K;;;;;cAyB7CG;MACiCA,0BAApBA;MAClBA;QAAyBA,OAAaA,oDAExCA;MADEA,oCAAkCA,0BACpCA;K;;;EAiBqBC;gBAHbC;MAAYA,QAACA;K;cAGdD;MAAcA,uBAA+BA;K;;EAqB/BE;gBAHbC;MAAYA,QAACA;K;cAGdD;MAAcA,uBAA+BA;K;;;SG9V/CE;mDAE4BA;MAN/BA;MAMAA;IACFA,C;YAyGKC;MACHA;4DACIA;MAlHJA;MAkHeA;QACbA;QACAA,MAOJA;;MAJEA;QAEEA,cAFFA;IAIFA,C;sBAEKC;MACCA;MAAMA;iBAAMA;MAChBA;QAAcA,MAKhBA;MAJEA;QAA4BA,sBAAMA;MAClCA;QACEA;IAEJA,C;WAuBYC;;MACVA,OE8HFA,kEF9HwCA,QE8HxCA,kEF7HAA;K;eAyGEC;MACWA;;MAAXA,eAAWA,OACbA;K;cA+SOC;MAAcA,OyCxKJA,mDzCwK+BA;K;gBAahCC;MAAYA,OAiI5BA,sCAEyBA,SAnIGA,+BAiI5BA,4BAjIkDA;K;gBAE1CC;MAAYA,OAAWA,qCAAoBA;K;cAE3CC;MAAUA,sBAAiCA;K;UAsCxCC;MACLA;0CAEwBA;QAASA,sBAAMA;MAC3CA,eAAOA,OACTA;K;aAEcC;mDAQ4BA;MALpCA;0CAIwBA;QAASA,sBAAMA;MI2VtCC;IJzVPD,C;mBA2CSC;MAAeA,2BIkTdC,+BJlTyCD;K;;;;;;;eAiC7CE;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAEzBC;;kBACUA;oBAAUA;eAKnBA;QACIA;QAANA;;gBAGEA;MAAJA;QACEA;QACAA,YAKJA;;MAHEA,qBAAWA;;MAEXA,WACFA;K;gBA1BGC;;K;;;;W0ClwBCC;MACFA;;QAEEA,mBAOJA;MALEA,AAAIA;QAkEmBC,0CAECA;QAnEtBD,aAIJA;;MADEA,sBAAMA;IACRA,C;mBA6HOE;MACLA;MACAA;QACEA,sBAAiBA;MAEHA;iBAEaA;;MAAzBA;kCAAOA;MAAPA;QACFA,aAGJA;0DAKgBA;MAEdA;QAEEA,kBAAMA;gBAEmBA;;gCAAKA;oBAAvBA;MACsBA;gCAAKA;uBAALA;gBAC3BA;MAAJA;QACWA;QAGKA,cAFFA;;MAhBdA,sDACFA;K;cAqBOC;MACLA;QACEA,aAIJA;;QAFIA,oBAEJA;K;gBAEQC;MACFA;;MAGJA;QAAsBA,2BA6BxBA;MAxBiBA;MACEA;MAIJA;MAWGA;MAOhBA,kHACFA;K;QAwBkBC;MAChBA;MAGAA;QAAiBA,QAOnBA;MANEA;QAAgBA,aAMlBA;MAFIA,qBAEJA;K;eAeIC;MAEFA,4DAEMA,iCACRA;K;eAEIC;MACE
A;MACJA;QAEEA,mBAgBJA;MAdEA;QAGEA;UACEA,OAAOA,oBAUbA;aARSA;QAELA,OAAOA,mBAMXA;MAFEA,sBAAMA,0DACiCA,uBAAWA;IACpDA,C;uBA4BIC;MACFA;;QACMA;;;QAKAA;;MANNA,SAOFA;K;sBAOIC;MACFA,0CASFA;K;mBAiDSC;MAAeA,qCAAkCA;K;;;;EA+MlCC;mBAAfA;MAAeA,qCAAkCA;K;;;;EAWlCC;mBAAfA;MAAeA,wCAAqCA;K;;;;clBpoBxDC;6BAEqBA;qBACNA;MAAlBA;QAA0BA,YAE5BA;MADEA,iBAAgBA,4CAClBA;K;gBA8EKC;+BAOuBA;MAGRA,0BARYA;QAOLA,YAI3BA;MAHIA,qDAGJA;K;eAGOC;MAGLA,OAAOA,0BADUA,iDAAiCA,SAEpDA;K;eAJOC;;K;QAqKSC;MACdA;;QAAgBA,SAelBA;iCAdyBA;QAAaA,eActCA;MAbEA;QAEEA,uBAAYA;MAIdA;QACEA;UAA6BA;QACrBA;QACRA;UAAgBA;QAChBA;;MAEFA,aACFA;K;iBAsCIC;0BAGQA;oBAQUA;MAAlBA;QACeA;MAEfA,O1B8wBFA,oC0BxwBFA;K;cAwBOC;MAAcA,eAAIA;K;gBAMjBC;MAGFA;wBACgBA,0BAApBA;QAC8BA;QACrBA;QACAA;;MAEFA;MACAA;MACPA,gDACFA;K;mBAGSC;MAAeA,wCAAqCA;K;cAErDC;MAAUA,sBAA4BA;K;UAE9BC;MACVA;MAEEA,yBAAcA,qBAAQA;QAASA,sBAAMA;MAC3CA,eAAOA,OACTA;K;;;;;;SmBtWKC;MACCA;MAAYA;uBAAMA;MACtBA;QAAoBA,MActBA;sBAbiBA;gBACXA;aAAQA;MAAZA;QA4BcA;QACdA;UAAcA;;UA0CZC;UACGA;UACLA;UACAA;UACAA;UAEOA;;QjCmrCwChU;QiC7tC/C+T;aACAA;;;MA/BEA,8CAAiBA;WAMnBA;IACFA,C;aAkCUE;MACJA;MAAqBA,SAArBA;QAAcA,+CAIpBA;MAHEA,OjCstCEC,eArCSD,oBkCpaJA,kBD5wBkBA,0CAARA,gBAAgBA,QAAQA,kBAAeA,sBAE1DA;K;cAEQE;MAAUA,8BAAOA;K;;;;cLxGlBC;MAELA,yCADcA,SAIhBA;K;;;;;gBpCHgBC;MAAYA;aAqT5BA,0BAEuBA,yBAvTKA,uBAqT5BA,wCArTiDA;K;WA8IrCC;;MACRA,OA4PJA,2EA5PmCA,gBA4PnCA,+EA5P6CA;K;;;eA2KvCC;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAIzBC;MACoBA;kBAAVA;;kBAAUA;eACnBA;QACFA,sBAAMA;gBAEJA;MAAJA;QACEA;QACAA,YAKJA;;MAHEA,8BAAWA;;MAEXA,WACFA;K;2BAxBGC;;K;;;;gBA0CaC;MAwBhBA,aAxBiDA;MAArBA,4BAA+BA,yBAAUA,KAAzCA,sBAwB5BA,+BAxBwEA;K;cAGhEC;MAAoBA,aAAVA;8BAAgBA;K;;;;cAuB7BC;;kBACCA;;QACFA,8BAAWA,gBAAaA;QACxBA,WAIJA;;MAFEA;MACAA,YACFA;K;eAEMC;MAAoBA,aAATA;kCAASA,sBAAIA;K;2BAf3BC;;K;;;EA6BuBC;cAAlBA;MAAUA,qCAAcA;K;eAC9BC;MAAwBA,sBAAGA,sCAAyBA;K;;EAsBtDC;gBAXgBA;MAAYA,2BAA2BA,sBAAVA,4BAAoBA,KAWjEA,qCAXoEA;K;WAGxDC;MAlEZA;MAmEIA,iEAA6BA,gBAnEjCA,8DAmE2CA;K;;;cAStCC;MACHA;oBAAOA,qBACDA,KADCA;kCACDA,UAAaA;UACfA,WAINA;MADEA,YACFA;K;eAEMC;MAAWA,OAAUA,IAAVA,
wBAAiBA;K;;;;;gB2Chc1BC;qBACMA;MACZA;QAAkBA,WAKpBA;MAH8CA,oDAANA;;MAEtCA,WACFA;K;cAGAC;MAAcA,wBAAUA,wBAAQA;K;OCTlBC;MAAEA;oBAAyDA;MAAvCA,wCAAmBA,2BAAeA,iBAAKA;K;;;;ECmB5CC;cAAtBA;MAAcA,kCAAyBA;K;;;;cAqEtCC;MAAUA,mBAAQA,OAAMA;K;aAEpBC;qBACCA;MACXA;QAuDKA,uBAtDmBA;;;MAGxBA,WACFA;K;iBAWKC;MACHA;QAAoBA,YAGtBA;MAFEA;QAAwBA,YAE1BA;MADEA,OjDugFKA,IiDvgFmBA,6BAC1BA;K;UAEYC;MACLA;QAAkBA,WAGzBA;MADEA,WAAsBA,QAAfA,KADoBA,SAAfA,MAEdA;K;aAEKC;MACGA;;MAAOA;mBACEA;oBACUA,gBAAzBA;QAGEA,aAFQA,WACEA;IAGdA,C;YAEgBC;MAAQA,OAkCxBA,oBAlCyCA,kBAkCzCA,qCAlC+CA;K;;;cAoCvCC;MAAUA,qBAAUA,OAAMA;K;gBAIKC;MAUvCA,aAT4CA;MAAxCA,mDASkEA,SAAtEA,uDATsDA;K;;;eAWhDC;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAEzBC;;kBACCA;qBAAUA;QACZA;QACAA,YAKJA;;MAHEA,oCAA6BA,UAAlBA;;MAEXA,WACFA;K;4BAbGC;;K;;;;kBjDuBQC;mBACLA;MgD7MAA;QhD6MuBA,SAE7BA;MADEA,WAAOA,4BAA6CA,gBACtDA;K;2BAiBSC;MACPA;eAfmBA;QAeLA,QAAOA,WASvBA;gBAPMA;MAAWA;0CAA6BA,sBAApBA,6BAA6BA;MACrDA;QAAwBA,QAHHA,WASvBA;MElIqBA;MF8HnBA;QACEA,UAASA;;MAEXA,WACFA;K;sBAEyBC;MACvBA;eAzBqBA;QAyBLA,QAAOA,UAWzBA;gBAV2BA;MAAoBA;;gBAEzCA;MAAWA;+EAA8BA;MAC7CA;QAA6BA,QAJNA,UAWzBA;MQzOAA;MRoOEA;QACEA,iBgDpPEA,ahDoPoDA,YAAnBA,oBAC/BA;MAENA,OiDxQFA,gEjDyQAA;K;;;;UA6kB2BC;MACrBA;MAAkBA;;QAAlBA,WAAUA;MACVA;MACAA;;IAEDA,C;;;;oBAilBLC;;gCAEyDA,WAD3CA;MAEZA;QAAmBA,WAmBrBA;MAlBeA;gBACTA;MAAJA;;gBAGIA;MAAJA;;gBAGIA;MAAJA;;gBAGIA;MAAJA;;gBAGIA;MAAJA;;MAIAA,aACFA;K;;;cAmNOC;MACLA,iDACFA;K;;;cAaOC;;;kBACDA;MAAJA;QAAqBA,oCAA4BA,qBAMnDA;gBALMA;MAAJA;QACEA,iCAA0DA,2BAI9DA;MAFEA,iDACoDA,2BACtDA;K;;;cAQOC;mBAAcA;e0BzyCDA,wC1ByyCgDA;K;;;cAQ7DC;MAGLA,iCAD6BA,kEAE/BA;K;;;;cAyMOC;;iBACDA;MAAJA;QAAoBA,SAQtBA;eAL+BA;wDAEnBA;MAEVA,WAAOA,oCACTA;K;;;;cA+nBOC;MAMcA,uBAJDA;0DAEeA;MAEjCA,6EACFA;K;;;;;;;;;;;;;cAqBOC;sBAEDA;MACJA;QAAkBA,yCAEpBA;MADEA,qBAAmBA,4BACrBA;K;;;OA6BcC;MAAEA;oBAKhBA;MAJEA;QAA4BA,WAI9BA;MAIyBC;QAPKD,YAG9BA;MAFEA,WARoBA,oCASMA,oBAAiBA,UAC7CA;K;gBAGQC;MAENA,6BADsCA,cACDA,gCAfjBA,iBAgBtBA;K;cAGOC;MAGLA,yBAzBkBA,uCA3mEJA,gCAqoEgCA,kBAChDA;K;;;cA+LOC;MAELA,yCADwBA,6CAI1BA;K;;;cAOOC;MAAcA,8BAAg
BA,QAAQA;K;;EA4kBKC;cAA3CA;MAAcA,uDAA0CA,SAAQA;K;;;EQ1iGvEC;cA5SQC;MAAUA,+BAAOA;K;YAITD;MAAQA,mEAwSxBA,wCAxS0DA;K;iBAMrDE;wBAEaA;MACdA;QAAqBA,YASzBA;MARIA,cA8OKC,aAtOTD;K;UAmBYE;MACVA;;sBACgBA;QACdA;UAAqBA,YAWzBA;sBAqMSA;wCA9MyCA;QAA9CA,SASJA;aARSA;mBACMA;QACXA;UAAkBA,YAMtBA;mBAqMSA;QAvMEA,gCAFuCA;QAA9CA,SAIJA;;QAFIA,8BAEJA;K;iBAEGC;;mBACUA;MACXA;QAAkBA,WAMpBA;MA0KaA,aAqBJC;MAnMKD;MACZA;QAAeA,WAGjBA;MADEA,aADyBA,OAClBA,iBACTA;K;aAEcE;;;MACKA;MAGkBA;MAHnCA;uBACgBA;QAEdA,8DADqBA,YAAqBA;aAErCA;oBACMA;QAEXA,2DADkBA,SAAeA;;oBAQxBA;QACXA;UAAiCA,YAAfA;QACPA;qBA4KJC;QA1KPD;UAC2BA;;UAGbA;UACZA;kBAC2BA,OACpBA;;YAGLA,YADyBA;;;IAhB/BA,C;iBAsBEE;;;MACgBA;wBACNA;MADNA;QAA6BA;QAAXA,oBAAiBA,wBAIzCA;;MAHYA;MACNA;MACJA,YACFA;K;YAEGC;MAEQA,0CAAsBA;MAA7BA,SAMJA;K;aA4BKC;MACgBA;;kBAAOA;2BACNA;aACpBA;QAGEA,kBAFQA,qBACEA;mCAEWA;UACnBA,sBAAMA;mBAEIA;;IAEhBA,C;oCAEKC;;;MAC4CA;MAEEA;kBA2F1CA;MA5FPA;QAC6BA;;YAEtBA;IAETA,C;2BAEGC;MACDA;;QAAmBA,WAMrBA;kBA8ESA;MAlFPA;QAAkBA,WAIpBA;MAHEA;;MAEAA,WAAOA,iBACTA;K;eAEKC;UAKHA,sBAAkBA;IACpBA,C;oBAGkBC;;;eA6GlBA,wBA5G6CA,2BAAKA;eAC5CA;aACFA,eAASA;;kBAEgBA;UAAKA;YACzBA;aACLA,WAAaA;;;MAGfA;MACAA,WACFA;K;iBAGKC;;uBACgCA;mBACJA;MAC/BA;aAEEA;;gBAESA;MAEXA;aAEEA;;YAEKA;;MAGPA;IACFA,C;6BAaIC;MACFA,OAA4BA,iCAC9BA;K;6BAOIC;MACFA;;QAAoBA,SAOtBA;sBANeA;MACbA;QAEWA,iBADgBA,GAChBA;UAAuBA,QAGpCA;MADEA,SACFA;K;cAEOC;MAAcA,OAAQA,2BAAiBA;K;mBAwB9CC;MAIcA;;;MAMZA,YACFA;K;;;;;cAkBQC;MAAUA,gBAAKA,oBAAOA;K;gBAGdC;MA2BhBA,aA1BqCA;MAAnCA,4CAA8CA,mBA2B7BA,SADnBA,gDAzBAA;K;;;eA6BMC;MAAWA,gCAAaA;K;cAEzBC;;kBACmBA;eAAlBA,sBAAuBA;QACzBA,sBAAMA;kBAEGA;MACXA;QACEA;QACAA,YAMJA;;QAJIA,mCAAWA;aACXA,aAAaA;QACbA,WAEJA;;K;4BArBGC;;K;;;EP6BqBC;UAAPA;MAAOA,WAA0BA,UAAUA;K;;;EAErCA;UAAnBA;MAAmBA,WAA6BA,sBAAsBA;K;;;EAEtDA;UAAhBA;MAAgBA,WAAeA,iBAAiBA,iBAAIA;K;;;EWg9BpD/Y;mBA/zCKgZ;MAAeA,4BAAUA;K;iBAElBC;MACdA,mEA+zCEjZ,gDA9zCJiZ;K;iBAFgBC;;K;;;;;;cAkUDC;MA8DfA;QA5DIA,uDAAyCA,QAO7CA;;QAFIA,eAAOA,OAEXA;K;sBAwBKC;MAIgBA;MAAjBA;IAEJA,C;oBAEKC;MACHA;QAGEA;IAEJA,C;;;iBAmBgBC;MAnYSA,wDAoYRA;MADDC;MAEd
D,aACFA;K;iBAHgBC;;K;;;;mBAqIPC;MAAeA,0BAAQA;K;cAgP3BC;MAAQA,0CAAkCA;K;;;;;cA8EvCC;MAAUA,sBAAgCA;K;;;;UA2BlCC;MACGA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;;cA+BKC;MAEHA;MACIA;MAlhBJA;6BAsdwBA;MACxBA;MACAA;MACAA;QAAiBA,kBAAiBA;MACtBA;6BAIcA;MAC1BA;QACEA,kBAAMA;MAKGA;MAEXA;MA6CEA,MAGJA;K;;;;;;mBAmBSC;MAAeA,6BAAWA;K;;;;;mBA0C1BC;MAAeA,6BAAWA;K;;;;;mBA0C1BC;MAAeA,2BAASA;K;UAEpBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA0CSC;MAAeA,2BAASA;K;UAEpBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA0CSC;MAAeA,0BAAQA;K;UAEnBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA6CSC;MAAeA,4BAAUA;K;UAErBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA0CSC;MAAeA,4BAAUA;K;UAErBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA2CSC;MAAeA,kCAAgBA;K;cAEhCC;MAAUA,sBAAgCA;K;UAErCC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBAsDSC;MAAeA,2BAASA;K;cAEzBC;MAAUA,sBAAgCA;K;UAErCC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;aAQgBC;MAGdA,OASEA,eAVWA,yBADFA,uCAAkCA,UAG/CA;K;aAJgBC;;K;;;;;;;;ENryBCC;WAnabA;MAEFA,yCA8ZsB3c,4BA7ZxB2c;K;WAKIC;MAA8BA,OAmajBA,qBAXOC,iCAxZmDD;K;;;EAylCtDE;cAAdA;MAAcA,0BAAaA,YAAWA;K;;;cA0VtCC;MAAcA,0BAAQA;K;;;;UQrhDzBC;;cACUA;QACRA;MACCA;IACHA,C;;;;UAMOC;MAELA;MAAiBA,WAAjBA;eAG4DA;eACxDA;;IACLA,C;;;;UASHC;MACEA;IACFA,C;;;;UAOAC;MACEA;IACFA,C;;;;gBAkCF5R;cAgEOA;QAxDOA,gBACNA,yBAPiBA;;QASrBA,sBAAMA;IAEVA,C;;;UAXI6R;MAGEA;IACFA,C;;;;cAmECC;;;wBAEMA;;QAAuBA;gBAC3BA;QACHA;;kBAGAA;oCAFeA;UAEfA;;UAEAA;;IAEJA,C;mBAEKC;mBAGDA;cADEA;QACFA;;QAEAA;IAEJA,C;;EAsEgBC;UAAZA;MAAYA,0CAAgDA;K;;;;UAEvCA;MAGvBA,4Bd66CFA,oCc96CoCA;IAEnCA,C;;;;UA0C0CC;MACzCA,IAAkBA,YAAWA;IAC9BA,C;;;EG/SsBC;cAAhBA;MAAcA,eAAEA,OAAMA;K;;;;;;;;ciCoBxBC;IAAYA,C;eAIZC;IAAaA,C;oBAnCSC;;K;wBACAC;;K;;;oBAkIlBC;MAAgBA,WAACA,WAAuBA;K;gBA+C3BC;;;0BAWlBA;MAJsCA;gBA9EpBA;Q9BwlBtBA,oCJzD2BC,gBIyD3BD;QAGEC,oBAAkBA;QAClBA;UACEA,eJmmC0CA;QkClnD1CD,SAeJA;;YlCkc2BE;;;oCkC/cFF,gClCknD0CG;MI3rDpDD;MAgE8BE;MJynDCC;MkCxyD9CL;;MAOUM;MAARA;MAoIAN;kBAAaA,oBAAeA;qBAESA;MACrCA;MACaA;MACAA;MACbA;QACEA;;QAEQA;eA4CIA,4BAAoBA;Q
AEhCA,mBAAYA;MAEdA,mBACFA;K;oBAwBMO;MzCwXNA,SyC7esBA;QAuHlBA,oEAIJA;MADEA,OzCmXFA,kEyClXAA;K;sBAyDKC;;;;gBAhKkBA;MAmKrBA;QACEA,sBAAUA;0BA1JOA;MA8JnBA;QAAcA,MAgChBA;MA7BYA;WAOVA;yDAEAA;yBAtSkCA;QAuShCA;sBACeA;UACbA;;6BAE+CA;UAC/CA;YA3JkCA;mCAAaA;YAEnDA;cAEEA;;cAESA;YAEXA;cAEEA;;cAEKA;YAG2BA;YAArBA;;;;;qCAkJmBA;;;eAtLbA;QA4LjBA;IAEJA,C;mBAEKC;eA9NiBA;iBAmOHA;eACFA;MAGfA,kBAAYA;IACdA,C;0BA5R2BC;;K;yBACAC;;K;;;;;EAqSIC;oBAAtBA;MAAgBA,kFA9NFA,kBA8NkCA;K;oBAEzDC;MzCyPAA,SyCzduBA;QAkOnBA,uCAKJA;MADEA,OAAaA,sDACfA;K;eAEKC;MACHA;MAKyBA;gBArONA;MAgOnBA;QAAcA,MAehBA;sBA7PuCA;;QAmPnCA;;iBArOiBA;UAwOfA;QAEFA,MAKJA;;MAHEA,yBAAiBA;IAGnBA,C;;;UAHmBC;gEACfA,kBAAaA,WAAKA;IACnBA,C;cAFgBC;;K;;;mBnC3UdC;;iBACEA;aAwSmBA;QAxSEA,sBAAUA;MACFA;MAsBlCA,6BAtBWA,YAAQA;IAErBA,C;mBAJKC;;K;;;cAkBAC;;;wBAEmBA;eADjBA;aAsRmBA;QAtREA,sBAAUA;MACpCA,oBAAoCA;IACtCA,C;;;sBA6HKC;MAEIA,SApCiBA;QAmCLA,WAErBA;MADEA,WAxCiBA,OAAOA,oBgC7FEC,mChC6GeD,sBAwBkBA,iCAC7DA;K;iBAEYE;;6BAEeA;;;;uBAaVA;kBA1DEA,OAAOA;MAiDNA;QACPA,uDAGIA;;QAGJA,yBACOA;;QAOTA;QAAPA,SAiBJA;;QAhBIA,wBAFFA;oBA7DwBA;YAkEpBA,sBAAMA;UAORA,sBAAMA;;UAZRA;;IAkBFA,C;;;oBA2HUC;;;sCAcgDA;qBC0R/BA;2BDtSEA;QAEbA,+DACAA;UACVA,sBAAoBA;;sECq8CyCA;QDx7CnDA;;MAzDhBA;MA6DEA,oBA1PFA;MA2PEA,aACFA;K;kBAMUC;;;sCAEiDA;MAvE3DA,wBAAyBA,gBAAzBA;MAuEEA,oBA5PFA;MA6PEA,aACFA;K;qBAkFKC;UAEHA,cAAwBA;UACxBA;IACFA,C;kBASKC;UAGHA,gBACYA,mBAAkCA;UAC9CA,4BAA4BA;IAC9BA,C;kBAEKC;;kBAtJDA;MAwJFA;QACWA,iFAAgBA;aACzBA;;QAEAA;UArCKA;qBArHgBA;YAgKjBA;YACAA,MAURA;;UARMA;;QC2zCJA,0CDvzCEA,QCuzCuCA,wBDvzCfA;;IAI5BA,C;uBAEKC;MACHA;;;QAAuBA,MA+BzBA;gBA/MIA;MAiLFA;QACmBA,4EAAoBA;aACrCA;QACAA;0BAEiCA;UAC/BA;wBAEgBA;gBAETA;;;QAGTA;UAvEKA;qBArHgBA;YAkMjBA;YACAA,MAURA;;UARMA;;QAGUA,MAAZA;QCsxCFA,0CDrxCEA,QCqxCuCA,wBDrxCfA;;IAI5BA,C;sBAEiBC;MAIEA,qEAAUA;MAEpBA,IADPA;MACAA,wCACFA;K;uBAEiBC;MACEA;MAEjBA;sBACkCA;eACxBA;;MAIVA,WACFA;K;yBASKC;MAKHA;;;QAEEA,wBACEA,kDASSA;;QAXbA;QAgBEA;QAKAA,oBAAkBA;;IAItBA,C;wBA0EKC;MAGcA;MACPA;MADmBA;WA5N7BA;WACAA;MA6NAA;IACFA,C;2BAEKC;MAEHA;iBA7VqBA;kBA6VIA,iBAA6BA;Q
AA9BA;;QAAHA;MAArBA;QACEA,MAKJA;MAH+BA;MAC7BA;MACAA;IACFA,C;oBAEKC;MAGcA;MACPA;MAAOA;MADYA;MAnO7BA,uBEteFC;MF2sBED;IACFA,C;oBAGKE;;uBAaCA;kCAAMA;QACRA;QACAA,MAGJA;;MADEA;IACFA,C;6BAqCKC;MACHA;;;MCujCAA,0CDtjCAA,QCsjCyCA,wBDtjCjBA;IAG1BA,C;kBAMKC;;8BAECA;MAAMA;QAERA;QACAA,MAIJA;;MADEA;IACFA,C;yBAEKC;;MCkiCHA,yCD9hCAA,QC8hCyCA,wBD9hCjBA;IAG1BA,C;;;;UA5R4BC;MACtBA,oCAAsBA,YAAMA;IAC7BA,C;;;;UAgCuBC;MACtBA,oCAAsBA,mBAAMA;IAC7BA,C;;;;UAwCCC;;iBAEEA;;;QAEEA,wBAAyBA;;QAD3BA;QAEEA;QACAA;;IAEHA,C;;;;UACQA;MAEPA;IACDA,C;;;;UAOeA;MAChBA,gCAAeA,QAAGA;IACnBA,C;;;;UAkD4BC;MAC7BA,sCAAiBA,aAAQA;IAC1BA,C;;;;UA4GuBC;MACtBA,oCAAmBA;IACpBA,C;;;;UAsBuBC;MACtBA,gCAAeA,YAAOA;IACvBA,C;;;;UAoEGC;MAMMA;;yBAEeA;QA3nBlBA,mBArFUC,OAAOA,egC7FEC,6BhCkHYD;;QA0rBhCD;QAEEA;QA/ZDA,SAgaKA,8CAAsBA,OAha3BA,oBAgayCA;;UAhazCA,EAiaGA,yDAAuBA,OAja1BA;;UAmayCA;UAAGA;;YEj3BtBA;;UAF/BA,EFm3BYA;;;UAEFA;QACAA,MA2BJA;;gEAjjBmBA;2BACFA;;UA+GdA,EA0aGA,2DA1aHA;YA2aGA;;QAGFA,MAmBJA;;;qCAbyBA;QA7jB/BG,2CA+pB4BH;QAhGlBA,gCACEA,sGAGSA;;UAIXA;UACAA;;IAEJA,C;;;;UAVMI;MACEA,8CAAmCA;IACpCA,C;;;;UACQA;MACPA,mCAA4BA,qBAAGA;IAChCA,C;;;;UAOPC;MACEA;;;eACyBA;;;QAptBiBA,gBAotBIA;QAptB7CA,EAotBCA,0BAvvBSC,OAAOA,oBASjBA,oBgCtGmBC,MhCsGiBD;;QA6uBrCD;QAEEA;QACsCA;QAAGA;;UEr5BpBA;;QAF/BA,EFu5BUA;UACAA;;IAEJA,C;;;;UAEAG;MACEA;;QA7cCA,8CA8cyBA,OA9czBA;;QA+cKA,oDACAA,SAtvBYC;UAuvBSD,EAAvBA,0BAAuBA;YACvBA;;;QALJA;QAOEA;QApdDA,sCAqdeA,OArdfA;cAqd6BA;;YAC1BA;;;UAEsCA;UAAGA;;YEt6BtBA;;UAF/BA,EFw6BYA;;;UAEFA;;IAEJA,C;;;;;cIgZUE;MJx/BhBA;gCAAyBA;QI0/BnBA;MACJA,2CACEA,6CAIQA,0CADQA;MAMlBA,aACFA;K;;;UAVIC;;;IAECA,C;cAFDC;;K;;;UAIQD;mBACNA;;8BJ9pBAA,eI8pBiBA;oBJvpBUA;MAlNVE;QADrBA;QACAA;MAoNEF;IIspBCA,C;;;EKvxC0BG;gBH8xBvBA;MAAYA,+EAAiCA;K;OAEvCC;MAAEA;oBAIhBA;MAHEA;QAA4BA,WAG9BA;MAFEA,mDACoBA,4BAAkBA,mBACxCA;K;;;cAkBKC;0BACHA;IACFA,C;eAEKC;0BACHA;IACFA,C;;;UDtkBKC;;;MAISA;gBApCWA;MAkCvBA;QAAiBA,MAMnBA;MALEA;QACEA;;QAEAA,oBAiQJA;IA/PAA,C;cA2BKC;IAELA,C;eAEKC;IAELA,C;iBAaKC;;uBACWA;;QAgPZA,+BAhPyBA,uBAgPzBA;QAhPYA;;yBA0SEA;MAChBA;eACEA,4BAAoBA;;eAE
pBA,6BAA6BA;gBAlYRA;MAsFvBA;QACEA;;QACAA;UACEA;;IAGNA,C;eAIKC;;;MAM4BA;gBA3GLA;WA0G1BA;MACAA,qCAAsBA;;MAEtBA;IACFA,C;iBAsFKC;;kBA/LoBA;mCAiMJA,SAAQA;kBACzBA;;QACmBA;UAhMgBA;sBAAIA;uCAwXvBC;;;QAxLhBD;UACEA;;;;aAKJA;QACEA;UACEA;UACAA,MAgBNA;;QAjO0DA;QAoNtDA;UAAqCA;aACrCA;QACAA;UACEA;;UAEAA;;;MAKJA;QACUA,KAARA;IAEJA,C;gBA7XmBE;;K;;;;;yCAuYGC;;0BAQlBA;MAEAA;MAIFA,OC2UGA,yDAAuBA,qDD1U5BA;K;YAfsBC;;K;;;;;cAgIjBC;MACHA;;gBARsBA;MAQtBA;QAAiBA,MAcnBA;MAZEA;aAEEA;QACAA,MASJA;;MAPEA,oBAAkBA;WAMlBA;IACFA,C;;;UAPoBC;;iBACDA;;QACfA;MACAA;QAA+BA,MAEhCA;+CA4BaA,QA7BDA;iBAuBSA;wBAAiBA;QAEvCA;MACAA;UACEA;2DAvGFC,QAASA,kBAAUA;IA6ElBD,C;;;;kBAwJEE;;gCACoBA;MACvBA;aAEEA;oBACIA;QAAJA;UACEA;UACAA;;;aAIFA;IAEJA,C;eAvFiBC;;K;;;;;;UJ+wBcC;MACvBA,gCAAoBA,YAAOA;IAClCA,C;;;;gBA0PIC;MACHA;;;aACgBA,kBAAgBA;UAC5BA;UACAA,MAMNA;;QAJIA;;QALFA;QAMEA;QA8DFA,mBAAiBA,qBAAOA;;IA3D1BA,C;uBAEKC;MACHA;;;;aACgBA,kBAAgBA;UAC5BA;UACAA,MAMNA;;QAJIA;;QALFA;QAMEA;QAkDFA,mBAAiBA,qBAAOA;;IA/C1BA,C;yBA4BgBC;MACdA,OAAOA,6EACTA;K;UAYiBC;MAAmBA,WAAIA;K;WAetCC;wBACgDA;WAA7BA,oBAAUA;QAAYA,iBAE3CA;MADEA,OAAOA,mCACTA;K;gBAGEC;qDACgDA;MAAEA;MAAFA,KAA7BA,oBAAUA;QAAYA,oBAE3CA;MADEA,OAAOA,iDACTA;K;iBAEEC;mEACgDA;MAAEA;MAAMA;MAARA,KAA7BA,oBAAUA;QAAYA,2BAE3CA;MADEA,OAAOA,6DACTA;K;8BAM8BC;MAEzBA,0EAACA;K;;EAlDSC;UAANA;MAAMA,mCAAgBA,GAAEA;K;;;EMr1CjCC;cA9WQC;MAAUA,+BAAOA;K;YAITD;MACdA,uCAyWFA,2CAxWAA;K;iBAMKE;MACHA;;sBACgBA;QACdA,wCAkOUA,aA3NdA;aANSA;QAIEA,WAHIA;QACXA,kCA+NUA,aA3NdA;;QAFIA,+BAEJA;K;kBAEKC;qBACQA;MACXA;QAAkBA,YAGpBA;MADEA,OAAOA,wBADMA,uCAEfA;K;UAYYC;MACVA;;sBACgBA;QAC8BA;QAA5CA,SAOJA;aANSA;mBACMA;QAC8BA;QAAzCA,SAIJA;;QAFIA,OAAOA,gBAEXA;K;UAEGC;;mBACUA;MACXA;QAAkBA,WAIpBA;MAHeA;MACDA;MACZA,gCAA4BA,WAC9BA;K;aAEcC;;;MACKA;MAGkBA;MAHnCA;uBACgBA;QAEdA,kDADqBA,wBAAqBA;aAErCA;oBACMA;QAEXA,+CADkBA,qBAAeA;;oBAQxBA;QACXA;UAAiCA,YAAfA;QE3GkBC;qBF6GvBD;QACbA;UACEA;;eAEAA;;UAEYA;UACZA;;;YAGEA;;iBAEAA;;;;IAlBNA,C;aAiEKE;;;;MACSA;yBACkBA,gBAErBA,uBAAeA,kBAFxBA;kBACYA;QACHA;QAASA;QAAhBA,gCAAsBA;0BACUA;UAC9BA,sBAAMA;;IAGZA,C;kBAEKC;
;sBACUA;MACbA;QAAoBA,aAiDtBA;MAhDgBA,iCAAOA;qBAIPA;MAHFA;MAIZA;QACcA;uBACEA;QACdA;+BACeA;UAEbA;;;kBAKOA;MACXA;QACcA;uBACEA;QACdA;;UAKEA;;;kBAKOA;MACXA;QACcA;uBACEA;QACdA;uBAEeA,MADHA;0BAEGA;UACbA;kCACYA;YAEVA;;;;MAMNA,YADAA,2BAEFA;K;wBAEKC;;MACwBA;MAIAA;eAkCfA;;YApCVA;;MAEFA;IACFA,C;gBAiEMC;MAEJA,YAAOA,CEjT6BJ,mCFkTtCI;K;;;sBAiCIC;MACFA;;QAAoBA,SAMtBA;sBALeA;MACbA;mBACgBA;QAAdA;UAAkDA,QAGtDA;;MADEA,SACFA;K;;;cAoDQC;MAAUA,4BAAKA,oBAAOA;K;gBAIdC;MAyBhBA,aAxBgCA;MAA9BA,qCAAoCA,qBAwBtCA,2CAvBAA;K;;;eAyBMC;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAEzBC;;oBACQA;sBACEA;kBACmBA;qBAAKA;QACnCA,sBAAMA;6BACaA;QACnBA;QACAA,YASJA;;QAPIA,mCAAWA;aAIXA;QACAA,WAEJA;;K;4BAtBGC;;K;;;ElB7HHC;gBuCxTgBA;MAAYA,oCvC0TLA,2BuC1TKA,yBvCwT5BA,oCuCxTiDA;K;eAE/CC;MAAwBA,OAAIA,4BAAOA;K;WAyIzBC;;MAA0BA,OvCmQtCA,2EuCnQqEA,QvCmQrEA,2EuCnQuEA;K;cA6WhEC;MAAcA,OAWJA,mDAXsBA;K;;;apBhgBlCC;;;;MACWA,gCAAdA,4BACwBA,WADxBA;;QACkBA;QAAhBA,gCAAsBA;;IAE1BA,C;cAoEQC;MAAUA;aAAKA,iBAAMA;K;cAItBC;MAAcA,kCAAiBA;K;;;;UAaxBC;;;aACHA;YACHA;QAEFA;eACAA;MC2YWA;;QA2BfC;MA3BeD;;IDxYZA,C;;;;EA0M0BE;UAAnBA;MAAmBA,2CAASA;K;aAgBnCC;MACHA,2EAAaA;IACfA,C;cAIQC;MAAUA,4BfvTAA,oBeuTWA;K;YACbC;MfZhBvM,aeYwBuM;iDfpTAA,oBAwSxBvM,wCeYiCuM;K;cAE1BC;MAAcA,OfjEQA,0BeiERA,kBAAeA;K;;;;;;;aE9F7BC;MACLA;MAAIA;gBkB7McA;MlB6MlBA;QAAmBA,SAIrBA;MAsCAA,8FAxCuBA;MACPA,EAD2CA;MACzDA,wCACFA;K;;;YAmEWC;MAILA;MAaFA;eAXsBA;MACPA;MACAA;MAEEA;MACnBA;QACEA;MbkgC6ChkB;Ma//BtCgkB,IAATA,oDACEA;MASFA;QAAsBA,aAIxBA;MADEA,WACFA;K;;;aA2LUC;MACSA;yDAAkCA;MACnDA;QAAkBA,ObozB6BjkB,iBa/yBjDikB;MAQIA;MAXWA;QAAiCA;kBA8H1CA;MAAJA;QACEA,kBAAMA;MAERA;QACEA,kBAAMA;aAMRA;MAtIAA,SACFA;K;;;YA2GWC;;kBAIWA;MAApBA;QACWA,KAATA;QACAA,WAMJA;;MAJEA;QAAkBA,Ob4rB6BlkB,iBaxrBjDkkB;MAHeA;MACJA,KAATA,mFAAmDA;MACnDA,aACFA;K;;;;;UD5D2BC;MAClBA;MACsBA;eADzBA;;6BAASA;ewBvkBgCC;QxBkgB7CX;;MAwEmBU;;QACfA;IACDA,C;;;;OjB7ZSE;MAAEA;oBAIQA;MAHpBA,0CAlC8BA,gCA2BXA,4CAUnBA,gBAAeA,MAAKA;K;gBAGhBC;MAAYA,OAAOA,kBAAKA,aAAQA,cAAaA;K;coBwa9CC;MACMA;mCpB3ccA;YoB4cdA,sBpBzceA;YoB0cfA,sBpBvcaA;YoBwcbA,sBpBrccA;coBscZA,
sBpBnccA;coBocdA,sBpBjccA;aoBkcfA,wBpB/boBA;kBAGXA;6BoB6beA;;eAChCA;QACFA,2EAIJA;;QAFIA,qEAEJA;K;;EwB3hBqBC;cAAdA;MAAcA,6BAAeA;K;;E3B6JKC;kBAAzBA;MAAcA,2CAAkCA;K;;;cf1IzDC;mBACDA;MAAJA;QACEA,8BAAkCA,wBAGtCA;MADEA,yBACFA;K;;;;kBAqFWC;MAAcA,kCAAoBA,wBAAwBA;K;yBAC1DC;MAAqBA,SAAEA;K;cAE3BC;MAI6CA;qBAH9BA;;uBAEGA;;iBAELA;MAGGA,UAFhBA;QAAWA,aAKlBA;MADEA,uDAD0BA,qBAAaA,yBAEzCA;K;;;;;EAW+BC;oBAAtBA;MAAgBA,qBAAMA,cAAYA;K;kBAsKhCC;MAAcA,mBAAYA;K;yBAC1BC;;oBAGSA;kBACFA;MAChBA;QAEgDA;WAGzCA;QAC0CA;WAC1CA;QACoCA,gEAAQA;;QAKXA;MAExCA,kBACFA;K;;EAkB8BC;oBAAtBA;MAAgBA,oBAAMA,cAAYA;K;kBAgF/BC;MAAcA,mBAAYA;K;yBAC1BC;MAjFmBA;QAqF1BA,qCAMJA;mBAJMA;MAAJA;QACEA,+BAGJA;MADEA,0CACFA;K;;;;;;ceyDOC;MAzFPA;;YA2FSA;wBACSA;0BAEdA;;UA5DF3B;QA8DmB2B;;cACfA;;MAKFA,KAFmBA,8BAEIA;MASGA,yCAAaA;MACbA;MAG1BA,uDALkCA,kBwB9kBShB,8FxB8lB/CgB;K;;;cfxDOC;MAAcA,uCAAyBA,QAAQA;K;;;cAc/CC;MAELA,oCADmBA,QAIrBA;K;;;cAoBOC;MAAcA,2BAAaA,QAAQA;K;;;cAcnCC;mBACDA;MAAJA;QACEA,kDAIJA;MAFEA,sDACaA,8BACfA;K;;;cAOOC;MAAcA,sBAAeA;K;kBAEpBC;MAAcA,WAAIA;K;;;;cAO3BC;MAAcA,uBAAgBA;K;kBAErBC;MAAcA,WAAIA;K;;;;cmBrnB3BC;MAGLA,2BAFuBA,QAGzBA;K;;;cAmDOC;;sBAEkBA;;qBAIJA;qBACGA;0CAEiCA;MAArDA;QAIIA;MAAJA;kBACaA;UACAA;QAEXA,6BAgENA;;kGA3DIA;QACaA;mCAAOA;QAAPA;QACXA;UACEA;YACEA;UAEUA;UAzBdA;eA2BOA;UACLA;UACYA;UA7BNA;;;MAsEDA;MA/BTA;QACaA;mCAAOA;QAAPA;QACXA;UAKWA;UAHTA;;;MA3CiBA;MAmDrBA;QAvCuCA;QA2CrCA;UACQA;;;UAEDA;YACGA;;YA3DSA;;YA+DTA;YACFA;;UApD6BA;;;QAwDAA;QAAPA;QApEXA;;MAsErBA,yBAFeA,sEAEyBA,oDADCA,gBAS7CA;K;;;WCsEYC;;MAA4BA,qFAA2BA,gBAA3BA,6BAAqCA;K;cA2RrEC;MAGiBA;;MACvBA,gBAAOA;QACLA;MAEFA,YACFA;K;eA+QEC;MACWA;;MACSA;MAEpBA,wBAAOA;QACLA;UAAoBA,OAAgBA,sBASxCA;QARIA;;MAEFA,sBAAiBA;IAMnBA,C;cAgBOC;MAAcA,uDAAqCA;K;;ELhvBhCC;gBAAlBA;MAAYA,oDAAcA;K;c4B/C3BC;MAAcA,aAAMA;K;;E5B8BIC;OAHjBC;MAAoBA,qBAAsBA;K;gBAGhDD;MAAYA,wCAA+BA;K;cAG5CE;MAAcA,yBxBmaLA,uCwBnaiDA;K;kBAGzDC;MACNA,sBAAwBA,2DAAqBA;IAC/CA,C;mBAGSC;MAAeA,yCAAgCA;K;;;;;;c6BhBjDC;MAAcA,SAAWA;K;;;;c7B6cxBC;MAAUA,qBAAUA,OAAMA;K;cA4B3BC;mBAAuCA;MAAzBA,sCAAmCA;K;;;UQzfxDC;MAEEA;MAAIA
;QACFA,QAoBJA;eAlBMA;;QACFA,OAAOA,eAiBXA;MAfQA;QACiBA;QACrBA;QACkBA,6BAAlBA;;UAC6CA,gCAASA;;QAEtDA,mBASJA;aAReA;QAEYA;QAAvBA;QACAA,0CAAqBA;QACrBA,oBAIJA;;QAFIA,QAEJA;K;;;EAqf8CC;UAAPA;MAAOA,iCAAmBA,6BAAEA;K;;;;UAC9BA;MAInCA;QACEA,OAAOA,+BsB9aXA,8CtBmbCA;MADCA,OAAOA,iCACRA;K;;;;UAoFDC;MAEEA;MAAIA;QACFA,QAqDJA;eAlDMA;OAA+BA;MAA/BA;QACFA,OAAOA,eAiDXA;MA9CEA;QAxDqBA;QLpKrBC;UAEEA,kBAAiBA;QAiBnBA;QK0MED,OzB9nBJE,yCyB2qBAF;;MA1CEA;QAGEA,sBAAMA;MAGRA;QACEA,OAAOA,2CAmCXA;MA/GYA;;;QAgF6BA;QACrCA;QAhGsCA;;QAmGtCA;UACEA,cAAaA,UADfA;QAGAA,YAAiCA,iCAAjCA;UACgBA;UACEA;uCAAQA;4BAARA;UAChBA;YACEA,iCAAsBA,aAvmB5BA;;QA0mBEA,iBAiBJA;;MAdEA;QACYA;QAEaA;QAAvBA;QAhnBFA;QAmnB2BA,0CADzBA;UACEA,gBAAeA,YAAQA;QAEzBA,iBAMJA;;MADEA,QACFA;K;;;;csB5jBOG;MAELA,uDADiBA,2CAEnBA;K;;;qBCwGAC;wBACeA;MACbA;mBACwBA;UAEpBA,MAKNA;MAFEA,sBAAMA;IAERA,C;aAiCIC;MACFA;;QACEA,sB9CxCJA;M8C2CEA;QAEEA;;;U3CkHyCC;;Q2CrH3BD;eAUhBA;M3C0GAC;MAiaAD;M2C1gBYA;MAC0BA,uBA1PjCA;MA2PLA;QA9CAA,uBACYA;Q3CyZVA;Q2CxWAA;UAEEA,0BAYNA;QX9PSA;QW0PLA;UACEA,aAGNA;;K;;;mBtB5NGE;;K;;;;eAsFMC;MAkDAC,QAjDHD;QACFA,YAGJA;MADEA,WAAOA,SACTA;K;mEAqCaC;MAQJA;IA4BTA,C;6DApCaC;;K;oCAAAD;MAQJA;;;6GARIA;QAQJA;;;;;;cAAPA;cGqCEA,UAAUA;yBHvIFA;;gBGuIRA,UAAUA;2BHjCLA;;uBDhKPA;cCoK2CA,iEAAiBA;;;;cDkIvDA,4BCjIKA,OuB/LcA,QvB4LcA,qDAEhCA;;gBD1GDA;;gBC4GLA;gBAKEA,gBG6BAA,OAAUA,2BH7BUA;+BAChBA,gBAA0BA;6BAC5BA,cAD4BA;kBDlH3BE,WCuFLF,oBuB3KwBE,QvBwMRF,gHAGOA,8EAEWA;;;cA/BvBA,WAmCNA;;cAnCMA;;;MAQJA;IARIA,C;yBAsCTG;MrBzOqBA;sDoBuCvBA;MCuMEA;QDvMFA;QCyMIA,gBGrBFA,OAAUA;;MVpGLA;QO8H6BA;QAAhBA;6BAClBA;;UAC2BA;uCAAIA;qBAAJA;UACzBA;;;cAKuDA;cADnDA,gBGjCNA,OAAUA;cHmCJA,SAkBVA;;cAhBUA,gBGrCNA,OAAUA;cHsCJA;;;QAGNA,sBAAMA;;MAERA;;UAEIA,SAQNA;;UANMA,QAMNA;;UAJMA,QAINA;;UAFMA,QAENA;;K;mBAEUC;MACJA;MrBgiC2C5nB;MAnzCxB4nB,gDoBuCvBA;MCkPEA;QDlPFA;QCoPIA,gBGhEFA,OAAUA;;QH2DIA;;MD/OhBA,iCA6CKA,8BA7CLA;MA6CKA;QC2MkCA,oCDxPvCA,SA6CKA,8BA7CLA;;QwB8SwBA,+CvBnDwBA,oBwBxT9CA;MxB0UFA,OApOFA,oEA0OAA;K;kBAEKC;MuBrL0BA,oCvBwLMA,qCAAjBA;;MDtNbA;IC2NPA,C;oBAEaC;MAIXA;;IAsGFA,C;oCA1GaA;MAIXA
;;;8DAJWA;QAIXA;;;;;;;;;cAGiBA;cAFVA;;gBDpSPA,2BCuSmCA;kBDvSnCA,0BCySmCA;;;+BAxORA,WAAWA,WAyOnBA;;kBACbA;;;gBDzODA;;gBC4ODA;;;cAGaA;cAEfA;cGxHAA,UAAUA,qDHyH6BA,OAAOA,+CAA0CA,gCAA2BA;cAEnGA,mDAAqBA;gDAAkBA;oCACxCA;;+BAGTA,gBAA0BA;6BAC5BA,cAD4BA;kCAKTA;kCACNA;kCA9PTA;;kBDALJ,WCuFLI,oBuB3KwBJ,QvB8UNI;;;gBAUdA;;;8BApQMA;;cAwQcA,uFAA8BA;2BAGhBA;2BAA0BA;;8BArM5DA;;gBAEFA,oBsBjJuBC,8BtBiJ6BD;cAGtCA;;gBA0JoCA;crBucpDC;;oCqB7lB6BD;cAE7BA;cAEUA;;;crBwLVE;cA+XAF;;cAAqBA;cA/XrBE;cA+XAF;8BqBzXyBA;;cDnVzBA;;;cwBlBwBA,avBuWhBA,uFAG6BA;cACnBA;;cANCA;mCuBnQWA,kBxB4CzBA,iCC+NYA,2EAAsCA,OAAOA,4DAR3CA;;;cAULA;cGzKZA,UAAUA,iEH4KwCA,OAAOA,6BrBvYpCA,2DqBuY2FA;cY1VjFG;;cZ6V/BH,uCrB04BSA,oBqBz4B4BA;cACrCA,wBrB5YqBA;cqB6YrBA;cACAA,wBAA6BA;cAE7BA;6BAEIA,gBAA0BA;2BAC5BA,cAD4BA;gBDzS3BJ,ewBpFmBA,QvB+XRI,gHAGOA,4CACNA,6BA/SPA;;cGkHRA,UAAUA,wFHlHFA,qCAuTgEA,6CAAqBA,+CAAqCA,kCAA6BA,oCAA+BA,OAAOA,mCAA6BA,uBAAsBA,oBAAaA;;;;;;;;;cAtFvQA;cAwFEA,gBG3KAA,OAAUA,mDH2KkCA;6BACxCA,gBAA0BA;2BAC5BA,cAD4BA;gCAKTA;gCACNA;gCAhUPA;;gBDALJ,WCuFLI,oBuB3KwBJ,QvBgZRI,sKAODA;;;;;;;;;;;;;;cAtGJA;;;;;;MAIXA;IAJWA,C;oBA4GAI;MAIPA;;IA6NNA,C;oCAjOaA;MAIPA;;;8DAJOA;QAIPA;;;;;;;;;cAAWA;gBACXA;cAEJA;cGvNEA,UAAUA,2DHuNyCA,OAAOA;;gBAIxDA,8BAAkBA;cAEjBA,0CAEQA,OUhbKA;gBVibhBA;+BA1VyBA,WAAWA,WA2VrBA;;kBAAiCA;;;gBGjOhDA,UAAUA;gBJzHPA;gBIkHHA,UAAUA;;gBH4OVA;;;8BA/VyBA,WAAWA,WAkWvBA;;6BAEAA;uBAA2BA;;sBAApBA;kBACcA,gFACnBA,OAAOA,0BACPA,OAAOA;kBGpPtBA,UAAUA,wCHsPeA,0CAA8BA;kCAEnDA;kBU+CS9E;;0BN7hBf8E;sBrC+esBC,EqC/etBD;oBrC+esBC,EqC9etBD;0BAoBOA;4BACFA;wBrCydiBC;+BqCxd0BD;0BAAoBA;;;;wBQPlEA;;sBtC+DQE;;sB0BkaWF,kEAAwBA,OAAOA;;wBAAxBA;;;;;sBG3P1BA,UAAUA,mEH2PgBA;+DYzaKD;sBZ6azBC,oCrB0zBGA,oBqB1zByCA,gEACnBA,OAAOA;sBAChCA;sBG1PNA,UAAUA;sBJzHPA;;sBIkHHA,UAAUA;oBAAVA,UAAUA;oBJlHPA;;oBC2XCA;;;oBAEAA;;;;;8BA7XIA;;cAoYcA,uFAA8BA;cAExBA,4EAAwBA,OAAOA;cAC5CA;cACAA;2BACGA;2BACLA;;;gBAAOA;;;;;cADFA,+CACEA,0BAAgCA,OAAOA;cAE3CA,kBAAhBA,4BAAgBA;gBAChBA;cG3RAA,UAAUA,2EH8RgDA,kCAAwBA,OAAOA,6BAAsBA,oCAAwBA,gCAAoBA,0BAAcA;uDAQ3IA,WC9bVA;+BD+bdA,gBAA0BA;
6BAC5BA,cAD4BA;kCAKTA;kCACNA;kBD/ZdR,WCuFLQ,oBuB3KwBR,QvB+eNQ,2IA3ZRA;;;gBAsaNA;;;gBAEEA;;;;cA0EFA;mCAAMA,uDAANA;;;;;;;;;;;cAJFA;yBAMEA,cAAyBA;cACzBA;cGnYFA,UAAUA,4DHmY4CA;cACpDA;mCAAMA,qDAANA;;;;;;;;;;;;;qBAGEA;;gBACIA;gBAANA;;8BAKFA;gBCxgBFG;gBACAA;cE0HEH,UAAUA,iFHgZwDA,OAAOA,4BrB3mBpDA,mDqB2mBmGA;cY9jBzFD;;cZkkB/BC,uCrBqqBSA,oBqBpqB4BA;qBACrBA;gBAASA;cAAzBA,wBrBjnBqBA;cqBknBrBA;6BAEIA,gBAA0BA;2BAC5BA,cAD4BA;gBD3gB3BR,WCuFLQ,oBuB3KwBR,QvBimBRQ,gHAGOA,4CACNA,6BAjhBPA;;cGyHRA,UAAUA,2FHzHFA,8CAyhB6EA,8CAAoCA,kCAA6BA,oCAA+BA,OAAOA,mCAA6BA,uBAAsBA,0BAAmBA,yBAAaA;;;;;;;;;cAvJjRA;6BAyJMA,gBAA0BA;2BAC5BA,cAD4BA;gCAKTA;gCACNA;gCAjiBPA;;gBDALR,WCuFLQ,oBuB3KwBR,QvBinBRQ,sKAODA;;cAIbA;;;;;;;;;;;;;cA/NSA;;;;;;MAIPA;IAJOA,C;;;UAiGTI;MACEA;;;oDADFA;QACEA;;;;;;8BAAoBA;;;cD7exBA;8BCkfkCA;qBAASA;8BAAkBA;;;cuBpgBrCA,cvBigBVA,4EAEQA,uBACqBA;cACnBA;;;cANNA;mCuB7ZYA,kBxB4CzBA,yBCwXSA,cAAcA,gBACLA,8CAE6BA,qBAASA,wDAV3CA;;;cAaGA;;gBAbjBA;cAeAA;cGxUFA,UAAUA,mCxB3NWA,0DqBoiB8DA;qBAE7EA;;gBACFA,sBAAMA;cG5UVA,UAAUA,mCxB3NWA,mDqB0iB8DA;+BAC7EA,qBAAiBA;cAArBA;;;cGzUFA,UAAUA;cH4UNA;mCAAMA,yCACFA,kBAAeA,iCADnBA;;;;;qBAIEA;0BAA0BA,2BACAA,qBAC1BA;gBGzVNA,UAAUA,kFH2VmEA,0BAA2BA,kCAAyBA,sCAA8BA;gBG3V/JA,UAAUA;kBH+VNA,cAP4BA;uBAWTA;uBACNA;uBAtdTA;;gBDALZ,ewBpFmBA,QvBsiBNY;;;cAUhBA;;;MAjDEA;IAiDFA,C;;;;UAEAC;MACEA;;;oDADFA;QACEA;;;;;;;qBAAIA;8BAAgBA;qBAheGA;qBAAWA;qBAgeHA;;gBAE7BA,sBAAMA;;cAGWA;mCAAMA,eACrBA,cAAcA,aAAqBA,6BADpBA;;;;cAEDA;mCAAMA,kCACpBA,cAAcA,WAAuBA,6CADvBA;;;;qBAGRA;;cAANA;mCAAMA,+BA1ewBA,WA0esBA,6BAApDA;;;;;cAEJA;mCAAMA,yDAANA;;;;cACFA;;;MAbEA;IAaFA,C;;;;cC/lBGC;MACLA;6CAA+BA,4CAA+BA,mDAAsCA,gDAAwCA,qDAAmCA,4BACjLA;K;;;8BAYsBC;;kBAChBA;MACKA,MADcA;QACrBA,oCAgBJA;gBAdaA;;MACXA;QACSA,8DACGA;kBAINA;cSEYC;UTDdD;QAGFA;;MAEFA,WACFA;K;yBAEsBE;MACCA;kBAArBA;MAKAA,yBALAA,kDAGcA,wCAFJA,aAKZA;K;;;;uBA8CKC;;kBACCA,WAAWA;MAAfA;QACEA,MAQJA;;QAHIA,gBEmKAA,OAAUA,sCFnKeA;aACzBA;;IAEJA,C;eAamBC;MACbA;;;yDADaA;QACbA;;;;;;;;cAAkBA;sDAAqBA;;gBAEzCA;;;;;;;;cAGUA;mCsBakBA,kBxBYzBA,OA5FLA,yBEmEkBA,OFnElBA,i
BA4FKA,+EEzBOA;;;cAEEA;ctB5GSA;csB6GrBA;;;;;;;;;;;;cAJFA;cAMEA,gBEwIAA,OAAUA,oCFxIkBA;cAC5BA;;;;;;;;;;;;;;;;cAEJA;;;;;;MAbMA;IAaNA,C;gBAEmBC;MACbA;;;0DADaA;QACbA;;;;;;cAAkBA;sDAAqBA;;gBAEzCA;;;;;8BAE0CA,WAAWA;cAA1CA;mCAAMA,gEAANA;;;;cACKA;mCAAMA,+CAAwCA,mEAA9CA;;;cACFA;mCAAMA,iEAANA;;;;cAChBA;mCAAMA,6EAA6CA,kDAAnDA;;;cACAA;;;;;;cACFA;;;MATMA;IASNA,C;qBAEsBC;MAEhBA;;;+DAFgBA;QAEhBA;;;;;;;cAAcA;mCsBXYA,kBtBYzBA,aF5FLA,yBE2FwBA,OF3FxBA,uCwBgG6BA,0CC7J3BA,iBzB6DFA,iCyB7DEA,evB8JwCA,kCsBnHlBA,QtBmHlBA,0GANYA;;;cASlBA;;;;;;cACFA;;;MAVMA;IAUNA,C;eAEQC;;iBA/EWA;;kCAgFgBA;MAA1BA;8BAAaA;MAApBA,SAAOA,IACTA;K;qBAEaC;MACPA;;;+DADOA;QACPA;;;;;;;cF5GJA,8BE4GwBA,OF5GxBA;;cwBlBwBA,atB+HQA;;gBAAgCA;cAD9CA;mCsB5BYA,kBtB6BzBA,sDAC8CA,kCsBhI3BA,QtBgITA,0GAFGA;;;cAKLA;mCAAMA,mDAEjBA,WAAWA,wCAFAA;;;cAIbA;mCAAMA,uFAANA;;;yBA5DAA;cA8DFA,WA7DEA;;cA6DFA;;;MAXMA;IAWNA,C;YAZaC;;K;2BAcAC;MACJA;;;qEADIA;QACJA;;;;;;cAAPA,gBEwEEA,OAAUA;;gCF1KKA;;gBAoGGA,WAAlBA,oDAA2CA;;8BApG5BA;;cAsGjBA,2CAAcA;;cAChBA;;;MALSA;IAKTA,C;gBAIeC;MACTA;;;0DADSA;QACTA;;;;;;;cACmBA,oCADyCA,YFpIhEA,0ByB7DEA;czB6DFA,8BEwI0BA,OFxI1BA;cwBlBwBA;;gBtB4JOA;;csB5JPA,atB8JlBA;cAA2CA;;;;cAJ7BA;mCsBxDUA,kBtByDzBA,wDAKgCA,kCsBhKbA,QtBgKlBA,mHANcA;;;yEAUkBA;;cAAtCA;;;;cACFA;;;MAfMA;IAeNA,C;aAKkBC;MACZA;;;uDADYA;QACZA;;;;;;cAAmBA;;cFzJvBA,8BE4JmBA,OF5JnBA;cwBlBwBA;;gBtBgLSA;;cAFpBA;mCsB5EiBA,kBxB4CzBA,oFEgCQA;;;ctBnMUA;;csBuMvBA;;;;cACFA;;;MARMA;IAQNA,C;+CAzImBC;;K;;;qBGtFdC;MACCA;;QACFA,MAYJA;MrC6dwBpB,sCqCneGoB,0CAEuBA;QAE9CA;IAEJA,C;WASKC;UAEHA,2BADAA;UAEAA;IACFA,C;;EFxBgDC;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAwBvCC;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;;UAOrBC;MAChBA;MYPZC,0BZOmBD,2BAAsBA,MAAMA,qBAAgBA;IAC9DA,C;;;;UAMuBA;;;MAEFA;MADlBA;MC4NAA,UAAUA;MJzMZA;;MGdoCA,4BHcpCA;MGb8CA,0BsBhD5CA;MtBiD8DA,mCsBjD9DA;MtBkDkDA,6BsBlDlDA;MtBmD8CA,4BsBnD9CA;MtBoDkDA,6BsBpDlDA;MtBqD8DA,mCsBrD9DA;MtBuDkBA;MAElBA;QCoNAA,UAAUA;QDlNRA,MAaHA;;MATKA;MHFNA;;MGIEA;IAODA,C;;;;UAGiBA;MACdA;;;oDADcA;QACdA;;;;;;cAAuBA,oCqBzCAA,WxB0B3BA;cGgBcA;cACWA,qBAAbA;cACZ
A;cCgLEA,UAAUA,oCDhLgBA,4BAAgBA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAC5CA;;;cAGoBA;cAC2BA,4BAArBA;cAELA;cAAOA,eAAPA;kCvBotCVA,oBa3uCuBE,CATDC,2BUkCiBH,YAAvBA;cACSA,cAAPA;cACDA;cAA4BA;cACzBA,wEvB+sCpBA,oBa3uCuBE,CATDC,2BUuC6BH,YAA/BA;cAETA;cAAuBA;cAE/BA;4EAA2CA;cC8JvDA,UAAUA,wDD5JiCA;cnC84G7CA;;cgCp7GAA;;;cG0CMA,qFDjF6CI;cFuCnDJ,gBAkEKA,YwBpFmBA,QrB8DDA;;cAKjBA;;;;cAIyCA,4BAArBA;cC6IxBA,UAAUA;cD3INA;cHvDNA,8BAkEKA,YwBpFmBA,QrB0EDA;;cAMnBA;;;;cAG+BA,oBAAfA;cACeA,sBAAfA;oBAGVA;;c/B6SZK;cyBgDIL,wE3B7O4BA,8C2B6O5BA;gCM5VIA;;gBC4HJA,UAAUA,+BD3HqDA;2BFkC7DA,gBAA0BA;kBGgG5BM,UAAUA;yBH7FVN,cAAyBA;;gBGsFzBM,UAAUN,uCHpFmBA;uBAC/BA;;cD/GAA,8BAkEKA,YwBpFmBA,QrB4FDA;;cAOnBA;;;;;;cAIaA;cACcA,kBAAbA;cAC6BA,4BAArBA;cACNA;;cACuBA,kBAAtBA;cACsBA,kBAAtBA;cAC0BA,4BAArBA;cCuGxBA,UAAUA,gDDpGwBA,2BAAeA,2DAAkDA,mDAAwBA;cAErGA;;gBCgHtBA,UAAUA;gBJhNZA,kBAkEKA,YwBpFmBA,QrBqHCA;;gBAUjBA;;;cAG2CA;cAA/BA;cAGDA;cADbA;mCAAMA,kEAKEA,wEALRA;;;cHlHNA,kBAkEKA,YwBpFmBA,QrB4IDA;qBASTA,cFvBaA;;cEyBvBA;;;;cAG+BA,sBAAfA;cC0DlBA,UAAUA;cDxDNA;cH1INA,8BAkEKA,YwBpFmBA,QrB6JDA;;cAOnBA;;;;;;mCvB2lCOA,oBa3uCuBE,CATDC,2BU6J0BH,YAAXA;cACXA,oBAAhBA;cAC0BA,4BAArBA;cACFA;;gBCuDtBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrB8KCA;;gBAMjBA;;;8BAEmCA,mBACdA;;;cAAvBA;;;cC6BJA,UAAUA;cAOVO,UAAUD;yBF7MZN;cACAA,oCAAsBA;;cCwKhBA;;;;cAI2CA,4BAArBA;cCyB1BA,UAAUA;cDtBJA;mCAAMA,sDAEDA,gDAFLA;;;;;cH5KRA,8BAkEKA,YwBpFmBA,QrBmMDA,2EAEEA;;cAOrBA;;;;;;cAIiBA;cAC0BA,4BAArBA;cACqBA,4BAArBA;cACFA;;gBCetBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrBsNCA;;gBAMjBA;;;8BAEmCA,mBAEdA;;cAAvBA;;;cCZJA,UAAUA,2DDa+CA;cAE/CA;mCAAMA,oCAAkCA,aAAWA,oCAAnDA;;;;;cAHNA;;;;cCZJA,UAAUA,mFDkBsDA;cACjDA;mCAAMA,sDAEVA,aAAWA,oCAFPA;;;;;;cHrNjBA;cAkEKA,ewBpFmBA,QrB4ODA,0IuBlRJQ,CjC+CQC,gEiC/CQD;;cvB4R/BR;;;;cAGiBA;cACcA,sBAAfA;cCtClBA,UAAUA;oBDyCFA;;c/BuIZK;cyBgDIL,wE3B7O4BA,+C2B6O5BA;gCMtLIA;;gBC1CJA,UAAUA,kDD2CwCA;gBAC9BA;qBFrJlBA,gBAgB0BA;kBGgG5BU,UAAUJ;mBH7GVN,cAgByBA;;gBGsFzBU,UAAUV,wCHpGoBA;iBAChCA;;cD/FAA,8BAkEKA,YwBpFmBA,QrBmQDA;;cAOnBA;
;;;;;cAIiCA,oBAAhBA;cAC0BA,4BAArBA;cACqBA,4BAArBA;cACFA;;gBC/CtBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrBoRCA;;gBAMjBA;;;;uCAEmCA,mBAEdA;cAAvBA;;;cC1EJA,UAAUA;cD4EEA;mCAAMA,oCAAkCA,qCAAxCA;;;;;cAFRA;;;;cC1EJA,UAAUA;cDgFEA;mCAAMA,sDAEPA,qCAFCA;;;;;;cHlRdA;cAkEKA,ewBpFmBA,QrBwSDA,gJuB9UJQ,CjC+CQC,gEiC/CQD;;cvBuV/BR;;;;0CvB88BOA,oBa3uCuBE,CATDC,2BU0S2BH,YAAlBA;cACKA,4BAArBA;cACFA;;gBCrFtBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrB0TCA;;gBAMjBA;;;yBD7SRA,mBAAmBA;cEiMjBA,UAAUA,wCD+G0BA;yBAClBA,6BAAdA;;gBChHJW,UAAUX,0CH9FYA,gDAAkCA;iBAnC/BW,WAAWA,WAoC3BX;;cDrGXA,8BAkEKA,YwBpFmBA,QrBwUDA;;cAMnBA;;;;cAG2BA,oBAAbA;cACiBA,sBAAfA;cC9HlBA,UAAUA;cDiIkBA,gDAApBA;;2BFzMNA,gBAjB0BA;kBGgG5BY,UAAUN;yBH5EVN,cAjByBA;;gBGsFzBY,UAAUZ,wCHnEoBA;uBAC3BA;;cDhILA,8BAkEKA,YwBpFmBA,QrBwVDA;;cAMnBA;;;;cAG+BA,sBAAfA;cC7IlBA,UAAUA;cDgJkBA,gDAApBA;;;;;uBAEMA,cAAyBA;gBHpVzCA,kBAkEKA,YwBpFmBA,QrBuWCA,0FAEUA;;gBHvVnCA,kBAkEKA,YwBpFmBA,QrB+WCA;;cAQrBA;;;;cCrJFA,UAAUA,8CDuJ8BA;;;;;cAE3CA;;;MA1VKA;IA0VLA,C;;;EApS4Ca;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAsKpBA;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAwFTA;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAepBA;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;;UAyB3Db;MACfA,0BAAcA;IACfA,C;;;;OwBxXac;MAAEA;oBAAwDA;MAAtCA,uCAAkBA,gBAAeA,MAAKA;K;gBAchEC;MAAYA,iBAAKA;K;cAGlBC;MAAcA,gBAAIA;K;;;cC1ClBC;MAAcA,iBAAIA,MAAMA,mBAAQA,yBAAaA,QAAQA;K;;;gBxBZjDC;MACuCA,aAA9CA;mCAAQA,KViXQC;iBUjXyCD;MAAzDA,sDAAqEA;K;aAgF/DE;MACFA;cAEFA;iBAGeA;UAAMA;QASlBA;;QAPYA,yBAAKA;UAAMA;QAOvBA;;MAAPA,qBACFA;K;SA+EKC;;qBuBnI4BC;MvB4GQD,2BuB5GOC;QvBmJ5CD;UAE0BA;UACkBA;;QAKfA;QpCqRT7C;mC4DpdS6C;QAHjCA;iBxBoMQA;UACFA;;UAEAA,qBAAKA;;IASXA,C;gBA0DkBE;cACkBA;qBACxBA;;Uc4DZC;Ud5DYD;;QAARA,Oc3SJE,2BAsH4BF,oBAtH5BE,+BdgTAF;;QAFIA,OAAOA,qBAAKA,cAEhBA;K;cAEKG;mBAA8BA;;0CczDvBA;QADLA;UAAcA,kBAAMA;QACzBA;;MdyDiCA,WAAwBA;K;mBAlQ9BC;;K;;;UAWEC;;uBAAoBA;MAc7CA;QACFA,kBAAMA;MAEJA;QACFA,kBAAMA;MAIIA;MAGZA;QAC2BA;;QAGhBA,0BAAOA;QACLA;;MAKfC,qCAH4DD;MAM1DE;UACEA,WAASA;;QAEDA;MAzCyBF,SAAmBA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;mFrCU7CG,MACTA,6CADSA,A;6F8CAEC,MAAaA,oCAAbA,A;uG7CgyCiBC,MAC1BA,kCAAeA;;;;OADWA,A;mGAKAC,MAC1BA,kCAAeA;;;;OADWA,A;+FAKAC,MAC1BA,kCAAeA,4CADWA,A;6GAKAC,MAC1BA,kCAuNaA;;;;;;;KAQRA,GAhOqBA,A;yGAKAC,MAC1BA,kCAAeA,8CADWA,A;uHAKAC,MAC1BA,kCA4NaA;;;;;;;KAQRA,GArOqBA,A;uGAKAC,MAC1BA,kCAAeA,gDADWA,A;qHAKAC,MAC1BA,kCA+OaA;;;;;;KAORA,GAvPqBA,A;iHAKAC,MAC1BA,kCAAeA,kDADWA,A;+HAKAC,MAC1BA,kCAmPaA;;;;;;KAORA,GA3PqBA,A;qGcp3CRC,MAClBA,0CADkBA,A;6FW0iBCC,MbmkBnBC,cAnCSD,oBahiB+CA,4hBAArCA,A;gFAsLNE,MAAeA,oCAAfA,A;iDKhLTC,MNjiB8BA,kBMiiBDA,iBAA7BA,A;uEyBnhBYC;MAwLpBA,+BAFgBxF;MAEhBA;MAxLoBwF;K;2CMrChBC,MAASA,8BAATA,A;qDzB+RgBC,MAAOA,mBAAPA,A", + "x_org_dartlang_dart2js": { + "minified_names": { + "global": 
"$get$DART_CLOSURE_PROPERTY_NAME,794,$get$Logger_root,1171,$get$Random__secureRandom,1065,$get$TypeErrorDecoder_noSuchMethodPattern,1153,$get$TypeErrorDecoder_notClosurePattern,1154,$get$TypeErrorDecoder_nullCallPattern,1155,$get$TypeErrorDecoder_nullLiteralCallPattern,1156,$get$TypeErrorDecoder_nullLiteralPropertyPattern,1157,$get$TypeErrorDecoder_nullPropertyPattern,1158,$get$TypeErrorDecoder_undefinedCallPattern,1180,$get$TypeErrorDecoder_undefinedLiteralCallPattern,1181,$get$TypeErrorDecoder_undefinedLiteralPropertyPattern,1182,$get$TypeErrorDecoder_undefinedPropertyPattern,1183,$get$_AsyncRun__scheduleImmediateClosure,1061,$get$_Base64Decoder__emptyBuffer,1024,$get$_Base64Decoder__inverseAlphabet,1036,$get$_CopyingBytesBuilder__emptyList,1025,$get$_hashSeed,810,$get$logger,812,ArgumentError,312,ArgumentError$,814,ArgumentError$value,1185,ArrayIterator,815,AssertionError,311,AssertionError$,814,AsyncError,816,AsyncError_defaultStackTrace,1097,Base64Codec,817,Base64Decoder,818,Base64Encoder,819,BoundClosure,820,BoundClosure__computeFieldNamed,1016,BoundClosure__interceptorFieldNameCache,1035,BoundClosure__receiverFieldNameCache,1059,BoundClosure_evalRecipe,1103,BoundClosure_interceptorOf,1141,BoundClosure_receiverOf,1170,ByteBuffer,821,ByteData,822,BytesBuilder,737,Closure,823,Closure0Args,824,Closure2Args,825,Closure__computeSignatureFunctionNewRti,1017,Closure_cspForwardCall,1092,Closure_cspForwardInterceptedCall,1093,Closure_forwardCallTo,1112,Closure_forwardInterceptedCallTo,1113,Closure_fromTearOff,1116,Codec,826,ConcurrentModificationError,322,ConcurrentModificationError$,814,ConstantMap,827,ConstantMapView,828,ConstantStringMap,829,Converter,830,CryptorError,831,DART_CLOSURE_PROPERTY_NAME,794,DateTime,832,DateTime__fourDigits,1026,DateTime__threeDigits,1068,DateTime__twoDigits,1071,EfficientLengthIterable,833,EfficientLengthMappedIterable,834,Error,835,Error__throw,1069,Error_safeToString,1172,Error_throwWithStackTrace,1174,ExceptionAndStackTrace,836,Excep
tion_Exception,814,FixedLengthListMixin,837,Float32List,838,Float64List,839,FormatException,324,FormatException$,814,FrameCryptor,345,FrameCryptor_decodeFunction_decryptFrameInternal,840,FrameCryptor_decodeFunction_ratchedKeyInternal,841,FrameInfo,842,Function,843,Future,844,IndexError,845,IndexError$withLength,1188,Int16List,846,Int32List,847,Int8List,848,Interceptor,849,Invocation,850,Iterable,851,IterableExtension_firstWhereOrNull,852,Iterable_iterableToFullString,1143,Iterable_iterableToShortString,1144,Iterator,853,JSArray,854,JSArray_JSArray$fixed,1110,JSArray_JSArray$markFixed,1151,JSBool,855,JSInt,856,JSInvocationMirror,857,JSNull,858,JSNumNotInt,859,JSNumber,860,JSObject,861,JSString,862,JSUnmodifiableArray,863,JS_CONST,864,JavaScriptBigInt,865,JavaScriptFunction,866,JavaScriptIndexingBehavior,867,JavaScriptObject,868,JavaScriptSymbol,869,JsLinkedHashMap,870,JsNoSuchMethodError,52,JsNoSuchMethodError$,814,KeyOptions,871,KeyProvider,767,KeySet,872,LateError,873,LegacyJavaScriptObject,874,Level,875,LinkedHashMap,876,LinkedHashMapCell,877,LinkedHashMapKeyIterator,878,LinkedHashMapKeysIterable,879,LinkedHashMap_LinkedHashMap$_empty,1023,LinkedHashMap_LinkedHashMap$_literal,1041,List,880,ListBase,881,ListIterable,882,ListIterator,883,List_List$_of,1057,List_List$filled,1106,List_List$of,1162,LogRecord,884,LogRecord__nextNumber,1055,Logger,350,Logger_Logger,814,Logger_Logger_closure,885,Logger__loggers,1042,Logger_root,1171,Map,886,MapBase,887,MapBase_mapToString,1150,MapBase_mapToString_closure,888,MapView,889,MappedIterable,11,MappedIterable_MappedIterable,814,MappedIterator,890,MappedListIterable,891,NativeByteBuffer,892,NativeByteData,94,NativeByteData_NativeByteData,814,NativeFloat32List,893,NativeFloat64List,894,NativeInt16List,895,NativeInt32List,896,NativeInt8List,897,NativeTypedArray,898,NativeTypedArrayOfDouble,899,NativeTypedArrayOfInt,900,NativeTypedData,901,NativeUint16List,902,NativeUint32List,903,NativeUint8ClampedList,904,NativeUint8List,96,Native
Uint8List_NativeUint8List,814,NativeUint8List_NativeUint8List$view,1186,NoSuchMethodError,905,NoSuchMethodError_NoSuchMethodError$withInvocation,1187,NoSuchMethodError_toString_closure,906,Null,907,NullError,908,NullRejectionException,909,NullThrownFromJavaScriptException,910,Object,911,Object_hash,1136,OutOfMemoryError,912,ParticipantKeyHandler,343,ParticipantKeyHandler$,814,Pattern,913,PlainJavaScriptObject,914,Primitives__generalApplyFunction,1027,Primitives__identityHashCodeProperty,1032,Primitives__objectTypeNameNewRti,1056,Primitives_applyFunction,1077,Primitives_extractStackTrace,1105,Primitives_functionNoSuchMethod,1117,Primitives_functionNoSuchMethod_closure,915,Primitives_getDay,1118,Primitives_getHours,1119,Primitives_getMilliseconds,1124,Primitives_getMinutes,1125,Primitives_getMonth,1126,Primitives_getSeconds,1127,Primitives_getYear,1130,Primitives_lazyAsJsDate,1147,Primitives_objectHashCode,57,Primitives_objectTypeName,1161,Primitives_safeToString,1172,Primitives_stringFromNativeUint8List,1173,Primitives_trySetStackTrace,1179,Random__secureRandom,1065,RangeError,916,RangeError$range,1169,RangeError$value,1185,RangeError_checkNotNegative,1086,RangeError_checkValidRange,1087,Record,917,Rti,918,Rti__getCanonicalRecipe,1028,Rti__getFutureFromFutureOr,1029,Rti__getQuestionFromStar,1030,Rti__isUnionOfFunctionType,1038,RuntimeError,919,S,14,SentinelValue,920,SifGuard,921,StackOverflowError,922,StackTrace,923,StackTrace_current,1094,StateError,321,StateError$,814,StaticClosure,924,Stream,925,StreamController,926,StreamIterator_StreamIterator,814,StreamSubscription,927,Stream_length_closure,608,Stream_length_closure0,608,String,928,StringBuffer,929,StringBuffer__writeAll,1073,String_String$fromCharCodes,1114,String__stringFromUint8List,1067,Symbol,930,Symbol0,930,SystemHash_combine,1089,SystemHash_finish,1109,TearOffClosure,931,TrustedGetRuntimeType,932,TypeError,933,TypeErrorDecoder,934,TypeErrorDecoder_extractPattern,1104,TypeErrorDecoder_noSuchMethodPattern,
1153,TypeErrorDecoder_notClosurePattern,1154,TypeErrorDecoder_nullCallPattern,1155,TypeErrorDecoder_nullLiteralCallPattern,1156,TypeErrorDecoder_nullLiteralPropertyPattern,1157,TypeErrorDecoder_nullPropertyPattern,1158,TypeErrorDecoder_provokeCallErrorOn,1167,TypeErrorDecoder_provokePropertyErrorOn,1168,TypeErrorDecoder_undefinedCallPattern,1180,TypeErrorDecoder_undefinedLiteralCallPattern,1181,TypeErrorDecoder_undefinedLiteralPropertyPattern,1182,TypeErrorDecoder_undefinedPropertyPattern,1183,Uint16List,935,Uint32List,936,Uint8ClampedList,937,Uint8List,938,UnimplementedError,320,UnimplementedError$,814,UnknownJavaScriptObject,939,UnknownJsTypeError,940,UnmodifiableMapView,941,UnsupportedError,319,UnsupportedError$,814,WhereIterable,942,WhereIterator,943,Zone,944,Zone__current,1022,_AddStreamState,945,_AssertionError,946,_AsyncAwaitCompleter,947,_AsyncCallbackEntry,948,_AsyncCompleter,949,_AsyncRun__initializeScheduleImmediate,1033,_AsyncRun__initializeScheduleImmediate_closure,950,_AsyncRun__initializeScheduleImmediate_internalCallback,951,_AsyncRun__scheduleImmediateClosure,1061,_AsyncRun__scheduleImmediateJsOverride,1062,_AsyncRun__scheduleImmediateJsOverride_internalCallback,952,_AsyncRun__scheduleImmediateWithSetImmediate,1063,_AsyncRun__scheduleImmediateWithSetImmediate_internalCallback,953,_AsyncRun__scheduleImmediateWithTimer,1064,_Base64Decoder,954,_Base64Decoder__allocateBuffer,1010,_Base64Decoder__checkPadding,1015,_Base64Decoder__emptyBuffer,1024,_Base64Decoder__inverseAlphabet,1036,_Base64Decoder__trimPaddingChars,1070,_Base64Decoder_decodeChunk,1096,_Base64Encoder,955,_Base64Encoder_encodeChunk,1100,_BroadcastStream,956,_BroadcastStreamController,957,_BroadcastSubscription,556,_BufferingStreamSubscription,552,_BufferingStreamSubscription__registerErrorHandler,268,_Completer,958,_ControllerStream,959,_ControllerSubscription,960,_CopyingBytesBuilder,961,_CopyingBytesBuilder__emptyList,1025,_CyclicInitializationError,962,_DelayedData,963,_DelayedEvent,964
,_DoneStreamSubscription,551,_Enum,965,_Error,966,_Error_compose,1090,_EventDispatch,967,_Exception,968,_FunctionParameters,969,_Future,970,_FutureListener,971,_Future__addListener_closure,972,_Future__asyncCompleteError_closure,973,_Future__asyncCompleteWithValue_closure,974,_Future__chainCoreFuture,1014,_Future__chainCoreFuture_closure,975,_Future__chainForeignFuture_closure,976,_Future__chainForeignFuture_closure0,976,_Future__chainForeignFuture_closure1,976,_Future__prependListeners_closure,977,_Future__propagateToListeners,1058,_Future__propagateToListeners_handleError,978,_Future__propagateToListeners_handleValueCallback,979,_Future__propagateToListeners_handleWhenCompleteCallback,980,_Future__propagateToListeners_handleWhenCompleteCallback_closure,981,_Future__propagateToListeners_handleWhenCompleteCallback_closure0,981,_HashMap,982,_HashMapKeyIterable,983,_HashMapKeyIterator,984,_HashMap__getTableEntry,1031,_HashMap__newHashTable,1053,_HashMap__setTableEntry,1066,_IdentityHashMap,985,_JSSecureRandom,723,_JS_INTEROP_INTERCEPTOR_TAG,986,_KeysOrValues,987,_KeysOrValuesOrElementsIterator,988,_NativeTypedArrayOfDouble_NativeTypedArray_ListMixin,989,_NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin,990,_NativeTypedArrayOfInt_NativeTypedArray_ListMixin,991,_NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin,992,_Parser_collectArray,1088,_Parser_create,1091,_Parser_handleArguments,1131,_Parser_handleDigit,1132,_Parser_handleExtendedOperations,1133,_Parser_handleIdentifier,1134,_Parser_handleTypeArguments,1135,_Parser_indexToType,1137,_Parser_parse,1163,_Parser_toType,1176,_Parser_toTypes,1177,_Parser_toTypesNamed,1178,_PendingEvents,993,_PendingEvents_schedule_closure,994,_Required,995,_RootZone,996,_RootZone_bindCallbackGuarded_closure,997,_StackTrace,998,_StreamControllerLifecycle,999,_StreamImpl,1000,_StreamIterator,1001,_StringStackTrace,1002,_SyncBroadcastStreamController,1003,_SyncBroadcastStreamController__sendData_c
losure,566,_TimerImpl,254,_TimerImpl$,814,_TimerImpl_internalCallback,1004,_Type,134,_TypeError,1005,_TypeError$fromMessage,1115,_TypeError__TypeError$forType,1111,_Universe__canonicalRecipeJoin,1012,_Universe__canonicalRecipeJoinNamed,1013,_Universe__createFutureOrRti,1018,_Universe__createGenericFunctionRti,1019,_Universe__createQuestionRti,1020,_Universe__createStarRti,1021,_Universe__installTypeTests,1034,_Universe__lookupBindingRti,1043,_Universe__lookupFunctionRti,1044,_Universe__lookupFutureOrRti,1045,_Universe__lookupGenericFunctionParameterRti,1046,_Universe__lookupGenericFunctionRti,1047,_Universe__lookupInterfaceRti,1048,_Universe__lookupQuestionRti,1049,_Universe__lookupRecordRti,1050,_Universe__lookupStarRti,1051,_Universe__lookupTerminalRti,1052,_Universe_addErasedTypes,1074,_Universe_addRules,1075,_Universe_bind,1084,_Universe_eval,1101,_Universe_evalInEnvironment,1102,_Universe_findErasedType,1107,_Universe_findRule,1108,_UnmodifiableMapMixin,1006,_UnmodifiableMapView_MapView__UnmodifiableMapMixin,1007,_UnmodifiableNativeByteBufferView,1008,_Utils_newArrayOrEmpty,1152,_Utils_objectAssign,1160,_Zone,1009,_areArgumentsSubtypes,245,_arrayInstanceType,123,_asBool,160,_asBoolQ,162,_asBoolS,161,_asDouble,163,_asDoubleQ,165,_asDoubleS,164,_asInt,167,_asIntQ,169,_asIntS,168,_asNum,171,_asNumQ,173,_asNumS,172,_asObject,155,_asString,175,_asStringQ,177,_asStringS,176,_asTop,157,_asyncAwait,258,_asyncRethrow,260,_asyncReturn,259,_asyncStartSync,257,_awaitOnObject,261,_awaitOnObject_closure,1011,_awaitOnObject_closure0,1011,_callDartFunctionFast,330,_callDartFunctionFast1,333,_checkValidIndex,98,_checkValidRange,99,_convertDartFunctionFast,329,_createRuntimeType,133,_diagnoseUnsupportedOperation,47,_ensureNativeList,95,_failedAsCheck,149,_finishIsFn,139,_functionRtiToString,180,_functionToJS1,332,_generalAsCheckImplementation,147,_generalIsTestImplementation,142,_generalNullableAsCheckImplementation,148,_generalNullableIsTestImplementation,143,_hashSeed,810,_ins
tallSpecializedAsCheck,140,_installSpecializedIsTest,136,_instanceType,124,_instanceTypeFromConstructor,125,_instanceTypeFromConstructorMiss,126,_interceptError,264,_interceptUserError,265,_invokeClosure,59,_isBool,159,_isFunctionSubtype,242,_isFutureOr,153,_isInCallbackLoop,1037,_isInt,166,_isInterfaceSubtype,243,_isListTestViaProperty,146,_isNever,158,_isNum,170,_isObject,154,_isRecordSubtype,246,_isString,174,_isSubtype,240,_isTestViaProperty,145,_isTop,156,_iterablePartsToStrings,327,_lastCallback,1039,_lastPriorityCallback,1040,_makeAsyncAwaitCompleter,255,_microtaskLoop,269,_nextCallback,1054,_noDartifyRequired,340,_noJsifyRequired,334,_nullDoneHandler,278,_nullErrorHandler,277,_nullIs,141,_recordRtiToString,179,_registerErrorHandler,268,_rootHandleError,279,_rootHandleError_closure,1060,_rootRun,280,_rootRunBinary,282,_rootRunUnary,281,_rootScheduleMicrotask,283,_rtiArrayToString,178,_rtiToString,182,_runGuarded,275,_scheduleAsyncCallback,271,_schedulePriorityAsyncCallback,272,_setArrayType,119,_startMicrotaskLoop,270,_structuralTypeOf,129,_substitute,108,_substituteArray,115,_substituteFunctionParameters,117,_substituteNamed,116,_unminifyOrTag,183,_unwrapNonDartException,55,_wrapJsFunctionForAsync,262,_wrapJsFunctionForAsync_closure,1072,allowInterop,331,alternateTagFunction,1076,applyHooksTransformer,91,argumentErrorValue,40,assertThrow,76,async__AsyncRun__scheduleImmediateJsOverride$closure,1078,async__AsyncRun__scheduleImmediateWithSetImmediate$closure,1079,async__AsyncRun__scheduleImmediateWithTimer$closure,1080,async___nullDoneHandler$closure,1081,async___nullErrorHandler$closure,1082,async___startMicrotaskLoop$closure,1083,bool,1085,boolConversionCheck,75,callMethod,336,checkNotNullable,9,closureFromTearOff,68,closureFunctionType,120,convertDartClosureToJS,60,convertDartClosureToJSUncached,61,createRecordTypePredicate,92,createRuntimeType,131,dartify,341,dartify_convert,1095,defineProperty,79,diagnoseIndexError,38,diagnoseRangeError,39,dispatchRecordsF
orInstanceTags,1098,double,1099,fillLiteralMap,58,findNALUIndices,342,findType,107,getAlgoOptions,355,getInterceptor$,1120,getInterceptor$asx,1121,getInterceptor$ax,1122,getInterceptor$x,1123,getIsolateAffinityTag,78,getNativeInterceptor,1,getRuntimeTypeOfDartObject,128,getTagFunction,1128,getTraceFromException,56,getTrackCryptor,344,getTrackCryptor_closure,1129,getTypeFromTypesTable,127,iae,36,initHooks,90,initHooks_closure,1138,initHooks_closure0,1138,initHooks_closure1,1138,initNativeDispatch,88,initNativeDispatchContinue,89,initNativeDispatchFlag,1139,initializeExceptionWrapper,42,instanceOrFunctionType,121,instanceType,122,int,1140,interceptorsForUncacheableTags,1142,ioore,37,isDefinitelyTopType,137,isJsIndexable,13,isNullable,247,isSoundTopType,248,isSubtype,239,isToStringVisiting,10,jsify,335,jsify__convert,1145,keyProviders,1146,logger,812,lookupAndCacheInterceptor,80,main,349,main__closure,1148,main__closure0,1148,main__closure1,1148,main__closure2,1148,main_closure,1149,main_closure0,1149,main_closure1,1149,main_closure2,1149,makeDefaultDispatchRecord,87,makeDispatchRecord,0,makeLeafDispatchRecord,86,num,1159,objectHashCode,57,participantCryptors,1164,patchInteriorProto,85,printString,351,promiseToFuture,337,promiseToFuture_closure,1165,promiseToFuture_closure0,1165,prototypeForTagFunction,1166,quoteStringForRegExp,93,saveStackTrace,54,scheduleMicrotask,273,throwConcurrentModificationError,48,throwCyclicInit,77,throwExpression,44,throwExpressionWithWrapper,45,throwLateFieldADI,353,throwLateFieldNI,352,throwUnsupportedOperation,46,toStringVisiting,1175,toStringWrapper,43,typeLiteral,135,unminifyOrTag,12,unsetCryptorParticipant,348,unsetCryptorParticipant_closure,1184,unwrapException,53,wrapException,41", + "instance": 
"$add,1189,$and,1190,$arguments,1216,$call,1252,$div,1191,$eq,1192,$ge,1193,$gt,1194,$index,1195,$indexSet,1196,$le,1197,$lt,1198,$mod,1199,$mul,1200,$negate,1201,$or,1202,$protected,1235,$shl,1203,$shr,1204,$sub,1205,$tdiv,1206,$this,1243,$xor,1207,T,1214,_,1208,_0,1209,_JSSecureRandom$0,814,_TimerImpl$2,814,__,1210,__0,1211,__FrameCryptor_kind_A,1673,__ParticipantKeyHandler_cryptoKeyRing_A,1683,___,1212,__internal$_current,1290,__internal$_index,1293,__internal$_iterable,1294,__internal$_length,1296,__internal$_name,1298,__js_helper$_addHashTableEntry,1300,__js_helper$_addHashTableEntry$3,1300,__js_helper$_current,1311,__js_helper$_getBucket$2,1317,__js_helper$_index,1320,__js_helper$_kind,1326,__js_helper$_length,1328,__js_helper$_message,1331,__js_helper$_name,1335,__js_helper$_rest,1345,__rti$_message,1375,_add,1384,_add$1,1384,_addAllFromArray,1274,_addAllFromArray$1,1274,_addEventError,1385,_addEventError$0,1385,_addHashTableEntry,1499,_addHashTableEntry$3,1499,_addListener,1386,_addListener$1,1386,_addPending,1387,_addPending$1,1387,_addStreamState,1388,_alphabet,1519,_arguments,1301,_argumentsExpr,1302,_as,1364,_async$_box_0,1392,_async$_controller,1432,_async$_hasValue,1447,_async$_next,1462,_async$_previous,1475,_asyncComplete,1389,_asyncComplete$1,1389,_asyncCompleteError,1390,_asyncCompleteError$2,1390,_asyncCompleteWithValue,1391,_asyncCompleteWithValue$1,1391,_bind,1365,_bind$1,1365,_bindCache,1366,_box_0,1303,_box_1,1393,_buffer,1289,_cachedRuntimeType,1367,_callOnCancel,1394,_callOnCancel$0,1394,_canFire,1395,_cancelFuture,1396,_canonicalRecipe,1368,_captured_T_1,1540,_captured__convertedObjects_0,1541,_captured_arguments_2,1304,_captured_bodyFunction_0,1397,_captured_callback_0,1398,_captured_callback_1,1399,_captured_completer_0,1542,_captured_data_1,1400,_captured_decryptFrameInternal_3,1676,_captured_dispatch_1,1401,_captured_div_1,1402,_captured_e_1,1403,_captured_error_0,1404,_captured_error_1,1405,_captured_f_1,1406,_captured_future_1,1407,_c
aptured_getTag_0,1305,_captured_getUnknownTag_0,1306,_captured_handleMessage_0,1686,_captured_hasError_2,1408,_captured_headerLength_5,1677,_captured_ivLength_6,1678,_captured_iv_3,1679,_captured_joinedResult_0,1409,_captured_listener_1,1410,_captured_name_0,1688,_captured_namedArgumentList_1,1307,_captured_originalSource_1,1411,_captured_protected_0,1412,_captured_prototypeForTag_0,1308,_captured_result_1,1501,_captured_s_2,1413,_captured_sb_1,1525,_captured_sourceResult_1,1414,_captured_span_2,1415,_captured_srcFrame_4,1680,_captured_stackTrace_1,1416,_captured_stackTrace_2,1417,_captured_target_1,1418,_captured_this_0,1419,_captured_this_1,1420,_captured_this_2,1681,_captured_trackId_0,1687,_captured_value_1,1421,_cell,1309,_chainForeignFuture,1422,_chainForeignFuture$1,1422,_chainFuture,1423,_chainFuture$1,1423,_chainSource,1424,_checkMutable$1,1354,_checkPosition,1355,_checkPosition$3,1355,_checkState,1425,_checkState$1,1425,_children,1689,_clearPendingComplete$0,1426,_cloneResult,1427,_cloneResult$1,1427,_codeUnitAt$1,1275,_collection$_box_0,1500,_collection$_current,1505,_collection$_keys,1509,_collection$_length,1510,_collection$_map,1511,_collection$_nums,1512,_collection$_removeHashTableEntry$2,1515,_collection$_rest,1516,_collection$_strings,1518,_complete$1,1428,_completeError,1429,_completeError$2,1429,_completeWithResultOf,1430,_completeWithResultOf$1,1430,_completeWithValue,1431,_completeWithValue$1,1431,_computeHashCode$1,1502,_computeKeys,1503,_computeKeys$0,1503,_containsKey,1504,_containsKey$1,1504,_containsTableEntry$2,1310,_contents,1526,_controller,1690,_convert$_state,1521,_convertedObjects,1215,_core$_arguments,1523,_core$_box_0,1524,_core$_memberName,1532,_core$_receiver,1536,_createSubscription$4,1433,_current,1276,_data,1356,_decryptionFailureCount,1684,_deleteTableEntry$2,1312,_doneFuture,1434,_dynamicCheckData,1369,_e2ee_cryptor$_box_0,1674,_e2ee_cryptor$_box_1,1675,_elements,1313,_enabled,1682,_encoder,1520,_enumToString,1527,_enumToStr
ing$0,1527,_error,1435,_errorExplanation,1528,_errorName,1529,_errorTest,1436,_eval,1370,_eval$1,1370,_evalCache,1371,_eventScheduled,1437,_eventState,1438,_exception,1314,_existingArgumentNames,1530,_expectsEvent$1,1439,_expr,1315,_f,1291,_findBucketIndex,1506,_findBucketIndex$2,1506,_first,1316,_firstSubscription,1440,_forEachListener,1441,_forEachListener$1,1441,_future,1442,_get,1507,_get$1,1507,_getBucket,1508,_getBucket$2,1508,_getRandomBytes$2,1544,_getStream,1691,_getStream$0,1691,_getTableBucket$2,1318,_getTableCell$2,1319,_getUint32$2,1357,_grow$1,1292,_handle,1443,_hasError,1444,_hasOneListener,1445,_hasPending,1446,_hasValidKey,1685,_hasValue,1531,_ignoreError,1448,_index,1277,_interceptor,1321,_internalName,1322,_invalidPosition,1358,_invalidPosition$3,1358,_irritant,1323,_is,1372,_isCanceled,1449,_isChained,1450,_isComplete,1451,_isEmpty,1452,_isFiring,1453,_isInputPaused,1454,_isInt32$1,1278,_isPaused,1455,_isSubtypeCache,1373,_isUnmodifiable$0,1359,_iterable,1279,_iterator,1295,_jsIndex,1324,_keys,1325,_kind,1374,_last,1327,_lastSubscription,1456,_length,1280,_level,1692,_levelChangedController,1693,_map,1329,_math$_buffer,1543,_mayAddEvent,1457,_mayAddListener,1458,_mayComplete,1459,_mayResumeInput,1460,_memberName,1330,_message,1297,_method,1332,_microsecond,1533,_modifications,1333,_modified,1334,_modified$0,1334,_name,1534,_named,1376,_namedArgumentNames,1336,_namedArguments,1535,_nativeBuffer,1360,_newFutureWithSameType$0,1461,_newHashTable,1337,_newHashTable$0,1337,_newLinkedCell,1338,_newLinkedCell$2,1338,_next,1339,_nextListener,1463,_nums,1340,_offset,1513,_onData,1464,_onDone,1465,_onError,1466,_onListen$1,1467,_onMicrotask,1468,_onMicrotask$0,1468,_onPause,1469,_onPause$0,1469,_onResume,1470,_onResume$0,1470,_onValue,1471,_once,1472,_optionalPositional,1377,_pattern,1341,_pending,1473,_precomputed1,1378,_prependListeners,1474,_prependListeners$1,1474,_previous,1342,_primary,1379,_publish,1694,_publish$1,1694,_receiver,1343,_recordPause$1,1
476,_recordResume$1,1477,_remove$1,1514,_removeAfterFiring,1478,_removeHashTableEntry,1344,_removeHashTableEntry$2,1344,_removeListener$1,1479,_removeListeners,1480,_removeListeners$0,1480,_requiredPositional,1380,_rest,1381,_resultOrListeners,1481,_reverseListeners,1482,_reverseListeners$1,1482,_rti,1382,_scheduleMicrotask,1483,_sendData,1484,_sendData$1,1484,_set$2,1517,_setChained$1,1485,_setError$2,1486,_setErrorObject,1487,_setErrorObject$1,1487,_setInt8,1361,_setInt8$2,1361,_setKeys$1,1346,_setPendingComplete$0,1488,_setRangeFast$4,1362,_setTableEntry$3,1347,_setUint32$3,1363,_setValue$1,1489,_shlPositive$1,1281,_shrBothPositive,1282,_shrBothPositive$1,1282,_shrOtherPositive,1283,_shrOtherPositive$1,1283,_shrReceiverPositive$1,1284,_source,1299,_specializedTestResource,1383,_stackTrace,1537,_state,1490,_stateData,1491,_strings,1348,_subscribe,1492,_subscribe$4,1492,_subscription,1493,_target,1349,_tdivFast,1285,_tdivFast$1,1285,_tdivSlow,1286,_tdivSlow$1,1286,_thenAwait,1494,_thenAwait$1$2,1494,_tick,1495,_toListFixed$0,1287,_toListGrowable$0,1288,_toggleEventId$0,1496,_trace,1350,_typeArgumentCount,1351,_unlinkCell,1352,_unlinkCell$1,1352,_urlSafe,1522,_value,1538,_values,1353,_whenCompleteAction,1497,_writeString$1,1539,_zone,1498,abs$0,1244,add,1245,add$1,1245,addAll,1246,addAll$1,1246,argumentCount,1247,asUint8List,1248,asUint8List$0,1248,asUint8List$2,1248,bindCallback$1$1,1249,bindCallbackGuarded,1250,bindCallbackGuarded$1,1250,bodyFunction,1217,buffer,1251,callback,1253,ceilToDouble$0,1254,checkGrowable$2,1255,children,1256,close$2,1257,code,1258,codeUnitAt$1,1259,codec,1260,comma,1261,complete,1262,complete$1,1262,completeError,1263,completeError$1,1263,completeError$2,1263,completer,1218,config$1,1264,consecutiveSifCount,1265,contains$1,1266,containsKey,1267,containsKey$1,1267,convert,1268,convert$1,1268,count,1269,createBuffer$1,1270,cryptoKeyRing,1271,current,1094,currentKeyIndex,1272,currentkeySet,1273,dartException,1545,data,1219,day,1546,decode,1
547,decode$1,1547,decode$3,1547,decodeFunction,1548,decodeFunction$2,1548,decodeFunction$body$FrameCryptor,1548,decoder,1549,decryptFrameInternal,1220,decrypted,1550,decryptionFailure,1551,decryptionFailure$0,1551,decryptionSuccess$0,1552,deriveKeys,1553,deriveKeys$2,1553,discardFrameWhenCryptorNotReady,1554,dispatch,1221,div,1222,e,1223,elementAt,1555,elementAt$1,1555,enabled,1556,encode,1557,encode$1,1557,encode$4,1557,encodeFunction,1558,encodeFunction$2,1558,encodeFunction$body$FrameCryptor,1558,encoder,1559,encryptionKey,1560,end,1561,endsWith,1562,endsWith$1,1562,enqueueFrame,1563,enqueueFrame$3,1563,error,1564,errorCallback,1565,errorCallback$2,1565,errorZone,1566,exportKey,1567,exportKey$1,1567,f,1224,failureTolerance,1568,fine$1,1569,finer$1,1570,first,1571,firstPendingEvent,1572,floorToDouble$0,1573,forEach,1574,forEach$1,1574,frameType,1575,fullName,1576,future,1577,get$$call,1252,get$_,1208,get$_0,1209,get$__,1210,get$__0,1211,get$___,1212,get$__js_helper$_addHashTableEntry,1300,get$__js_helper$_name,1335,get$_add,1384,get$_addAllFromArray,1274,get$_addEventError,1385,get$_addHashTableEntry,1499,get$_addListener,1386,get$_addPending,1387,get$_asyncComplete,1389,get$_asyncCompleteError,1390,get$_asyncCompleteWithValue,1391,get$_bind,1365,get$_callOnCancel,1394,get$_canFire,1395,get$_chainForeignFuture,1422,get$_chainFuture,1423,get$_chainSource,1424,get$_checkPosition,1355,get$_checkState,1425,get$_cloneResult,1427,get$_completeError,1429,get$_completeWithResultOf,1430,get$_completeWithValue,1431,get$_computeKeys,1503,get$_containsKey,1504,get$_core$_arguments,1523,get$_core$_memberName,1532,get$_core$_receiver,1536,get$_enumToString,1527,get$_error,1435,get$_errorExplanation,1528,get$_errorName,1529,get$_errorTest,1436,get$_eval,1370,get$_eventScheduled,1437,get$_existingArgumentNames,1530,get$_findBucketIndex,1506,get$_forEachListener,1441,get$_get,1507,get$_getBucket,1508,get$_getStream,1691,get$_hasError,1444,get$_hasOneListener,1445,get$_hasPending,1
446,get$_ignoreError,1448,get$_invalidPosition,1358,get$_isCanceled,1449,get$_isChained,1450,get$_isComplete,1451,get$_isEmpty,1452,get$_isFiring,1453,get$_isInputPaused,1454,get$_isPaused,1455,get$_keys,1325,get$_mayAddEvent,1457,get$_mayAddListener,1458,get$_mayComplete,1459,get$_mayResumeInput,1460,get$_modified,1334,get$_namedArguments,1535,get$_nativeBuffer,1360,get$_newHashTable,1337,get$_newLinkedCell,1338,get$_onError,1466,get$_onMicrotask,1468,get$_onPause,1469,get$_onResume,1470,get$_onValue,1471,get$_prependListeners,1474,get$_publish,1694,get$_removeAfterFiring,1478,get$_removeHashTableEntry,1344,get$_removeListeners,1480,get$_reverseListeners,1482,get$_scheduleMicrotask,1483,get$_sendData,1484,get$_setErrorObject,1487,get$_setInt8,1361,get$_shrBothPositive,1282,get$_shrOtherPositive,1283,get$_subscribe,1492,get$_target,1349,get$_tdivFast,1285,get$_tdivSlow,1286,get$_thenAwait,1494,get$_unlinkCell,1352,get$_whenCompleteAction,1497,get$_zone,1498,get$add,1245,get$addAll,1246,get$asUint8List,1248,get$bindCallbackGuarded,1250,get$buffer,1251,get$complete,1262,get$completeError,1263,get$containsKey,1267,get$convert,1268,get$cryptoKeyRing,1271,get$current,1094,get$day,1546,get$decode,1547,get$decodeFunction,1548,get$decoder,1549,get$decryptionFailure,1551,get$deriveKeys,1553,get$elementAt,1555,get$enabled,1556,get$encode,1557,get$encodeFunction,1558,get$encoder,1559,get$end,1561,get$endsWith,1562,get$enqueueFrame,1563,get$errorZone,1566,get$exportKey,1567,get$forEach,1574,get$fullName,1576,get$future,1577,get$getKeySet,1578,get$getParticipantKeyHandler,1579,get$getSharedKeyHandler,1581,get$getUnencryptedBytes,1583,get$handleError,1584,get$handlesComplete,1589,get$handlesError,1590,get$handlesValue,1591,get$hasErrorCallback,1592,get$hasErrorTest,1593,get$hasValidKey,1594,get$hashCode,1595,get$hour,1598,get$internalComputeHashCode,1606,get$internalFindBucketIndex,1608,get$internalGet,1609,get$invalidValue,1612,get$isAccessor,1613,get$isClosed,1614,get$isEmpty,1
615,get$isGetter,1616,get$isNotEmpty,1618,get$isScheduled,1619,get$iterator,1624,get$keyOptions,1627,get$keys,1630,get$kind,1631,get$lastIndexOf,1633,get$length,1636,get$lengthInBytes,1637,get$level,1638,get$listen,1639,get$log,1644,get$map,1647,get$matchTypeError,1649,get$matchesErrorTest,1650,get$memberName,1652,get$microsecond,1654,get$millisecond,1655,get$millisecondsSinceEpoch,1656,get$minute,1657,get$month,1659,get$moveNext,1660,get$namedArguments,1662,get$nextInt,1665,get$noSuchMethod,1666,get$offsetInBytes,1669,get$onRecord,1672,get$positionalArguments,1700,get$putIfAbsent,1702,get$ratchet,1703,get$ratchetKey,1705,get$ratchetMaterial,1706,get$readFrameInfo,1709,get$recordUserFrame,1711,get$registerBinaryCallback,1712,get$remove,1716,get$reset,1718,get$run,1721,get$runBinary,1722,get$runGuarded,1723,get$runUnary,1724,get$runUnaryGuarded,1725,get$runtimeType,1726,get$schedule,1727,get$second,1728,get$setKey,1733,get$setKeySetFromMaterial,1735,get$setRange,1737,get$setupTransform,1741,get$stackTrace,1750,get$start,1751,get$startsWith,1752,get$stream,1755,get$sublist,1756,get$substring,1757,get$then,1759,get$toBytes,1762,get$toInt,1763,get$toRadixString,1766,get$toString,1767,get$year,1780,getKeySet,1578,getKeySet$1,1578,getParticipantKeyHandler,1579,getParticipantKeyHandler$1,1579,getRange$2,1580,getSharedKeyHandler,1581,getSharedKeyHandler$0,1581,getTag,1225,getUint32$1,1582,getUnencryptedBytes,1583,getUnencryptedBytes$2,1583,getUnknownTag,1226,handleError,1584,handleError$1,1584,handleMessage,1227,handleNext$1,1585,handleUncaughtError$2,1586,handleValue$1,1587,handleWhenComplete$0,1588,handlesComplete,1589,handlesError,1590,handlesValue,1591,hasError,1228,hasErrorCallback,1592,hasErrorTest,1593,hasValidKey,1594,hashCode,1595,hashMapCellKey,1596,hashMapCellValue,1597,headerLength,1229,hour,1598,id,1599,inSameErrorZone$1,1600,index,1601,indexable,1602,info$1,1603,initialKeyIndex,1604,initialKeySet,1605,internalComputeHashCode,1606,internalComputeHashCode$1,1606
,internalContainsKey$1,1607,internalFindBucketIndex,1608,internalFindBucketIndex$2,1608,internalGet,1609,internalGet$1,1609,internalRemove$1,1610,internalSet$2,1611,invalidValue,1612,isAccessor,1613,isClosed,1614,isEmpty,1615,isGetter,1616,isLoggable$1,1617,isNotEmpty,1618,isScheduled,1619,isSifAllowed$0,1620,isSync,1621,isUndefined,1622,isUtc,1623,iterator,1624,iv,1231,ivLength,1230,join$1,1625,joinedResult,1232,keyHandler,1626,keyOptions,1627,keyProviderOptions,1628,keyRingSze,1629,keys,1630,kind,1631,lastError,1632,lastIndexOf,1633,lastIndexOf$1,1633,lastPendingEvent,1634,lastSifReceivedAt,1635,length,1636,lengthInBytes,1637,level,1638,listen,1639,listen$1,1639,listen$4$cancelOnError$onDone$onError,1639,listener,1640,listenerHasError,1641,listenerValueOrError,1642,listeners,1643,log,1644,log$4,1644,loggerName,1645,makeIv$2$synchronizationSource$timestamp,1646,map,1647,map$1$1,1647,matchAsPrefix$2,1648,matchTypeError,1649,matchTypeError$1,1649,matchesErrorTest,1650,matchesErrorTest$1,1650,material,1651,memberName,1652,message,1653,microsecond,1654,millisecond,1655,millisecondsSinceEpoch,1656,minute,1657,modifiedObject,1658,month,1659,moveNext,1660,moveNext$0,1660,name,1661,namedArgumentList,1233,namedArguments,1662,names,1663,next,1664,nextInt,1665,nextInt$1,1665,noSuchMethod,1666,noSuchMethod$1,1666,object,1667,offset,1668,offsetInBytes,1669,onCancel,1670,onListen,1671,onRecord,1672,originalSource,1234,padLeft$2,1695,parent,1696,participantIdentity,1697,participantKeys,1698,perform$1,1699,positionalArguments,1700,postMessage$1,1701,prototypeForTag,1236,putIfAbsent,1702,putIfAbsent$2,1702,ratchet,1703,ratchet$2,1703,ratchetCount,1704,ratchetKey,1705,ratchetKey$1,1705,ratchetMaterial,1706,ratchetMaterial$2,1706,ratchetSalt,1707,ratchetWindowSize,1708,readFrameInfo,1709,readFrameInfo$1,1709,recordSif$0,1710,recordUserFrame,1711,recordUserFrame$0,1711,registerBinaryCallback,1712,registerBinaryCallback$3$1,1712,registerCallback$1$1,1713,registerUnaryCallback$2$1,1714,
remainder$1,1715,remove,1716,remove$1,1716,removeLast$0,1717,reset,1718,reset$0,1718,resetKeyStatus$0,1719,result,1720,run,1721,run$1$1,1721,runBinary,1722,runBinary$3$3,1722,runGuarded,1723,runGuarded$1,1723,runUnary,1724,runUnary$2$2,1724,runUnaryGuarded,1725,runUnaryGuarded$1$2,1725,runtimeType,1726,s,1237,sb,1238,schedule,1727,schedule$1,1727,scheduleMicrotask$1,273,second,1728,sendCounts,1729,sequenceNumber,1730,set$__ParticipantKeyHandler_cryptoKeyRing_A,1683,set$__internal$_current,1290,set$__js_helper$_current,1311,set$_async$_next,1462,set$_async$_previous,1475,set$_collection$_current,1505,set$_controller,1690,set$_current,1276,set$_firstSubscription,1440,set$_lastSubscription,1456,set$_onDone,1465,set$_pending,1473,set$cryptoKeyRing,1271,set$kind,1631,set$length,1636,set$level,1638,setEnabled$1,1731,setInt8$2,1732,setKey,1733,setKey$1,1733,setKey$2$keyIndex,1733,setKeyIndex$1,1734,setKeySetFromMaterial,1735,setKeySetFromMaterial$2,1735,setParticipant$2,1736,setRange,1737,setRange$3,1737,setRange$4,1737,setSharedKey$2$keyIndex,1738,setSifTrailer$1,1739,setUint32$2,1740,setupTransform,1741,setupTransform$5$kind$operation$readable$trackId$writable,1741,setupTransform$6$codec$kind$operation$readable$trackId$writable,1741,setupTransform$body$FrameCryptor,1741,sharedKey,1742,sharedKeyHandler,1743,shouldChain$1,1744,sifGuard,1745,sifSequenceStartedAt,1746,skip$1,1747,source,1748,sourceResult,1239,span,1240,srcFrame,1241,ssrc,1749,stackTrace,1750,start,1751,startsWith,1752,startsWith$1,1752,state,1753,storedCallback,1754,stream,1755,sublist,1756,sublist$1,1756,sublist$2,1756,substring,1757,substring$1,1757,substring$2,1757,super$LegacyJavaScriptObject$toString,1767,super$_BroadcastStreamController$_addEventError,1213,take$1,1758,target,1242,then,1759,then$1$2$onError,1759,time,1760,timestamp,1761,toBytes,1762,toBytes$0,1762,toInt,1763,toInt$0,1763,toList$1$growable,1764,toLowerCase$0,1765,toRadixString,1766,toRadixString$1,1766,toString,1767,toString$0,1767,track
Id,1768,truncateToDouble$0,1769,uncryptedMagicBytes,1770,unsetParticipant$0,1771,updateCodec$1,1772,userFramesSinceSif,1773,value,1185,variableName,1774,warning$1,1775,where$1,1776,worker,1777,write$1,1778,writeAll$2,1779,year,1780,zone,1781" + }, + "frames": "8vTAqIe+7DmC;+HAKAA6C;4CAKCTY;4CACeDE;sKAIlBAE;oBAGOF8B;8OAaAj7DAA8CgBCgEANK2EwG,A,oB;sgBATrC1EAAmB0BDgEAVW2E8E,A,AAUvCCiD,A;6qOK0KW+tBsI;eAEF49BwG;ssDJ5RWmPyC;4LA6BLzEY;mrBAuJqBlJmG;yXA8JlByKuB;uCAAAA6B;uMAuBQ1B6C;+YAYV0B4C;mMAqBL0CAARFjCsB,A;6GAkBWayC;2kBA2OHzWgB;0sDAwH+BzCoC;yJAYjBjjDAA/rBxBgxB0B,A;mRAsuByCiyB+C;g1EAmGCGAQv8BzBHsC,A;gYRq9ByBGAQr9BzBHsC,A;utCR6/BZmY6C;4lBAAAA+C;iNAmBqB9WkC;09BAgDOnc4C;ghBAgCnBA2C;uDASAA6C;8LAyCAnX8F;k1DAqHdAkG;iuBA8NEA+S;u4BA4MAA2C;8xCA0DyBAkB;8oDAkCJAkB;4DAOpBAoE;wDAIiBockF;OAChBpc0B;sJAOC2sCc;4BAIgB3sCoE;sOASjBA0B;4NAiCmBA4B;6FAGtBA4C;ubAsEKopCe;qJAEDFsB;AACEAyB;wrEA0NJlpC+C;cAEAAgG;4rIAyPEA0F;m7DAqF6B8pCmK;AACHiCsK;wRA4HtBv6DAM/gETCkCA7C4Bk1De,A,sB;sPNklElB3mCoG;iEACK6qCiC;qbAyIhB7qCqC;iEAaAAmD;ocCloFOwrCa;8BACcp5DAAsE3BDAF1IAF+B,wG,A;aEoE2BGAAuEpBm2DE,A;8DAtEWiDa;kFAKKn5DAAzCJ03DkB,AAAZyBa,A;yLA+CMAoB;kCACkBp5DAAyD/BDAF1IAF+B,4G,A;aEiF+BGAA0DxBm2DE,A;sEAzDWiDoB;0FAGKn5DAApDJ03DkB,AAAZyB0D,A;0QA0EEp5DAA+BTDAF1IAF+B,wG,A;aE2GSGAAgCFm2DE,A;2NAvBEj2DAA2BTHAF/IAFsB,A,gCE+IAEoG,A;SA3BSGAA4BFi2DE,A;0LAfoCgCmC;oDAElCj4DAAYTHAF/IAFsB,A,gCE+IAEoG,A;SAZSGAAaFi2DE,A;4KAMPp2DAFtJAFiC,+B;2aEkK2Cs4DiC;wjBAsCjCR0B;6ZAaF53DAFrNRFiC,uL;iIEmO2Bu5D8P;o2BA+EX34D6E;gnJWsJsBysDuD;wMA6xBCOuB;mHAS/BNwC;AACAC8C;uvENhpCiBqIsB;6BAOjB7B6D;AAHF7DAAqKUyF2B,A;+DAzJO/DgB;AAFAgEsB;2BAGf7ByE;AAD0CjFAAmKlC6GoB,A;mEApFCtGAAzBsBqGc,A;2FA2BECU;qGA2JzBEiB;kEAyKMnB6B;gZAiFPrFAAhbwBqGc,A;8JAybbCqB;iRAUAAqB;qRAUAAqB;mSAUWxGkB;kRAc3BDAAlaM2G6C,A;uCAsaGpHAA1ZHkHqB,A;iGA4ZQnHkB;kUAgBHqBAAlaILiC,A;AAmaJIkB;+QAUIGAApZT4FqB,A;qHA2ZiC9GkB;iZAiB5BGiC;AACDuGmB;oGAODxGAA1aH4GqB,A;yTAsbI1GAA5aJyGqB,A;6EAgbUL2B;0VAmBNEmE;uEAGDIa;kXAiBCJmE;2EAImBF4B;AACEAiC;AACtBMiB;4YAyB0B7FqL;AASAP8D;0GASbDoC;0PAWiB6FAA9YRpnCkD,A;AA+YrBmmCk
E;AAIAJkE;AAIADkC;gVA4CF8BoB;iLAaZ1EsB;sMAuBFEiB;sCAIO4GmC;k4BAoFL9GiE;0EAQFyDmC;qKAiBcWe;uCAENzzD2BAtgBU8yDqC,A;2NAglBFvxDqCAlFlB8tDuD,A;uHAsFc6E0B;aAELiCmC;OAAwB/H4B;iFAOM3sDY;AAA9BsrD2B;uBAA8BtrDAAKrCgwDoD,A;6CAS0B6DsC;AADV77B0E;8CAGXtNAAoCTAAAAAAAACMslCsB,A,A,gB;6CAnC6BqB8C;AAE/BrxDkB;AADOsrD2B;iCACPtrDAAfAgwD8D,A;oEAwCqBzxDqBA1oBH8yD8B,A;uOA0sBlBhxDAAm7F6B8mDkH,A;mFAh7FzB6Ec;wNAiBYuFAAvoCY1EAA6KhByFoD,A,A;AA29BQtGAA9jCeqGc,A;ugBA0kCnBxGAAvgCJ2GmB,A;aAwgCM1GwD;AAiBdiFqD;qOAgBCzwDiDAiCmBksDAApjCZgGoB,A,AAqjCMjGiB,A;8NArBXoBO;AADP2CmB;gKAwCAjwDAA4zF6B8mDiG,A;gQAjzFtBqBO;AADPsHmB;kFAKW9DAA/qCwBqGiC,A;gNAorCCxFAAjlCxByF6B,A;oCAklC4B7GAAxkC5B6GwE,A;iLAmlCCjBe;2KAeN1DI;AADOtBAAtmCFiGqB,A;yJAgnCF1FiC;uBAKVkBiB;8QAsBO4GmC;gCACG9HiC;uBAKVkBiB;uPA4BWHiC;yMAaAAiC;iIAYT5CwF;+YAwCcxtBiC;wEAiBTowB+C;AADS4DAAh7ChBvFAA0EmCqGsB,A,AA1EPxFAA6KhByF2B,A,A;gBAowCQ7GAA1vCR6GyB,A;iCA4vCiBjB0B;AADzB1DW;08HAyOmBuES;wDAGDI4B;6JAYA9FAAvgDVgGsC,A;AAwgDKjGc;0HAMG2Fe;AACFgFyD;AACEhF4B;8KAOGI8B;+CAELEsB;sdAgBMNiB;ktBAgBFI8B;AACjBjyDAAy0EwB8mDAAO/B//CAAGa4kDAAt+HwBqGkB,A,A,4FAm+HhBpvDAAgBd2xDiD,A,qB,A;qNAv1EYlIAA1iDCP0C,A;AA2iDeX6C;AACQiByE;AAGPyF8C;AACOhGyE;AAGPgGiC;AACNjGkC;AACPiGe;oNAWVI4B;uNAaEA8B;uNAaFFqB;6EAKEEsC;AAIFEuB;8XA6BAxGAAlvDwBqGc,A;uRA2vDdxFAAxpDTyF0B,A;wDAuqDajGAAlqDbiGmB,A;6FAoqDStGAA5wDcqGgB,A;4JAqxDV5GAAxqDb6GgC,A;8DA6qDIzGAAvtDJ2GoB,A;gBAguDM1GgB;gWAgBOJAAvrDb4G8B,A;AAwrDG3GO;2CAUDCAAxrDIOsC,A;qPAgsDFsLyC;2JA2LPzMAAHKsMG,2B;iDAKPtMAALOsMG,c;6IAWDlEuB;0IAKOfyB;AACP/DmE;iYAiBOgJW;oGAqCAlEW;iEAeHiC8B;AADPhC2C;+CAGFhEkF;AACHiG2B;qIASS/JmB;8CAGV8E+B;AAEagFiC;+CAEThGoF;AACHiG8B;+IAKSpKmB;8CAGV6E6D;AAEuB/3BAA57Dfw6ByB,A;AA67DK6C0C;sHAGXrJAAtnE6BqG4B,A;AAunEdlHgC;AAKhBmKuC;6EAyCHxF8C;AACAQ0C;iFAyGe+EqC;AADPhCoB;+CAGsB3IAAIpBoHAAvmEPpnCsC,A,AAwmEH6lCwB,AACAN+B,yD;AANG1CAAtGA+HQ,AAAOjCwB,A;sFAmHKgC8B;AAFN9LAA3DKvxBAA3iEJw6BiD,A,A;AAumEFaoB;0HAGL9FAArHA+HQ,AAAOjCwB,A;oKAmIOrHAA9yEgBqGgB,A;qOAszEvBPAAxoEPpnCsC,A;AAyoEH6lCqB;AACAI4B;GACAV+B;oIAWeoF8B;AAFNhMAA/FKrxBAA7iEJw6BiD,A,A;AA6oEF
aoB;8HAGL9FAA3JA+HQ,AAAOjCwB,A;sLAyKOrHAAp1EgBqGmC,A;kMAy1EZ5GAA5uEX6GoB,A;wMAkvEazFAA5vEbyFqB,A;gBA6vEiBtGAAh2EMqGyC,A;AAk2Ed5GAArvET6GwB,A;+HA4vEARAA3rEPpnCsC,A;AA4rEH6lCqB;AACAI4B;GACAV+B;oIAWeoF8B;AAFNnMAAhJKlxBAA/iEJw6BiD,A,A;AAgsEFaoB;8HAGL9FAA9MA+HQ,AAAOjCwB,A;wJA4NOrHgB;wKAMVuC+D;oIAKGuDAApuEPpnCsC,A;AAquEH6lCqB;AACAI4B;GACAV+B;sIAOeoFqE;AADPhCoB;+CAMVhJAASYyHAA5vEPpnCsC,A,AA6vEH6lCsB,AACAI4B,AACAV+B,yD;AAfG1CAAnPA+HQ,AAAOjCwB,A;oHAwQMnBe;wFAEIIG;AACCt6BAAhwEXw6ByB,A;qIAywEMNe;uFAGmBFiC;AACZIiE;AAKPEO;AACKt6BAAnxEXw6BiC,A;mJA8yEDpJAAjBO8IqB,qE;AAmBDmDgB;AADPhCkB;+CAMV/IAAUYwHAAv0EPpnCsC,A,AAw0EH6lCqB,AACAI4B,AACAGyC,AACgBoBwB,AAEdxB2B,AAA6BsBc,AAE/B/B6B,yD;AArBG1CAA7TA+HQ,AAAOjCsB,A;oJA4WNrHAAvhF6BqGuC,A;AAwhFrBjHAAz8EFkHmB,A;AA08EULmB;AAChB9GkD;iEAIKpCaApBP/wBAAp1EQw6ByF,A,A;AA02EK6CgB;AADPhCoB;+CAMVlJAAUY2HAAj4EPpnCsC,A,AAk4EH6lCsB,AACAI8B,AACAGgC,AACAb+B,yD;AAjBG1CAAvXA+HQ,AAAOjCwB,A;4FA8ZD/J2H;AAEM+LQ;AADPhCoB;+CAMV7IAAUYsHAA56EPpnCsC,A,AA66EH6lCsB,AACAIsC,AACAG0B,AACAb+B,yD;AAjBG1CAAlaA+HQ,AAAOjCwB,A;6FAifDrKAArDbCoD,AADIjxB0D,AACJixBAAM6CwD2E,AAGPyFgD,AACOhG2E,AAGPgGmC,AACNjGoC,AACPiG4F,iX,AAjBtB3CgC,A;AAyDgB8FgB;AADPhCoB;+CAMVjJAAUY0HAA//EPpnCsC,A,AAggFH6lCsB,AACAIiC,AACAG8B,AACAb+B,yD;AAjBG1CAArfA+HQ,AAAOjCwB,A;iHA0hBDlKsCAZTnxBAAngFUw6B4F,A,A;AAihFK6CQ;AADPhCoB;qJAGL9FAA9hBA+HQ,AAAOjCwB,A;2PAmjBQnBiB;8HAICIwB;AACXtGAAnuFyBqGkE,A;mYA6vFvBPAA/kFPpnCsC,A;AAglFH6lCsB;AACAIuC;AACAGuB;GACAb+B;kTA0KoByGuB;AACJG0B;mCAGTlEmC;oeAcH4D8B;0CAIAA6B;0CAIAAQ;uBAESiBU;AAAkBrEI;AAAqB2C0B;0CAKhDSQ;AAEEaqD;AAA2BII;AAA3BJAA4YD/E0B,A;0CAvYDkEQ;AAAsBjIqC;AAAiBkJ4B;0CAIvCjBQ;AAAsBlIqC;AAAkBmJ4B;2CAIxCjBQ;AAAsB9HqC;AAAe+I4B;0CAIrChBAAsFRDc,AAAYRgC,AACe7DuB,A;gQA3EXsFkB;AACRjBQ;0DAIcpDI;AAAqB2CiB;AAC/BJoB;qDAMI8BkB;AACRjBQ;8DAIcpDI;AAAqB2CiB;AAC/BJoB;qDAMI8BkB;AACRjBQ;8DAIcpDI;AAAqB2CiB;AAC/BJoB;0CAMJa2B;AACACAAqCRDc,AAAYRgC,AACe7DuB,A;yIA9BnBsEAA6BRDc,AAAYRgC,AACe7DuB,A;0CA1BnBiCAAmMSv4BAA2CEu2BwB,AAAmB4DmB,wBACtByBU,AAAkBrE0B,AACPdmC,A,AA5C3BkE8B,AACAA2B,A;2CAjMQCAAqBRDc,A
AAYRgC,AACe7DuB,A;2CAlBnBgCAAiMStBAA4CETwB,AAAmB4DmB,6BACjByBU,AAAkBrE0B,AACZdmC,A,AA7C3BkE8B,AACAA2B,A;0CA/LYnCAAwMK2C4C,AAMjBRQ,AAAmB9QyC,AACnB8Q2B,AACACAAnMADc,AAAYRgC,AACe7DuB,A,2B;2GANhB4DuB;8BACG0BU;AAAkBrES;gKAWrBRmC;oGAIX4DyB;yNAaW5DmC;sNAIyCgCsD;yEAM7BlP2C;oCAKjB+RgC;AACArE2B;AAFQGAAt9BCtHAAroEsBqGkD,A,AAuoEjBjHAAxjENkHmB,A,6CA2jEazGAAvkEb2GU,A,AA0kEY4CkB,oI;AA48BxBmBW;AAEYjD8E;AAOZiD0B;qGAMqBiB+E;AAEZ1BmB;qCAGTSW;+GAE4BpD+B;AAChBnHAAhnGuBqGyC,A;AAknG/BkEW;kEAMIbkB;sCAMJaW;qJA+BKTmE;AAnBY0BuF;oFAwBI1B8C;sCAIbAiC;sCAIRS8B;oCAIJAwB;kEAKKT0B;2CAGIAiG;AAC0BcyD;AACbAgB;wCACczDqB;AACmBrBAA/6FlBpnC8D,A;AAg7FfmmC+D;AAIAJ+D;AAIAD2B;AACpB+FW;0GAWAAW;iCAIW/D6C;qMA0CLsD8B;0BAERSW;AAAsB/HqC;AAAgBgJuB;gDAItCjBW;AAAsBnIqC;AAAcoJuB;qKAOnBrF+B;AAAmB4DW;wBACtByBU;AAAkBrEkB;AACPd8B;iLAmBbuEK;8QAUM1Ee;8FAEAFU;gGAOAEe;iGAGAFU;mHAOLhGAA5zGsBqGc,A;wEA8zGRjHAA/uGfkHS,A;qCAgvGYnHmC;AACP+GiB;gDAEDIW;yDAIElHAAvvGNkHqC,A;AAwvGDtGAAv0GwBqGqB,A;gMA80GbvGwC;AACPoGiB;oBAEDIW;yLA0DDvGkG;AACGsJ8B;8BAETnH0E;AACFoHqC;oTAsELnOqF;6DAEY6EAAv9GuBqGc,A;uGA89GnCxrDAAihBEmlDc,A;6IA3gBesGE;AADH1GAA71GFyG6C,A;4BAk2GArGAAx+GuBqGiE,A;qFAm/G7B5GAAt4GM6GuB,A;iMAm5GN7GAAn5GM6GgC,A;oEA65GNzFAAv6GMyFgC,A;oGAq7GRzFAAr7GQyFgC,A;wNA+8GR7GAAr8GQ6G4C,A;2QA69GJjGAAl+GIiGgC,A;gFAq/GR7GAAh/GQ6GmC,A;mRAwgHJjGAA7gHIiGuB,A;uZAwiHI3G2B;AACAAiC;AACGuGwC;AACAAmB;sBAGkBD8D;AACAA8D;0DAGjBK+B;AACAAe;iNAShB5GAAxiHQ4GkB,A;AA0iHR5GAA1iHQ4GuB,A;s7BA8kHM5FAAxlHN4FkB,A;AAylHM5FAAzlHN4FiC,A;mCA8lHsB9G6B;AACAA+C;AAEQiB6D;AAGAA2E;AAGPyF8D;AACAAe;+MAKOhG6D;AAGAA2E;AAGPgG8D;AACAAe;iOAMAFa;+CACbMuC;4GAOaNa;+CACbMmE;8GAUfNuC;+CAEeMuC;gDAMOrGmC;AACAAoC;AACPiGoC;AACAAe;yFAIFMyB;iGAGEAoB;kGAIEJwB;qIAMcJwB;uEAENAwB;kCACbMkD;0GAQRFwB;0MAeIvGAAlvHH2G4B,A;AAmvHG3GAAnvHH2GwB,A;2CAiwHOxHAAt0DLsMG,iB;2FAy0DC9EwB;0CAIM2CiB;sEAEHjDiB;AACWtyBuDA0NIyxBa,AAAjBuFc,A;6GAxNOpEc;qEAIT1GoB;0DAcFAqB;AACAAoB;mIAyBIoGe;uEAUAIgB;AACAA6B;qIAgDA/F2B;AACAAgC;AACA2FqC;AACAAiB;yBAEF1FAAn2HFgGe,A;AAo2HEhGAAp2HFgGmB,A;4FAw2HMFkB;AACAA6B;8EASPtGAA38HwBqGwF,A;kGA+8HIxFA
A52H3ByF2B,A;iCA62H+B7GAAn2H/B6GkB,A;mEA+2HiBnLsG;4DAUlB6EAAt+HwBqGc,A;6IA0hI1BuDqE;AACE1De;kDAEEMa;8HAWoBnBa;AAAjBuFI;s/DQlsIZjMgC;sFA2BRjgC+C;kEAwGOA2BApCSugCAAAAvgC0D,A,sC;iJAmDC2vBQ;4yBA2DE3vBgF;AAAAsmCqE;geAiCP90B0C;+hBC9SIAoB;+NAYVo1BsC;2JAMJA4F;OAEgBhJmC;wMA8kBFoFyB;uBACIlEsD;0FAIA3nB6E;+QAehBquBiF;sOAfgBruBiB;6NAiCXh5B2C;QAAAAwB;odAmJkBqkDuE;iFAGYtCwD;AACxB9P6B;2pBAiCcKkB;iHACD0WyB;2JAGWjHkC;AACxB9P+B;uOAuFIGkB;woBAkBT2J8D;wHAMgBiJqC;AACF8BgL;AACZjGmI;gNAcIiGmK;0FAEVqBoI;4BAGAZoG;gYA4EGtNoB;qqBGhgCK8MoD;qGAUqBllC4E;oJAKrBklCoD;oYAoBkBllC2E;8mBA2D3B7hB4E;4GC8/ED6hB0D;OAAAAuD;geCz9EOo4B8C;+NAgXNhI2C;6uREXkCpwBuF;uEAQ9BAqF;6HCjbMAqD;8YAoBNy7BkB;8kGEuQHyR4N;cAAAAqK;cAAAAsJ;cAAAA4E;cAAAAoL;qDAAAA0E;uEAAAA6F;cAAAAqK;cAAAAuJ;cAAAA4E;cAAAAmM;cAAAA4E;cAAAAsI;inBAyTO3GkR;sqGAmFkBvmCAb+hBQ6/B4B,A;84Cavdf2G4D;s2EDzfehCAAL9BsGgBtB/OwB+B4C,A,A;uyBsBobjB5asC;AAEDgVc;uFAGFAc;0EAGEAkC;yJAsBOrJkC;s6BfrfX6IAeyLS8FAxB4NXjCsB,A,A;eSnZA/FiC;68DmB9D2BvkCyB;wvBCm0B1Bq8B8G;yPAwBcr8BkD;gGAGpBq8BiG;2LAMKZkB;wxFC9qBakOwE;i0FCvJNtCsD;myCChBUrnCuF;8JA4gBRAA8B8nBSAA/ChmCvBAAAtB0B2vBAAAA3vBuF,A,A,mE,A;+tBiBulBJAuF;kpGErjB1BAAAAAA6HAQEyc8E,oE,A;m7CC7EA4U2H;AAIUiYoE;AADAtpC6BFkHem5B4E,A;8DElHfn5BAF0H0BkzBqB,AAGlBiHAAAAn6B2B,A,A;8NEnHlB05B6DFuHErIO,A;AEpIFAyH;AAaAqIc;AAVU15BAF0H0BkzB0C,A;AEhHpCwG+H;8OASGmCiC;8eAMOrImN;qCAAAAW;kCACAkCkB;kFAIZrEwE;AAEI+a4D;AAAA9C8D;AACFjYU;AAHFAuF;AAIEiYiC;uEA+XFA4B;4kEI9aqCRwE;2FAYADuF;olBGiBvB7oCA9BgxCyB6/BkD,A;qpBb5+BPsL2C;uOAqB5B5ByD;6qDGlMF9tBwE;mKA4GAAiF;8oBA2COzbkE;QAAAAkE;sNA2ZqBwqCmD;mDAaFxqC+C;+BAAAA4B;gqBAqGJ3a6B;oEAAAAAI6Sd48C+B,A;sgCsC/gCqBtGAA6ESngB0C,AAAiB+T8B,A;0jBA8DhDgT4M;sBAAAAwD;yBAAAAqH;+lHlBoJE+JoC;iyCmBrSPhKuGA+BUsCqN,AAEI5kCAjC4rCqB6/B6C,A,qI;iSiC9qCpBkJAjCirCfzJe,oB;AiChrCUloBkB;6ezClGcpXmD;uBAAAAwC;4FA+IxBA2E;gBAAAA+E;m5BA6MwBAa;6GAAAA+B;29BA6DAA8C;8FAAAAqC;+CAIxB49ByB;iFAAAA8D;qvC6ChVO6FuB;4NAkBF4GI;gcAmBerqCoB;kBAAAAqC;wIAyCpBAa;qEAAAAgE;wvBjDiCsCitCoC;4LAmBpC/akC;2QAQW6YiB;kRAIXhZkC;yXAKM/xByE;kEAEeitCa;2FAGlBjtCgE;4yDA26CqBiyBwC;8j
DA85BCyUmB;AAAeAiB;8IAOQAiB;4DAOlCvCuC;AACAgHgC;8dQ1jFInrC8C;8IAAAAwC;0IAUbq/BAA+PiBgDa,A;+NAhOEA+C;+MAKAAe;8QAUf1BaAgLNyB6C,A;iyBA/JLvQ4LAQWuQgB,gV;gqCA0EaCa;6PAUAAa;uVAiBDriCwB;qhDAwHlBAa;4EAAAAyD;ulCInTgBoXAA0xCjBmoBoD,A;uLA1xCiBnoBAA2xCjBooBgD,A;4PAx9BGx/B+C;+kBAiEMkbwD;0hCAigBf6jB8F;AAEEmH2gB;uuFAyaK5Ge;mjBNzsCA7tD+B;0DAAAAAAia0Bk1D4B,A;iDA3ZDr4CqBAmaZq4CiC,A;itCQ7bhB/D6B;quCAsKA5iCoC;2oCoCpFAgyB8B;AACShyBoC9B2gBEwRgB,mDADjBxRAAAAAiGAKcm4B0C,A,A,A;+B8B7gBWn4BAAlLrBAA7B80BAAADjvBgBwR2H,A,A,A;gC8BqFKxRAAlLrBAA7B80BAAADjvBKmtCAAWKrIAA4BL1MmB,A,yEA1BK2M0EA+DL5MmC,A,A,A,A,uI8B/KTn4BAAAAA0G,A,A;AA+LE69ByZ;wJAgCa79BS;AADTgyB4B;mFAIOhyBkE;iOA6DPsjCe;4GAKAD2B;qMAaEjDsB;iPAMA4E8nB;wEAUF3BqC;uEAOArRqC;8rBAkB0CsRkB;wCAIjCtjCS;AADTsjC4B;kOASAD2B;sDACAZyD;sEAMEYuC;ilBnCrUMY6B;+HAEZ/E6B;iSAgBY+E6B;2NAgIPtT4B;8BACEwW2B;AAA6BhHAAxB7B6MmC,sB;yRAwCI7FqB;qdAUL1W+B;khBA2IkBjf2C;oTAalB4mBgB;uEAQkBp4BqE;oBACPi7BqH;wJASOj7ByE;oBACPysCsH;uXA2GbzIe;kOAQiBlFmF;AACLqEmC;2HAQdhlD0C;QAAAAwB;qSAQE6lDe;qeAiBiBlFmF;AACLqEmC;kMAQdhlD0C;QAAAAwB;2rCA+IFmoD2D;wJAMW9DyC;6bAYXiDuBApOoBzlC4C,A;2bAkSpB7hB0C;QAAAAwB;6YAwBAAyC;QAAAAwB;g8DA8EyBmyCmBA3nBlB6WsB,AAAUJAAhEViG6B,gE,A;0FA6rBkC9MS;qDAAAAoB;uDACDAE;gEAAAAiD;gEAEHlgCkF;2MAKiBmjCkD;AAC3BX8D;AACqBtCE;2DAAAAkC;mKAWrBmEAAiGzB9H2C,qC;6iCAjF6BlMgB;wBAAAAE;0BAAAAAAptBxB8W2B,AAA+BzCoBA1B/BsIM,2E,A;oJAgvB4BhtC6E;4RAOIkgC8C;OAAAAyD;6DAElBxPAAruBd+ToC,A;gPA0uBsBvEsC;OAAAAoC;mMAGQlgCkF;oPIqZXAyF;+jBAQtBi/Be;6BAAAA+BJtpBFqHuF,oD;4BM8J4BzVkD;yxBDtiB1BkSe;+HAIc/iC8D;kMAiDSA+B;uBAAAAuC;8DAC3B+a8M;AACK2nBe;2RAciBaoB;wQA6FlBb4C;6IAEoBwBwFAhM2BjS8E,A;gPA2M5BsRwC;8uBA+BcxD8G;qWA0HjC1Ne;keAWFlCQ;6BAAAAuNA6BFkHuC,A;wqCJqkCEjH8E;2aAYAA8E;wtCM/pDKpwBuC;kHAAAA2C;mNAU8B2iCa;4JAGHAa;ktDAqDhCwC6HAOShGAAoOeyKgE,A,8e;g9DAvGrBjHiE;gJAuEMxDAAgCeyKmC,A;6fAmEnB5pCa;mFAAAA2C;s0BqB/ZmBAyC;uDAAAA2B;yBAAAAoC;mLA2IUA2E;QAAAA2E;iDA6WjBwqCmD;+xBpBlafpOoD;AACAAAC0YJ8K8B,A;ADzYI9K2C;qWAiOiB9IoB;gCACMNAfpTLhzBa,A;oEeoTKgzBoBfpTLhzBwC,A;0CesTHy7B0B;mTE7FTxJe;uCACIjyB8F;8gBAoFDwcAA3B6BxcAby/
BL6/B6C,A,A;uXalxBZ7/BAbkxBY6/BiB,A;QajxBvB7/BwC;2EAEd2bgS;6UAqHyB3bAb0pBY6/BiB,A;ofYhtBxBvoCAAlkBM+xC2B,A;AAkkBfjNAAjGJ8KyB,A;AAkGI9KAAlGJ8K8B,A;gQjBtTExSiB;AAAgCAe;AAChCFuB;AAAqBAqB;uMoB6aA8H0C;sBACDzH2C;sBACAjHyC;sBACAoD6C;sBACE2D+C;sBACAuE8C;wBACCzEwD;AACbD4C;0zElB9GY1C6C;uTekEN9xBsJ;8LAKdo8BAAxFJ8K8B,A;uYAuGoB5vCAAxkBD+xC8F,A;uhJAwBW8BuC;yxDQ2f1BnrC8C;gcAmGK7FwCAxDK6uCAzBnkBNhCAARIFgP,A,A,A;OyBmoBH3sCAAxDK6uCyC,A;iQAqEVmBwI;2GAGmBe8D;0cASa5B6B;mJAUnBAsC;y1BuB1ZTtpCoH;6GAYR85BA3C0gBAiFkC,A;oD2C1gBAjFA3C0gBAiF4D,iC;oD2CxgBkBuMiC;2DAEhB1J8G;AAEa5RyC;sDAKAqI+B;ggDtB1EfhHkH;AACK4BkF;AAEH5BU;AAHFAiG;uDAMsBqQgC;sPAAA1D4B;OAGpBLQ;+HAIKOO;AADAA0C;AACAAuE;kEAELjCkC;+KAGE1EAA7BG2GW,oBAAoBPQ,A;imBA6CTziB8H;AAALouBuD;6BAEOA2C;gBAChBlaiF;AAIiBmM+D;wgBAQbnMO;AAZJA2G;8DAgBIAO;AAhBJAgF;uaAmCSpvBArB8/BwB6/ByB,A;AqBz/BZ3kBgD;AAALouB2D;kCAEKAgD;gBACrBlasF;iDAE6CkaiC;AAAdrL8B;AAAcqL+B;AAClCrL8D;oCACwBqLS;AAAdrL8B;AAAcqLoC;AAGsB9L+C;oBAA3CCwB;OAkBXz9BoE;kEAWmCs9BoC;2FAI/BYkC;6lCAWYoL2B;sCAEAA0B;0FACfzWsB;mKAGOqL6E;0KAMb/O+D;omBAcc8DoG;AALVsEAAnKC2GW,oBAAoBPQ,A;mYAiLrB1K4F;kQAEKe6FAlMkCiY8B,oH;6BAkMlCjYAA7LX8F2C,AACAA2C,AACAAsB,gS;AA+LERArB2XFyFmE,0E;AqB1XEzFuCrB0XFyFmE,yD;0DqBxXuCuKY;AAAPAiB;AAAOA0F;AAM7B3La;iVAIHDkB;AATAUiC;qRAWLhP2E;oCAC2FlU2D;oCACzElbAYnZXAyH,A;uCZsZY+oCoB;gHACQ7tB4E;4UAQzBqcAApNG2Ge,AAAoBPQ,A;yLAyNb1K8H;AAMZ7DU;AAzBAAwF;AA0B4D6DqC;mmBAE5DgJ0D;yRAQYhJgG;AALVsEAArOG2GW,oBAAoBPQ,A;g/DAyP3BxOqE;kNAQoB8CiC;yEAEdYsB;mKACJ1DU;AAXFAsE;AAYa+O8C;AACX9OiH;sFAIEyDsB;yWAMAzDU;AAVFAwC;+HAYMqMAUqCW+OoE,A;AVrCoB/OAUqCpB+OyH,A;AVpCbxSqDI9e4BiTAVgOd3GApBhJE8DE,A,A,wD8B/EO6CAV+NT3GApBhJE8DE,A,A,2D;A0B+ZZ9ViHIxdO2YAVyMG3GApBhJE8DgD,A,A,+L;A0BoaIpoC0D;AANpBg4BAI9e4BiTAVgOd3GgE,A,A;6VMkRZlVU;AAjBNAmE;iFAmBwBpvBAYpejBAiE,A;oCZqeyB+oCoB;oMAG1B5ZU;AApCRA4F;AAqCmB+O+E;AAEX9OU;AA1BNAuG;AA4BIAU;AA5BJA2F;AA6Be8OqF;0OAUXjL4F;i3BAWJ7DU;AAlDAA2E;4PA2DyCwBgD;qNAGrC2GAApUC2GW,oBAAoBPQ,A;2IAyUX1KiM;igCAqFZ7DU;AAxJFA4D;ovBAkKAlBAC9gBFuKkF,A;ADghBErJU;AApKAAiF;mCAqKwGlUmD;oCAEtFlbAYxnBXAyH,A;uCZ2nB
Y+oCoB;4KACM7tBoE;yMAKvBqcAAtbG2GW,oBAAoBPQ,A;yLA2bb1KwI;AAMZ9DU;AAxMFA2F;AAyMkE8D8C;mzBASpDAgG;AALVsEAAtcG2GW,oBAAoBPQ,A;+kDAoVW2LY;AAAPAiB;AAAOAwC;sKAMtB3Lc;iXAOHDkB;AAZAUyB;kZAcThP6C;AACiElU0D;6MAKjEkUU;AANAAmC;AAOiElUmD;+KAE/DiUkJ;uYASACU;AAlBFAkF;4JAoBEAU;AApBFA4K;oIA6BY6DgG;AALVsEAA3XC2Ge,AAAoBPQ,A;+nCAwYH9KoE;u3BAUyBAW;+2CCnkBjCTASIMHyB,A;0mBTiEpBgK6C;4uCAwBKyBkB;AADASO;AADyBmLO;AAAPAkB;OAAAAQ;AAAOAS;AACzBnL+E;gIAEMjjByE;gcAEX+gB2C;0/GA2BGyBkB;aARiC4LO;AAAPAkB;OAAAAQ;AAAOA+B;AAGnBhM0C;AACaGiB;AAAV6LiC;AAAU7Le;kCAEEEQ;qkBAO3BlhBmI;2pBAI+B6sBY;AAAPAkB;OAAAAQ;AAAOAyD;AACkB3La;gNAEnDDkB;wFADsCCQ;2nBAQ3ClF4C;WAAAAqE;8uBAIAvcoK;AAE+BO2H;uHAE/BAuH;86BAM6B6sB0B;AAAU7L6B;AAIC6LY;AAAPAkB;OAAAAQ;AAAOAwB;AAEjB3L8E;0EAEkBAa;0TAIpCDkB;0FAFwBCQ;8sCAcI2LY;AAAPAkB;OAAAAQ;AAAOAwB;AAER3L8E;6HACpBDkB;AAFAUoF;2EAGSljB2I;mnBGpNG+vBAVkNG3GApBhJE8DsC,A,A;6nB4BzCtBtrCA4B/BAyuC0B,A;+W5BuCEla4E;AACwBiYkF;4BAIEAiB;0BACP7Lc;mCACSAuB;6BACNAiB;4BACFAe;6BACEAiB;mCACMAuB;gGAK1BxBmG;8FASwBqNyC;AACAAyC;w6BAQX/LW;AAAL+LuB;kJAGZptB8C;ulFAQiC6sBoB;AACnBhsCAVxDuB8wBCA+BHE2B,A,A;6TU6BRgboB;AACRhsCAV7DmB8wBC,A;AUwDvB9wBAVxDuB8wBAA+BHE2B,A,A;oXUmC5B7RU;AAnBNAwD;6DAuBsBkwB4D;AAAA9C2G;qFAAZtpCADhFyCo3B2F,A;ACmF7CkSgB;AAAKpLY;AAIHPQ;wVAMFzhBU;AApCNAsF;yDAsCMotB8B;AAAKpLY;AAIHPQ;6eASEzBAjCgHHl8BmD,A;AiChHuBs7BwE;AAApBY8C;AAAoBZ2D;kMAEtBpfU;AArDRA+B;kDAsDQmdqDFkCNhIqE,iEAIFnVU,A;AE5FAAuC;AAsDQmd8I;AAEFiQ8B;AAAKpLY;AAKHPQ;kzBAcFzhBU;AA3ENAgD;sUAgFQ+fyG;AACAqNkB;AAAKpLY;AASHPQ;4xBAcJ2LkB;AAAKpLY;AAQHPQ;qQA1DAtEkE;8JAiEFndU;AAvHNA6E;kDAyHMotB8B;AAAKpLY;AAKHPQ;sXAMkBoLoB;AAAShsCAVnLE8wBC,A;AUwDvB9wBAVxDuB8wBAA+BHE2B,A,A;6SUyJ1BkOU;AAzDAA+F;AA0DAqN8B;AAAKpLY;AAKHPQ;0eAKFzhBU;AApJRA8E;AAqJQ0dAD5KRvIU,A;AC6EQgIAFkCNhI6E,A;AE6DMuI+I;2KAGA1dU;AAxJRA+G;kRA+JMotB8B;AAAKpLY;AAOHPQ;+sBAWA1BU;AAjGAA+F;AAkGAqN8B;AAAKpLY;AAKHPQ;wcAMFzhBU;AA7LRA2D;iaAiMQAU;AAjMRAmF;mWAwMMotBkD;AAAKpLe;AAQHPQ;0IAJ2BtgCAVtQCkxBCiChBjBEgE,kK,A;+MvBiSbvSU;AAvNNA4F;iFAyNUggBAjCtDHl8BmD,A;AiCsDuBs7BwE;AAApBY+C;AAAoBZ2D;oFAEtBpfU;AA3NRAkD;gFA4NQ
sdgB;AAtKAHqC;AAsKAGAFpJNnIU,A;AElBMgIAFkCNhIoH,A;AEoIMmIc;AAtKAHmD;AAsKAGAFhJRtdU,A;AE5EAAwC;AA4NQsdiJ;AAGF8P8B;AAAKpLY;AAKHPQ;4pBAWA1BU;AA/JAA+F;AAgKAqN8B;AAAKpLY;AAKHPQ;mdAMFzhBU;AA3PRAiF;wWA8PQAU;AA9PRAkH;0SAoQMotBkD;AAAKpLe;AAOHPQ;gJAH6BtgCAVlUDkxBC,A;AUsQDlxBAVtQCkxBAiChBjBEgE,A,A;AvBkVkBpxBAVlUDkxBiJ,A;2HU2UhBwaoB;AAAShsCAVhUQ8wBC,A;AUwDvB9wBAVxDuB8wBAA+BHE2B,A,A;sPUqS1BkOU;AArMAA+F;AAsMAqN8B;AAAKpLY;AAKHPQ;4SAGJ9DoE;AACA3dU;AA/RNAwC;0OAiSQ2dAFhNR3dU,A;AEjFAA0C;AAiSQ2d+FF9MRhHsB,iE;AEiNMyW8B;AAAKpLY;AAIHPQ;2ZAOFzhBU;AA/SNA0G;0LAkTM4fgB;AA5PEzCqC;AA4PFyCAFzMJzKU,A;AEnDMgIAFkCNhIgG,A;AE0NIyKc;AA5PEzCmD;AA4PFyCAFrMN5fU,A;AE7GAAwC;AAkTM4f0I;AAEAwN8B;AAAKpLY;AAIHPQ;8VAMFzhBU;AA9TNAgF;mVAmUQotBkB;AAAKpLY;AAMHPQ;4NAEF2LkB;AAAKpLY;AAKHPQ;sSAKN1BU;AArQIA8C;yhDCjJO7JAVmXOHgC,A;ueUjMlBEAA1B0BkLc,2BAAAAkB,A;2IA+CxBr9BAwBhMcirCAjCsMA3GApBhJE8D+C,A,A,sG;iQ6BoNIJAfjIpBhoCiH,A;mDekIC66BActLmB76B2B,oBAAAA+B,A;sMd4LO+amJ;8QAvPSqpB+lBAgC5BtBqC,yEAAAAAAGhBAAAAAAkI,A,A,A;i0+BpCuxCmB2IsK;CAAAAG;oWAUAC0K;CAAAAG;kWAUAC0G;CAAAAG;wXAUAC8G;CAAAAG;8PyB30BgC7CAbgiB/CzJc,oB;mtBkB1hBgBsKkB;yGyBnhBgB5pC+BAsLpB+9B2C,AAEhB/9B8B,A;" + } +} diff --git a/example/web/favicon.png b/example/web/favicon.png new file mode 100644 index 0000000000..8aaa46ac1a Binary files /dev/null and b/example/web/favicon.png differ diff --git a/example/web/icons/Icon-192.png b/example/web/icons/Icon-192.png new file mode 100644 index 0000000000..b749bfef07 Binary files /dev/null and b/example/web/icons/Icon-192.png differ diff --git a/example/web/icons/Icon-512.png b/example/web/icons/Icon-512.png new file mode 100644 index 0000000000..88cfd48dff Binary files /dev/null and b/example/web/icons/Icon-512.png differ diff --git a/example/web/icons/Icon-maskable-192.png b/example/web/icons/Icon-maskable-192.png new file mode 100644 index 0000000000..eb9b4d76e5 Binary files /dev/null and b/example/web/icons/Icon-maskable-192.png differ diff --git a/example/web/icons/Icon-maskable-512.png 
b/example/web/icons/Icon-maskable-512.png new file mode 100644 index 0000000000..d69c56691f Binary files /dev/null and b/example/web/icons/Icon-maskable-512.png differ diff --git a/example/web/index.html b/example/web/index.html new file mode 100644 index 0000000000..db7638342a --- /dev/null +++ b/example/web/index.html @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + webwasm + + + + + + + + \ No newline at end of file diff --git a/example/web/manifest.json b/example/web/manifest.json new file mode 100644 index 0000000000..90bcb36f36 --- /dev/null +++ b/example/web/manifest.json @@ -0,0 +1,35 @@ +{ + "name": "flutter_webrtc_example", + "short_name": "flutter_webrtc_example", + "start_url": ".", + "display": "standalone", + "background_color": "#0175C2", + "theme_color": "#0175C2", + "description": "A new Flutter project.", + "orientation": "portrait-primary", + "prefer_related_applications": false, + "icons": [ + { + "src": "icons/Icon-192.png", + "sizes": "192x192", + "type": "image/png" + }, + { + "src": "icons/Icon-512.png", + "sizes": "512x512", + "type": "image/png" + }, + { + "src": "icons/Icon-maskable-192.png", + "sizes": "192x192", + "type": "image/png", + "purpose": "maskable" + }, + { + "src": "icons/Icon-maskable-512.png", + "sizes": "512x512", + "type": "image/png", + "purpose": "maskable" + } + ] +} diff --git a/example/windows/.gitignore b/example/windows/.gitignore new file mode 100644 index 0000000000..d492d0d98c --- /dev/null +++ b/example/windows/.gitignore @@ -0,0 +1,17 @@ +flutter/ephemeral/ + +# Visual Studio user-specific files. +*.suo +*.user +*.userosscache +*.sln.docstates + +# Visual Studio build-related files. 
+x64/ +x86/ + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ diff --git a/example/windows/CMakeLists.txt b/example/windows/CMakeLists.txt new file mode 100644 index 0000000000..e5b4202e39 --- /dev/null +++ b/example/windows/CMakeLists.txt @@ -0,0 +1,102 @@ +# Project-level configuration. +cmake_minimum_required(VERSION 3.14) +project(flutter_webrtc_example LANGUAGES CXX) + +# The name of the executable created for the application. Change this to change +# the on-disk name of your application. +set(BINARY_NAME "flutter_webrtc_example") + +# Explicitly opt in to modern CMake behaviors to avoid warnings with recent +# versions of CMake. +cmake_policy(SET CMP0063 NEW) + +# Define build configuration option. +get_property(IS_MULTICONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) +if(IS_MULTICONFIG) + set(CMAKE_CONFIGURATION_TYPES "Debug;Profile;Release" + CACHE STRING "" FORCE) +else() + if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") + endif() +endif() +# Define settings for the Profile build mode. +set(CMAKE_EXE_LINKER_FLAGS_PROFILE "${CMAKE_EXE_LINKER_FLAGS_RELEASE}") +set(CMAKE_SHARED_LINKER_FLAGS_PROFILE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE}") +set(CMAKE_C_FLAGS_PROFILE "${CMAKE_C_FLAGS_RELEASE}") +set(CMAKE_CXX_FLAGS_PROFILE "${CMAKE_CXX_FLAGS_RELEASE}") + +# Use Unicode for all projects. +add_definitions(-DUNICODE -D_UNICODE) + +# Compilation settings that should be applied to most targets. +# +# Be cautious about adding new options here, as plugins use this function by +# default. In most cases, you should add new options to specific targets instead +# of modifying this function. 
+function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_17) + target_compile_options(${TARGET} PRIVATE /W4 /WX /wd"4100") + target_compile_options(${TARGET} PRIVATE /EHsc) + target_compile_definitions(${TARGET} PRIVATE "_HAS_EXCEPTIONS=0") + target_compile_definitions(${TARGET} PRIVATE "$<$:_DEBUG>") +endfunction() + +# Flutter library and tool build rules. +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# Application build; see runner/CMakeLists.txt. +add_subdirectory("runner") + + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + + +# === Installation === +# Support files are copied into place next to the executable, so that it can +# run in place. This is done instead of making a separate bundle (as on Linux) +# so that building and running from within Visual Studio will work. +set(BUILD_BUNDLE_DIR "$") +# Make the "install" step default, as it's required to run. +set(CMAKE_VS_INCLUDE_INSTALL_TO_DEFAULT_BUILD 1) +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +if(PLUGIN_BUNDLED_LIBRARIES) + install(FILES "${PLUGIN_BUNDLED_LIBRARIES}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. 
+set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + CONFIGURATIONS Profile;Release + COMPONENT Runtime) diff --git a/example/windows/flutter/CMakeLists.txt b/example/windows/flutter/CMakeLists.txt new file mode 100644 index 0000000000..903f4899d6 --- /dev/null +++ b/example/windows/flutter/CMakeLists.txt @@ -0,0 +1,109 @@ +# This file controls Flutter-level build steps. It should not be edited. +cmake_minimum_required(VERSION 3.14) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +# TODO: Move the rest of this into files in ephemeral. See +# https://github.com/flutter/flutter/issues/57146. +set(WRAPPER_ROOT "${EPHEMERAL_DIR}/cpp_client_wrapper") + +# Set fallback configurations for older versions of the flutter tool. +if (NOT DEFINED FLUTTER_TARGET_PLATFORM) + set(FLUTTER_TARGET_PLATFORM "windows-x64") +endif() + +# === Flutter Library === +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/flutter_windows.dll") + +# Published to parent scope for install step. 
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) +set(AOT_LIBRARY "${PROJECT_DIR}/build/windows/app.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "flutter_export.h" + "flutter_windows.h" + "flutter_messenger.h" + "flutter_plugin_registrar.h" + "flutter_texture_registrar.h" +) +list(TRANSFORM FLUTTER_LIBRARY_HEADERS PREPEND "${EPHEMERAL_DIR}/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}.lib") +add_dependencies(flutter flutter_assemble) + +# === Wrapper === +list(APPEND CPP_WRAPPER_SOURCES_CORE + "core_implementations.cc" + "standard_codec.cc" +) +list(TRANSFORM CPP_WRAPPER_SOURCES_CORE PREPEND "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_PLUGIN + "plugin_registrar.cc" +) +list(TRANSFORM CPP_WRAPPER_SOURCES_PLUGIN PREPEND "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_APP + "flutter_engine.cc" + "flutter_view_controller.cc" +) +list(TRANSFORM CPP_WRAPPER_SOURCES_APP PREPEND "${WRAPPER_ROOT}/") + +# Wrapper sources needed for a plugin. +add_library(flutter_wrapper_plugin STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} +) +apply_standard_settings(flutter_wrapper_plugin) +set_target_properties(flutter_wrapper_plugin PROPERTIES + POSITION_INDEPENDENT_CODE ON) +set_target_properties(flutter_wrapper_plugin PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_link_libraries(flutter_wrapper_plugin PUBLIC flutter) +target_include_directories(flutter_wrapper_plugin PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_plugin flutter_assemble) + +# Wrapper sources needed for the runner. 
+add_library(flutter_wrapper_app STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_APP} +) +apply_standard_settings(flutter_wrapper_app) +target_link_libraries(flutter_wrapper_app PUBLIC flutter) +target_include_directories(flutter_wrapper_app PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_app flutter_assemble) + +# === Flutter tool backend === +# _phony_ is a non-existent file to force this command to run every time, +# since currently there's no way to get a full input/output list from the +# flutter tool. +set(PHONY_OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/_phony_") +set_source_files_properties("${PHONY_OUTPUT}" PROPERTIES SYMBOLIC TRUE) +add_custom_command( + OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS} + ${CPP_WRAPPER_SOURCES_CORE} ${CPP_WRAPPER_SOURCES_PLUGIN} + ${CPP_WRAPPER_SOURCES_APP} + ${PHONY_OUTPUT} + COMMAND ${CMAKE_COMMAND} -E env + ${FLUTTER_TOOL_ENVIRONMENT} + "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.bat" + ${FLUTTER_TARGET_PLATFORM} $ + VERBATIM +) +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} + ${CPP_WRAPPER_SOURCES_APP} +) diff --git a/example/windows/flutter/generated_plugin_registrant.cc b/example/windows/flutter/generated_plugin_registrant.cc new file mode 100644 index 0000000000..d5acadb305 --- /dev/null +++ b/example/windows/flutter/generated_plugin_registrant.cc @@ -0,0 +1,17 @@ +// +// Generated file. Do not edit. 
+// + +// clang-format off + +#include "generated_plugin_registrant.h" + +#include +#include + +void RegisterPlugins(flutter::PluginRegistry* registry) { + FlutterWebRTCPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("FlutterWebRTCPlugin")); + PermissionHandlerWindowsPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("PermissionHandlerWindowsPlugin")); +} diff --git a/example/windows/flutter/generated_plugin_registrant.h b/example/windows/flutter/generated_plugin_registrant.h new file mode 100644 index 0000000000..dc139d85a9 --- /dev/null +++ b/example/windows/flutter/generated_plugin_registrant.h @@ -0,0 +1,15 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#ifndef GENERATED_PLUGIN_REGISTRANT_ +#define GENERATED_PLUGIN_REGISTRANT_ + +#include + +// Registers Flutter plugins. +void RegisterPlugins(flutter::PluginRegistry* registry); + +#endif // GENERATED_PLUGIN_REGISTRANT_ diff --git a/example/windows/flutter/generated_plugins.cmake b/example/windows/flutter/generated_plugins.cmake new file mode 100644 index 0000000000..cb004cdc57 --- /dev/null +++ b/example/windows/flutter/generated_plugins.cmake @@ -0,0 +1,25 @@ +# +# Generated file, do not edit. 
+# + +list(APPEND FLUTTER_PLUGIN_LIST + flutter_webrtc + permission_handler_windows +) + +list(APPEND FLUTTER_FFI_PLUGIN_LIST +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/windows plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) + +foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/windows plugins/${ffi_plugin}) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries}) +endforeach(ffi_plugin) diff --git a/example/windows/runner/CMakeLists.txt b/example/windows/runner/CMakeLists.txt new file mode 100644 index 0000000000..394917c053 --- /dev/null +++ b/example/windows/runner/CMakeLists.txt @@ -0,0 +1,40 @@ +cmake_minimum_required(VERSION 3.14) +project(runner LANGUAGES CXX) + +# Define the application target. To change its name, change BINARY_NAME in the +# top-level CMakeLists.txt, not the value here, or `flutter run` will no longer +# work. +# +# Any new source files that you add to the application should be added here. +add_executable(${BINARY_NAME} WIN32 + "flutter_window.cpp" + "main.cpp" + "utils.cpp" + "win32_window.cpp" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" + "Runner.rc" + "runner.exe.manifest" +) + +# Apply the standard set of build settings. This can be removed for applications +# that need different build settings. +apply_standard_settings(${BINARY_NAME}) + +# Add preprocessor definitions for the build version. 
+target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION=\"${FLUTTER_VERSION}\"") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_MAJOR=${FLUTTER_VERSION_MAJOR}") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_MINOR=${FLUTTER_VERSION_MINOR}") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_PATCH=${FLUTTER_VERSION_PATCH}") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_BUILD=${FLUTTER_VERSION_BUILD}") + +# Disable Windows macros that collide with C++ standard library functions. +target_compile_definitions(${BINARY_NAME} PRIVATE "NOMINMAX") + +# Add dependency libraries and include directories. Add any application-specific +# dependencies here. +target_link_libraries(${BINARY_NAME} PRIVATE flutter flutter_wrapper_app) +target_link_libraries(${BINARY_NAME} PRIVATE "dwmapi.lib") +target_include_directories(${BINARY_NAME} PRIVATE "${CMAKE_SOURCE_DIR}") + +# Run the Flutter tool portions of the build. This must not be removed. +add_dependencies(${BINARY_NAME} flutter_assemble) diff --git a/example/windows/runner/Runner.rc b/example/windows/runner/Runner.rc new file mode 100644 index 0000000000..b82215f0e6 --- /dev/null +++ b/example/windows/runner/Runner.rc @@ -0,0 +1,121 @@ +// Microsoft Visual C++ generated resource script. +// +#pragma code_page(65001) +#include "resource.h" + +#define APSTUDIO_READONLY_SYMBOLS +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 2 resource. 
+// +#include "winres.h" + +///////////////////////////////////////////////////////////////////////////// +#undef APSTUDIO_READONLY_SYMBOLS + +///////////////////////////////////////////////////////////////////////////// +// English (United States) resources + +#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU) +LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US + +#ifdef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// TEXTINCLUDE +// + +1 TEXTINCLUDE +BEGIN + "resource.h\0" +END + +2 TEXTINCLUDE +BEGIN + "#include ""winres.h""\r\n" + "\0" +END + +3 TEXTINCLUDE +BEGIN + "\r\n" + "\0" +END + +#endif // APSTUDIO_INVOKED + + +///////////////////////////////////////////////////////////////////////////// +// +// Icon +// + +// Icon with lowest ID value placed first to ensure application icon +// remains consistent on all systems. +IDI_APP_ICON ICON "resources\\app_icon.ico" + + +///////////////////////////////////////////////////////////////////////////// +// +// Version +// + +#if defined(FLUTTER_VERSION_MAJOR) && defined(FLUTTER_VERSION_MINOR) && defined(FLUTTER_VERSION_PATCH) && defined(FLUTTER_VERSION_BUILD) +#define VERSION_AS_NUMBER FLUTTER_VERSION_MAJOR,FLUTTER_VERSION_MINOR,FLUTTER_VERSION_PATCH,FLUTTER_VERSION_BUILD +#else +#define VERSION_AS_NUMBER 1,0,0,0 +#endif + +#if defined(FLUTTER_VERSION) +#define VERSION_AS_STRING FLUTTER_VERSION +#else +#define VERSION_AS_STRING "1.0.0" +#endif + +VS_VERSION_INFO VERSIONINFO + FILEVERSION VERSION_AS_NUMBER + PRODUCTVERSION VERSION_AS_NUMBER + FILEFLAGSMASK VS_FFI_FILEFLAGSMASK +#ifdef _DEBUG + FILEFLAGS VS_FF_DEBUG +#else + FILEFLAGS 0x0L +#endif + FILEOS VOS__WINDOWS32 + FILETYPE VFT_APP + FILESUBTYPE 0x0L +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "040904e4" + BEGIN + VALUE "CompanyName", "com.cloudwebrtc.flutter-flutter-example" "\0" + VALUE "FileDescription", "flutter_webrtc_example" "\0" + VALUE "FileVersion", VERSION_AS_STRING "\0" + VALUE "InternalName", 
"flutter_webrtc_example" "\0" + VALUE "LegalCopyright", "Copyright (C) 2023 com.cloudwebrtc.flutter-flutter-example. All rights reserved." "\0" + VALUE "OriginalFilename", "flutter_webrtc_example.exe" "\0" + VALUE "ProductName", "flutter_webrtc_example" "\0" + VALUE "ProductVersion", VERSION_AS_STRING "\0" + END + END + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x409, 1252 + END +END + +#endif // English (United States) resources +///////////////////////////////////////////////////////////////////////////// + + + +#ifndef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 3 resource. +// + + +///////////////////////////////////////////////////////////////////////////// +#endif // not APSTUDIO_INVOKED diff --git a/example/windows/runner/flutter_window.cpp b/example/windows/runner/flutter_window.cpp new file mode 100644 index 0000000000..955ee3038f --- /dev/null +++ b/example/windows/runner/flutter_window.cpp @@ -0,0 +1,71 @@ +#include "flutter_window.h" + +#include + +#include "flutter/generated_plugin_registrant.h" + +FlutterWindow::FlutterWindow(const flutter::DartProject& project) + : project_(project) {} + +FlutterWindow::~FlutterWindow() {} + +bool FlutterWindow::OnCreate() { + if (!Win32Window::OnCreate()) { + return false; + } + + RECT frame = GetClientArea(); + + // The size here must match the window dimensions to avoid unnecessary surface + // creation / destruction in the startup path. + flutter_controller_ = std::make_unique( + frame.right - frame.left, frame.bottom - frame.top, project_); + // Ensure that basic setup of the controller was successful. 
+ if (!flutter_controller_->engine() || !flutter_controller_->view()) { + return false; + } + RegisterPlugins(flutter_controller_->engine()); + SetChildContent(flutter_controller_->view()->GetNativeWindow()); + + flutter_controller_->engine()->SetNextFrameCallback([&]() { + this->Show(); + }); + + // Flutter can complete the first frame before the "show window" callback is + // registered. The following call ensures a frame is pending to ensure the + // window is shown. It is a no-op if the first frame hasn't completed yet. + flutter_controller_->ForceRedraw(); + + return true; +} + +void FlutterWindow::OnDestroy() { + if (flutter_controller_) { + flutter_controller_ = nullptr; + } + + Win32Window::OnDestroy(); +} + +LRESULT +FlutterWindow::MessageHandler(HWND hwnd, UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + // Give Flutter, including plugins, an opportunity to handle window messages. + if (flutter_controller_) { + std::optional result = + flutter_controller_->HandleTopLevelWindowProc(hwnd, message, wparam, + lparam); + if (result) { + return *result; + } + } + + switch (message) { + case WM_FONTCHANGE: + flutter_controller_->engine()->ReloadSystemFonts(); + break; + } + + return Win32Window::MessageHandler(hwnd, message, wparam, lparam); +} diff --git a/example/windows/runner/flutter_window.h b/example/windows/runner/flutter_window.h new file mode 100644 index 0000000000..6da0652f05 --- /dev/null +++ b/example/windows/runner/flutter_window.h @@ -0,0 +1,33 @@ +#ifndef RUNNER_FLUTTER_WINDOW_H_ +#define RUNNER_FLUTTER_WINDOW_H_ + +#include +#include + +#include + +#include "win32_window.h" + +// A window that does nothing but host a Flutter view. +class FlutterWindow : public Win32Window { + public: + // Creates a new FlutterWindow hosting a Flutter view running |project|. 
+ explicit FlutterWindow(const flutter::DartProject& project); + virtual ~FlutterWindow(); + + protected: + // Win32Window: + bool OnCreate() override; + void OnDestroy() override; + LRESULT MessageHandler(HWND window, UINT const message, WPARAM const wparam, + LPARAM const lparam) noexcept override; + + private: + // The project to run. + flutter::DartProject project_; + + // The Flutter instance hosted by this window. + std::unique_ptr flutter_controller_; +}; + +#endif // RUNNER_FLUTTER_WINDOW_H_ diff --git a/example/windows/runner/main.cpp b/example/windows/runner/main.cpp new file mode 100644 index 0000000000..3ca3d9fb3b --- /dev/null +++ b/example/windows/runner/main.cpp @@ -0,0 +1,43 @@ +#include +#include +#include + +#include "flutter_window.h" +#include "utils.h" + +int APIENTRY wWinMain(_In_ HINSTANCE instance, _In_opt_ HINSTANCE prev, + _In_ wchar_t *command_line, _In_ int show_command) { + // Attach to console when present (e.g., 'flutter run') or create a + // new console when running with a debugger. + if (!::AttachConsole(ATTACH_PARENT_PROCESS) && ::IsDebuggerPresent()) { + CreateAndAttachConsole(); + } + + // Initialize COM, so that it is available for use in the library and/or + // plugins. 
+ ::CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED); + + flutter::DartProject project(L"data"); + + std::vector command_line_arguments = + GetCommandLineArguments(); + + project.set_dart_entrypoint_arguments(std::move(command_line_arguments)); + + FlutterWindow window(project); + Win32Window::Point origin(10, 10); + Win32Window::Size size(1280, 720); + if (!window.Create(L"flutter_webrtc_example", origin, size)) { + return EXIT_FAILURE; + } + window.SetQuitOnClose(true); + + ::MSG msg; + while (::GetMessage(&msg, nullptr, 0, 0)) { + ::TranslateMessage(&msg); + ::DispatchMessage(&msg); + } + + ::CoUninitialize(); + return EXIT_SUCCESS; +} diff --git a/example/windows/runner/resource.h b/example/windows/runner/resource.h new file mode 100644 index 0000000000..66a65d1e4a --- /dev/null +++ b/example/windows/runner/resource.h @@ -0,0 +1,16 @@ +//{{NO_DEPENDENCIES}} +// Microsoft Visual C++ generated include file. +// Used by Runner.rc +// +#define IDI_APP_ICON 101 + +// Next default values for new objects +// +#ifdef APSTUDIO_INVOKED +#ifndef APSTUDIO_READONLY_SYMBOLS +#define _APS_NEXT_RESOURCE_VALUE 102 +#define _APS_NEXT_COMMAND_VALUE 40001 +#define _APS_NEXT_CONTROL_VALUE 1001 +#define _APS_NEXT_SYMED_VALUE 101 +#endif +#endif diff --git a/example/windows/runner/resources/app_icon.ico b/example/windows/runner/resources/app_icon.ico new file mode 100644 index 0000000000..c04e20caf6 Binary files /dev/null and b/example/windows/runner/resources/app_icon.ico differ diff --git a/example/windows/runner/runner.exe.manifest b/example/windows/runner/runner.exe.manifest new file mode 100644 index 0000000000..a42ea7687c --- /dev/null +++ b/example/windows/runner/runner.exe.manifest @@ -0,0 +1,20 @@ + + + + + PerMonitorV2 + + + + + + + + + + + + + + + diff --git a/example/windows/runner/utils.cpp b/example/windows/runner/utils.cpp new file mode 100644 index 0000000000..b2b08734db --- /dev/null +++ b/example/windows/runner/utils.cpp @@ -0,0 +1,65 @@ +#include "utils.h" + 
+#include +#include +#include +#include + +#include + +void CreateAndAttachConsole() { + if (::AllocConsole()) { + FILE *unused; + if (freopen_s(&unused, "CONOUT$", "w", stdout)) { + _dup2(_fileno(stdout), 1); + } + if (freopen_s(&unused, "CONOUT$", "w", stderr)) { + _dup2(_fileno(stdout), 2); + } + std::ios::sync_with_stdio(); + FlutterDesktopResyncOutputStreams(); + } +} + +std::vector GetCommandLineArguments() { + // Convert the UTF-16 command line arguments to UTF-8 for the Engine to use. + int argc; + wchar_t** argv = ::CommandLineToArgvW(::GetCommandLineW(), &argc); + if (argv == nullptr) { + return std::vector(); + } + + std::vector command_line_arguments; + + // Skip the first argument as it's the binary name. + for (int i = 1; i < argc; i++) { + command_line_arguments.push_back(Utf8FromUtf16(argv[i])); + } + + ::LocalFree(argv); + + return command_line_arguments; +} + +std::string Utf8FromUtf16(const wchar_t* utf16_string) { + if (utf16_string == nullptr) { + return std::string(); + } + int target_length = ::WideCharToMultiByte( + CP_UTF8, WC_ERR_INVALID_CHARS, utf16_string, + -1, nullptr, 0, nullptr, nullptr) + -1; // remove the trailing null character + int input_length = (int)wcslen(utf16_string); + std::string utf8_string; + if (target_length <= 0 || target_length > utf8_string.max_size()) { + return utf8_string; + } + utf8_string.resize(target_length); + int converted_length = ::WideCharToMultiByte( + CP_UTF8, WC_ERR_INVALID_CHARS, utf16_string, + input_length, utf8_string.data(), target_length, nullptr, nullptr); + if (converted_length == 0) { + return std::string(); + } + return utf8_string; +} diff --git a/example/windows/runner/utils.h b/example/windows/runner/utils.h new file mode 100644 index 0000000000..3879d54755 --- /dev/null +++ b/example/windows/runner/utils.h @@ -0,0 +1,19 @@ +#ifndef RUNNER_UTILS_H_ +#define RUNNER_UTILS_H_ + +#include +#include + +// Creates a console for the process, and redirects stdout and stderr to +// it for both 
the runner and the Flutter library. +void CreateAndAttachConsole(); + +// Takes a null-terminated wchar_t* encoded in UTF-16 and returns a std::string +// encoded in UTF-8. Returns an empty std::string on failure. +std::string Utf8FromUtf16(const wchar_t* utf16_string); + +// Gets the command line arguments passed in as a std::vector, +// encoded in UTF-8. Returns an empty std::vector on failure. +std::vector GetCommandLineArguments(); + +#endif // RUNNER_UTILS_H_ diff --git a/example/windows/runner/win32_window.cpp b/example/windows/runner/win32_window.cpp new file mode 100644 index 0000000000..60608d0fe5 --- /dev/null +++ b/example/windows/runner/win32_window.cpp @@ -0,0 +1,288 @@ +#include "win32_window.h" + +#include +#include + +#include "resource.h" + +namespace { + +/// Window attribute that enables dark mode window decorations. +/// +/// Redefined in case the developer's machine has a Windows SDK older than +/// version 10.0.22000.0. +/// See: https://docs.microsoft.com/windows/win32/api/dwmapi/ne-dwmapi-dwmwindowattribute +#ifndef DWMWA_USE_IMMERSIVE_DARK_MODE +#define DWMWA_USE_IMMERSIVE_DARK_MODE 20 +#endif + +constexpr const wchar_t kWindowClassName[] = L"FLUTTER_RUNNER_WIN32_WINDOW"; + +/// Registry key for app theme preference. +/// +/// A value of 0 indicates apps should use dark mode. A non-zero or missing +/// value indicates apps should use light mode. +constexpr const wchar_t kGetPreferredBrightnessRegKey[] = + L"Software\\Microsoft\\Windows\\CurrentVersion\\Themes\\Personalize"; +constexpr const wchar_t kGetPreferredBrightnessRegValue[] = L"AppsUseLightTheme"; + +// The number of Win32Window objects that currently exist. 
+static int g_active_window_count = 0; + +using EnableNonClientDpiScaling = BOOL __stdcall(HWND hwnd); + +// Scale helper to convert logical scaler values to physical using passed in +// scale factor +int Scale(int source, double scale_factor) { + return static_cast(source * scale_factor); +} + +// Dynamically loads the |EnableNonClientDpiScaling| from the User32 module. +// This API is only needed for PerMonitor V1 awareness mode. +void EnableFullDpiSupportIfAvailable(HWND hwnd) { + HMODULE user32_module = LoadLibraryA("User32.dll"); + if (!user32_module) { + return; + } + auto enable_non_client_dpi_scaling = + reinterpret_cast( + GetProcAddress(user32_module, "EnableNonClientDpiScaling")); + if (enable_non_client_dpi_scaling != nullptr) { + enable_non_client_dpi_scaling(hwnd); + } + FreeLibrary(user32_module); +} + +} // namespace + +// Manages the Win32Window's window class registration. +class WindowClassRegistrar { + public: + ~WindowClassRegistrar() = default; + + // Returns the singleton registrar instance. + static WindowClassRegistrar* GetInstance() { + if (!instance_) { + instance_ = new WindowClassRegistrar(); + } + return instance_; + } + + // Returns the name of the window class, registering the class if it hasn't + // previously been registered. + const wchar_t* GetWindowClass(); + + // Unregisters the window class. Should only be called if there are no + // instances of the window. 
+ void UnregisterWindowClass(); + + private: + WindowClassRegistrar() = default; + + static WindowClassRegistrar* instance_; + + bool class_registered_ = false; +}; + +WindowClassRegistrar* WindowClassRegistrar::instance_ = nullptr; + +const wchar_t* WindowClassRegistrar::GetWindowClass() { + if (!class_registered_) { + WNDCLASS window_class{}; + window_class.hCursor = LoadCursor(nullptr, IDC_ARROW); + window_class.lpszClassName = kWindowClassName; + window_class.style = CS_HREDRAW | CS_VREDRAW; + window_class.cbClsExtra = 0; + window_class.cbWndExtra = 0; + window_class.hInstance = GetModuleHandle(nullptr); + window_class.hIcon = + LoadIcon(window_class.hInstance, MAKEINTRESOURCE(IDI_APP_ICON)); + window_class.hbrBackground = 0; + window_class.lpszMenuName = nullptr; + window_class.lpfnWndProc = Win32Window::WndProc; + RegisterClass(&window_class); + class_registered_ = true; + } + return kWindowClassName; +} + +void WindowClassRegistrar::UnregisterWindowClass() { + UnregisterClass(kWindowClassName, nullptr); + class_registered_ = false; +} + +Win32Window::Win32Window() { + ++g_active_window_count; +} + +Win32Window::~Win32Window() { + --g_active_window_count; + Destroy(); +} + +bool Win32Window::Create(const std::wstring& title, + const Point& origin, + const Size& size) { + Destroy(); + + const wchar_t* window_class = + WindowClassRegistrar::GetInstance()->GetWindowClass(); + + const POINT target_point = {static_cast(origin.x), + static_cast(origin.y)}; + HMONITOR monitor = MonitorFromPoint(target_point, MONITOR_DEFAULTTONEAREST); + UINT dpi = FlutterDesktopGetDpiForMonitor(monitor); + double scale_factor = dpi / 96.0; + + HWND window = CreateWindow( + window_class, title.c_str(), WS_OVERLAPPEDWINDOW, + Scale(origin.x, scale_factor), Scale(origin.y, scale_factor), + Scale(size.width, scale_factor), Scale(size.height, scale_factor), + nullptr, nullptr, GetModuleHandle(nullptr), this); + + if (!window) { + return false; + } + + UpdateTheme(window); + + return 
OnCreate(); +} + +bool Win32Window::Show() { + return ShowWindow(window_handle_, SW_SHOWNORMAL); +} + +// static +LRESULT CALLBACK Win32Window::WndProc(HWND const window, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + if (message == WM_NCCREATE) { + auto window_struct = reinterpret_cast(lparam); + SetWindowLongPtr(window, GWLP_USERDATA, + reinterpret_cast(window_struct->lpCreateParams)); + + auto that = static_cast(window_struct->lpCreateParams); + EnableFullDpiSupportIfAvailable(window); + that->window_handle_ = window; + } else if (Win32Window* that = GetThisFromHandle(window)) { + return that->MessageHandler(window, message, wparam, lparam); + } + + return DefWindowProc(window, message, wparam, lparam); +} + +LRESULT +Win32Window::MessageHandler(HWND hwnd, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + switch (message) { + case WM_DESTROY: + window_handle_ = nullptr; + Destroy(); + if (quit_on_close_) { + PostQuitMessage(0); + } + return 0; + + case WM_DPICHANGED: { + auto newRectSize = reinterpret_cast(lparam); + LONG newWidth = newRectSize->right - newRectSize->left; + LONG newHeight = newRectSize->bottom - newRectSize->top; + + SetWindowPos(hwnd, nullptr, newRectSize->left, newRectSize->top, newWidth, + newHeight, SWP_NOZORDER | SWP_NOACTIVATE); + + return 0; + } + case WM_SIZE: { + RECT rect = GetClientArea(); + if (child_content_ != nullptr) { + // Size and position the child window. 
+ MoveWindow(child_content_, rect.left, rect.top, rect.right - rect.left, + rect.bottom - rect.top, TRUE); + } + return 0; + } + + case WM_ACTIVATE: + if (child_content_ != nullptr) { + SetFocus(child_content_); + } + return 0; + + case WM_DWMCOLORIZATIONCOLORCHANGED: + UpdateTheme(hwnd); + return 0; + } + + return DefWindowProc(window_handle_, message, wparam, lparam); +} + +void Win32Window::Destroy() { + OnDestroy(); + + if (window_handle_) { + DestroyWindow(window_handle_); + window_handle_ = nullptr; + } + if (g_active_window_count == 0) { + WindowClassRegistrar::GetInstance()->UnregisterWindowClass(); + } +} + +Win32Window* Win32Window::GetThisFromHandle(HWND const window) noexcept { + return reinterpret_cast( + GetWindowLongPtr(window, GWLP_USERDATA)); +} + +void Win32Window::SetChildContent(HWND content) { + child_content_ = content; + SetParent(content, window_handle_); + RECT frame = GetClientArea(); + + MoveWindow(content, frame.left, frame.top, frame.right - frame.left, + frame.bottom - frame.top, true); + + SetFocus(child_content_); +} + +RECT Win32Window::GetClientArea() { + RECT frame; + GetClientRect(window_handle_, &frame); + return frame; +} + +HWND Win32Window::GetHandle() { + return window_handle_; +} + +void Win32Window::SetQuitOnClose(bool quit_on_close) { + quit_on_close_ = quit_on_close; +} + +bool Win32Window::OnCreate() { + // No-op; provided for subclasses. + return true; +} + +void Win32Window::OnDestroy() { + // No-op; provided for subclasses. 
+} + +void Win32Window::UpdateTheme(HWND const window) { + DWORD light_mode; + DWORD light_mode_size = sizeof(light_mode); + LSTATUS result = RegGetValue(HKEY_CURRENT_USER, kGetPreferredBrightnessRegKey, + kGetPreferredBrightnessRegValue, + RRF_RT_REG_DWORD, nullptr, &light_mode, + &light_mode_size); + + if (result == ERROR_SUCCESS) { + BOOL enable_dark_mode = light_mode == 0; + DwmSetWindowAttribute(window, DWMWA_USE_IMMERSIVE_DARK_MODE, + &enable_dark_mode, sizeof(enable_dark_mode)); + } +} diff --git a/example/windows/runner/win32_window.h b/example/windows/runner/win32_window.h new file mode 100644 index 0000000000..e901dde684 --- /dev/null +++ b/example/windows/runner/win32_window.h @@ -0,0 +1,102 @@ +#ifndef RUNNER_WIN32_WINDOW_H_ +#define RUNNER_WIN32_WINDOW_H_ + +#include + +#include +#include +#include + +// A class abstraction for a high DPI-aware Win32 Window. Intended to be +// inherited from by classes that wish to specialize with custom +// rendering and input handling +class Win32Window { + public: + struct Point { + unsigned int x; + unsigned int y; + Point(unsigned int x, unsigned int y) : x(x), y(y) {} + }; + + struct Size { + unsigned int width; + unsigned int height; + Size(unsigned int width, unsigned int height) + : width(width), height(height) {} + }; + + Win32Window(); + virtual ~Win32Window(); + + // Creates a win32 window with |title| that is positioned and sized using + // |origin| and |size|. New windows are created on the default monitor. Window + // sizes are specified to the OS in physical pixels, hence to ensure a + // consistent size this function will scale the inputted width and height as + // as appropriate for the default monitor. The window is invisible until + // |Show| is called. Returns true if the window was created successfully. + bool Create(const std::wstring& title, const Point& origin, const Size& size); + + // Show the current window. Returns true if the window was successfully shown. 
+ bool Show(); + + // Release OS resources associated with window. + void Destroy(); + + // Inserts |content| into the window tree. + void SetChildContent(HWND content); + + // Returns the backing Window handle to enable clients to set icon and other + // window properties. Returns nullptr if the window has been destroyed. + HWND GetHandle(); + + // If true, closing this window will quit the application. + void SetQuitOnClose(bool quit_on_close); + + // Return a RECT representing the bounds of the current client area. + RECT GetClientArea(); + + protected: + // Processes and route salient window messages for mouse handling, + // size change and DPI. Delegates handling of these to member overloads that + // inheriting classes can handle. + virtual LRESULT MessageHandler(HWND window, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept; + + // Called when CreateAndShow is called, allowing subclass window-related + // setup. Subclasses should return false if setup fails. + virtual bool OnCreate(); + + // Called when Destroy is called. + virtual void OnDestroy(); + + private: + friend class WindowClassRegistrar; + + // OS callback called by message pump. Handles the WM_NCCREATE message which + // is passed when the non-client area is being created and enables automatic + // non-client DPI scaling so that the non-client area automatically + // responds to changes in DPI. All other messages are handled by + // MessageHandler. + static LRESULT CALLBACK WndProc(HWND const window, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept; + + // Retrieves a class instance pointer for |window| + static Win32Window* GetThisFromHandle(HWND const window) noexcept; + + // Update the window frame's theme to match the system theme. + static void UpdateTheme(HWND const window); + + bool quit_on_close_ = false; + + // window handle for top level window. + HWND window_handle_ = nullptr; + + // window handle for hosted content. 
+ HWND child_content_ = nullptr; +}; + +#endif // RUNNER_WIN32_WINDOW_H_ diff --git a/flutter-webrtc.code-workspace b/flutter-webrtc.code-workspace index f6ffe6a247..1ceb7bda28 100644 --- a/flutter-webrtc.code-workspace +++ b/flutter-webrtc.code-workspace @@ -8,5 +8,11 @@ "name": "example" } ], - "settings": {} + "settings": { + "java.configuration.updateBuildConfiguration": "disabled", + "clang-format.executable": "/usr/bin/clang-format", + "files.associations": { + "*.tcc": "cpp" + } + } } \ No newline at end of file diff --git a/format.sh b/format.sh new file mode 100755 index 0000000000..4b04029574 --- /dev/null +++ b/format.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +find . -type f -name "*.cc" -o -type f -name "*.h" -o -type f -name "*.m" -o -type f -name "*.mm" | xargs clang-format -style=file -i diff --git a/ios/Classes/AudioManager.h b/ios/Classes/AudioManager.h new file mode 120000 index 0000000000..1c21c3e586 --- /dev/null +++ b/ios/Classes/AudioManager.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.h \ No newline at end of file diff --git a/ios/Classes/AudioManager.m b/ios/Classes/AudioManager.m new file mode 120000 index 0000000000..bbcfb519ed --- /dev/null +++ b/ios/Classes/AudioManager.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.m \ No newline at end of file diff --git a/ios/Classes/AudioProcessingAdapter.h b/ios/Classes/AudioProcessingAdapter.h new file mode 120000 index 0000000000..f3048db2f1 --- /dev/null +++ b/ios/Classes/AudioProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.h \ No newline at end of file diff --git a/ios/Classes/AudioProcessingAdapter.m b/ios/Classes/AudioProcessingAdapter.m new file mode 120000 index 0000000000..803efdda50 --- /dev/null +++ b/ios/Classes/AudioProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.m \ No newline at end of file diff --git a/ios/Classes/AudioUtils.h b/ios/Classes/AudioUtils.h new file mode 120000 index 
0000000000..efc6c758c8 --- /dev/null +++ b/ios/Classes/AudioUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioUtils.h \ No newline at end of file diff --git a/ios/Classes/AudioUtils.m b/ios/Classes/AudioUtils.m new file mode 120000 index 0000000000..5023efe9fd --- /dev/null +++ b/ios/Classes/AudioUtils.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioUtils.m \ No newline at end of file diff --git a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h new file mode 100644 index 0000000000..cefdbfcb40 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h @@ -0,0 +1,25 @@ +// +// FlutterBroadcastScreenCapturer.h +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. +// + +#import +#import +NS_ASSUME_NONNULL_BEGIN + +extern NSString* const kRTCScreensharingSocketFD; +extern NSString* const kRTCAppGroupIdentifier; +extern NSString* const kRTCScreenSharingExtension; + +@class FlutterSocketConnectionFrameReader; + +@interface FlutterBroadcastScreenCapturer : RTCVideoCapturer +- (void)startCapture; +- (void)stopCapture; +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m new file mode 100644 index 0000000000..147207a1eb --- /dev/null +++ b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m @@ -0,0 +1,69 @@ +// +// FlutterBroadcastScreenCapturer.m +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. 
+// + +#import "FlutterBroadcastScreenCapturer.h" +#import "FlutterSocketConnection.h" +#import "FlutterSocketConnectionFrameReader.h" + +NSString* const kRTCScreensharingSocketFD = @"rtc_SSFD"; +NSString* const kRTCAppGroupIdentifier = @"RTCAppGroupIdentifier"; +NSString* const kRTCScreenSharingExtension = @"RTCScreenSharingExtension"; + +@interface FlutterBroadcastScreenCapturer () + +@property(nonatomic, retain) FlutterSocketConnectionFrameReader* capturer; + +@end + +@interface FlutterBroadcastScreenCapturer (Private) + +@property(nonatomic, readonly) NSString* appGroupIdentifier; + +@end + +@implementation FlutterBroadcastScreenCapturer + +- (void)startCapture { + if (!self.appGroupIdentifier) { + return; + } + + NSString* socketFilePath = [self filePathForApplicationGroupIdentifier:self.appGroupIdentifier]; + FlutterSocketConnectionFrameReader* frameReader = + [[FlutterSocketConnectionFrameReader alloc] initWithDelegate:self.delegate]; + FlutterSocketConnection* connection = + [[FlutterSocketConnection alloc] initWithFilePath:socketFilePath]; + self.capturer = frameReader; + [self.capturer startCaptureWithConnection:connection]; +} + +- (void)stopCapture { + [self.capturer stopCapture]; +} +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if (completionHandler != nil) { + completionHandler(); + } +} +// MARK: Private Methods + +- (NSString*)appGroupIdentifier { + NSDictionary* infoDictionary = [[NSBundle mainBundle] infoDictionary]; + return infoDictionary[kRTCAppGroupIdentifier]; +} + +- (NSString*)filePathForApplicationGroupIdentifier:(nonnull NSString*)identifier { + NSURL* sharedContainer = + [[NSFileManager defaultManager] containerURLForSecurityApplicationGroupIdentifier:identifier]; + NSString* socketFilePath = + [[sharedContainer URLByAppendingPathComponent:kRTCScreensharingSocketFD] path]; + + return socketFilePath; +} + +@end diff --git a/ios/Classes/Broadcast/FlutterSocketConnection.h 
b/ios/Classes/Broadcast/FlutterSocketConnection.h new file mode 100644 index 0000000000..879e4e3202 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnection.h @@ -0,0 +1,20 @@ +// +// FlutterSocketConnection.h +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 08/01/2021. +// + +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface FlutterSocketConnection : NSObject + +- (instancetype)initWithFilePath:(nonnull NSString*)filePath; +- (void)openWithStreamDelegate:(id)streamDelegate; +- (void)close; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/Classes/Broadcast/FlutterSocketConnection.m b/ios/Classes/Broadcast/FlutterSocketConnection.m new file mode 100644 index 0000000000..4a0cbe3797 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnection.m @@ -0,0 +1,157 @@ +// +// FlutterSocketConnection.m +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 08/01/2021. +// + +#include +#include + +#import "FlutterSocketConnection.h" + +@interface FlutterSocketConnection () + +@property(nonatomic, assign) int serverSocket; +@property(nonatomic, strong) dispatch_source_t listeningSource; + +@property(nonatomic, strong) NSThread* networkThread; + +@property(nonatomic, strong) NSInputStream* inputStream; +@property(nonatomic, strong) NSOutputStream* outputStream; + +@end + +@implementation FlutterSocketConnection + +- (instancetype)initWithFilePath:(nonnull NSString*)filePath { + self = [super init]; + + [self setupNetworkThread]; + + self.serverSocket = socket(AF_UNIX, SOCK_STREAM, 0); + if (self.serverSocket < 0) { + NSLog(@"failure creating socket"); + return nil; + } + + if (![self setupSocketWithFileAtPath:filePath]) { + close(self.serverSocket); + return nil; + } + + return self; +} + +- (void)openWithStreamDelegate:(id)streamDelegate { + int status = listen(self.serverSocket, 10); + if (status < 0) { + NSLog(@"failure: socket listening"); + return; + } + + dispatch_source_t listeningSource = + dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, self.serverSocket, 
0, NULL); + dispatch_source_set_event_handler(listeningSource, ^{ + int clientSocket = accept(self.serverSocket, NULL, NULL); + if (clientSocket < 0) { + NSLog(@"failure accepting connection"); + return; + } + + CFReadStreamRef readStream; + CFWriteStreamRef writeStream; + + CFStreamCreatePairWithSocket(kCFAllocatorDefault, clientSocket, &readStream, &writeStream); + + self.inputStream = (__bridge_transfer NSInputStream*)readStream; + self.inputStream.delegate = streamDelegate; + [self.inputStream setProperty:@"kCFBooleanTrue" + forKey:@"kCFStreamPropertyShouldCloseNativeSocket"]; + + self.outputStream = (__bridge_transfer NSOutputStream*)writeStream; + [self.outputStream setProperty:@"kCFBooleanTrue" + forKey:@"kCFStreamPropertyShouldCloseNativeSocket"]; + + [self.networkThread start]; + [self performSelector:@selector(scheduleStreams) + onThread:self.networkThread + withObject:nil + waitUntilDone:true]; + + [self.inputStream open]; + [self.outputStream open]; + }); + + self.listeningSource = listeningSource; + dispatch_resume(listeningSource); +} + +- (void)close { + if (![self.networkThread isExecuting]) { + return; + } + + [self performSelector:@selector(unscheduleStreams) + onThread:self.networkThread + withObject:nil + waitUntilDone:true]; + + self.inputStream.delegate = nil; + self.outputStream.delegate = nil; + + [self.inputStream close]; + [self.outputStream close]; + + [self.networkThread cancel]; + + dispatch_source_cancel(self.listeningSource); + close(self.serverSocket); +} + +// MARK: - Private Methods + +- (void)setupNetworkThread { + self.networkThread = [[NSThread alloc] initWithBlock:^{ + do { + @autoreleasepool { + [[NSRunLoop currentRunLoop] run]; + } + } while (![NSThread currentThread].isCancelled); + }]; + self.networkThread.qualityOfService = NSQualityOfServiceUserInitiated; +} + +- (BOOL)setupSocketWithFileAtPath:(NSString*)filePath { + struct sockaddr_un addr; + memset(&addr, 0, sizeof(addr)); + addr.sun_family = AF_UNIX; + + if 
(filePath.length > sizeof(addr.sun_path)) { + NSLog(@"failure: path too long"); + return false; + } + + unlink(filePath.UTF8String); + strncpy(addr.sun_path, filePath.UTF8String, sizeof(addr.sun_path) - 1); + + int status = bind(self.serverSocket, (struct sockaddr*)&addr, sizeof(addr)); + if (status < 0) { + NSLog(@"failure: socket binding"); + return false; + } + + return true; +} + +- (void)scheduleStreams { + [self.inputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; + [self.outputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; +} + +- (void)unscheduleStreams { + [self.inputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; + [self.outputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; +} + +@end diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h new file mode 100644 index 0000000000..230616e207 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h @@ -0,0 +1,23 @@ +// +// FlutterSocketConnectionFrameReader.h +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. +// + +#import +#import + +NS_ASSUME_NONNULL_BEGIN + +@class FlutterSocketConnection; + +@interface FlutterSocketConnectionFrameReader : RTCVideoCapturer + +- (instancetype)initWithDelegate:(__weak id)delegate; +- (void)startCaptureWithConnection:(nonnull FlutterSocketConnection*)connection; +- (void)stopCapture; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m new file mode 100644 index 0000000000..7485a3492a --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m @@ -0,0 +1,259 @@ +// +// FlutterSocketConnectionFrameReader.m +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. 
+// + +#include + +#import +#import +#import + +#import "FlutterSocketConnection.h" +#import "FlutterSocketConnectionFrameReader.h" + +const NSUInteger kMaxReadLength = 10 * 1024; + +@interface Message : NSObject + +@property(nonatomic, assign, readonly) CVImageBufferRef imageBuffer; +@property(nonatomic, copy, nullable) void (^didComplete)(BOOL succes, Message* message); + +- (NSInteger)appendBytes:(UInt8*)buffer length:(NSUInteger)length; + +@end + +@interface Message () + +@property(nonatomic, assign) CVImageBufferRef imageBuffer; +@property(nonatomic, assign) int imageOrientation; +@property(nonatomic, assign) CFHTTPMessageRef framedMessage; + +@end + +@implementation Message + +- (instancetype)init { + self = [super init]; + if (self) { + self.imageBuffer = NULL; + } + + return self; +} + +- (void)dealloc { + CVPixelBufferRelease(_imageBuffer); +} + +/** Returns the amount of missing bytes to complete the message, or -1 when not enough bytes were + * provided to compute the message length */ +- (NSInteger)appendBytes:(UInt8*)buffer length:(NSUInteger)length { + if (!_framedMessage) { + _framedMessage = CFHTTPMessageCreateEmpty(kCFAllocatorDefault, false); + } + + CFHTTPMessageAppendBytes(_framedMessage, buffer, length); + if (!CFHTTPMessageIsHeaderComplete(_framedMessage)) { + return -1; + } + + NSInteger contentLength = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Content-Length")) integerValue]; + NSInteger bodyLength = + (NSInteger)[CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage)) length]; + + NSInteger missingBytesCount = contentLength - bodyLength; + if (missingBytesCount == 0) { + BOOL success = [self unwrapMessage:self.framedMessage]; + self.didComplete(success, self); + + CFRelease(self.framedMessage); + self.framedMessage = NULL; + } + + return missingBytesCount; +} + +// MARK: Private Methods + +- (CIContext*)imageContext { + // Initializing a CIContext object is costly, so we use a 
singleton instead + static CIContext* imageContext = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + imageContext = [[CIContext alloc] initWithOptions:nil]; + }); + + return imageContext; +} + +- (BOOL)unwrapMessage:(CFHTTPMessageRef)framedMessage { + size_t width = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Buffer-Width")) integerValue]; + size_t height = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Buffer-Height")) integerValue]; + _imageOrientation = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Buffer-Orientation")) intValue]; + + NSData* messageData = CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage)); + + // Copy the pixel buffer + CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, + kCVPixelFormatType_32BGRA, NULL, &_imageBuffer); + if (status != kCVReturnSuccess) { + NSLog(@"CVPixelBufferCreate failed"); + return false; + } + + [self copyImageData:messageData toPixelBuffer:&_imageBuffer]; + + return true; +} + +- (void)copyImageData:(NSData*)data toPixelBuffer:(CVPixelBufferRef*)pixelBuffer { + CVPixelBufferLockBaseAddress(*pixelBuffer, 0); + + CIImage* image = [CIImage imageWithData:data]; + [self.imageContext render:image toCVPixelBuffer:*pixelBuffer]; + + CVPixelBufferUnlockBaseAddress(*pixelBuffer, 0); +} + +@end + +// MARK: - + +@interface FlutterSocketConnectionFrameReader () + +@property(nonatomic, strong) FlutterSocketConnection* connection; +@property(nonatomic, strong) Message* message; + +@end + +@implementation FlutterSocketConnectionFrameReader { + mach_timebase_info_data_t _timebaseInfo; + NSInteger _readLength; + int64_t _startTimeStampNs; +} + +- (instancetype)initWithDelegate:(__weak id)delegate { + self = [super initWithDelegate:delegate]; + if (self) { + mach_timebase_info(&_timebaseInfo); + } + + return self; +} + +- 
(void)startCaptureWithConnection:(FlutterSocketConnection*)connection { + _startTimeStampNs = -1; + + self.connection = connection; + self.message = nil; + + [self.connection openWithStreamDelegate:self]; +} + +- (void)stopCapture { + [self.connection close]; +} + +// MARK: Private Methods + +- (void)readBytesFromStream:(NSInputStream*)stream { + if (!stream.hasBytesAvailable) { + return; + } + + if (!self.message) { + self.message = [[Message alloc] init]; + _readLength = kMaxReadLength; + + __weak __typeof__(self) weakSelf = self; + self.message.didComplete = ^(BOOL success, Message* message) { + if (success) { + [weakSelf didCaptureVideoFrame:message.imageBuffer + withOrientation:message.imageOrientation]; + } + + weakSelf.message = nil; + }; + } + + uint8_t buffer[_readLength]; + NSInteger numberOfBytesRead = [stream read:buffer maxLength:_readLength]; + if (numberOfBytesRead < 0) { + NSLog(@"error reading bytes from stream"); + return; + } + + _readLength = [self.message appendBytes:buffer length:numberOfBytesRead]; + if (_readLength == -1 || _readLength > kMaxReadLength) { + _readLength = kMaxReadLength; + } +} + +- (void)didCaptureVideoFrame:(CVPixelBufferRef)pixelBuffer + withOrientation:(CGImagePropertyOrientation)orientation { + int64_t currentTime = mach_absolute_time(); + int64_t currentTimeStampNs = currentTime * _timebaseInfo.numer / _timebaseInfo.denom; + + if (_startTimeStampNs < 0) { + _startTimeStampNs = currentTimeStampNs; + } + + RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + int64_t frameTimeStampNs = currentTimeStampNs - _startTimeStampNs; + + RTCVideoRotation rotation; + switch (orientation) { + case kCGImagePropertyOrientationLeft: + rotation = RTCVideoRotation_90; + break; + case kCGImagePropertyOrientationDown: + rotation = RTCVideoRotation_180; + break; + case kCGImagePropertyOrientationRight: + rotation = RTCVideoRotation_270; + break; + default: + rotation = RTCVideoRotation_0; + break; 
+ } + + RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:[rtcPixelBuffer toI420] + rotation:rotation + timeStampNs:frameTimeStampNs]; + + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end + +@implementation FlutterSocketConnectionFrameReader (NSStreamDelegate) + +- (void)stream:(NSStream*)aStream handleEvent:(NSStreamEvent)eventCode { + switch (eventCode) { + case NSStreamEventOpenCompleted: + NSLog(@"server stream open completed"); + break; + case NSStreamEventHasBytesAvailable: + [self readBytesFromStream:(NSInputStream*)aStream]; + break; + case NSStreamEventEndEncountered: + NSLog(@"server stream end encountered"); + [self stopCapture]; + break; + case NSStreamEventErrorOccurred: + NSLog(@"server stream error encountered: %@", aStream.streamError.localizedDescription); + break; + + default: + break; + } +} + +@end diff --git a/ios/Classes/CameraUtils.h b/ios/Classes/CameraUtils.h new file mode 120000 index 0000000000..a31c2baab2 --- /dev/null +++ b/ios/Classes/CameraUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.h \ No newline at end of file diff --git a/ios/Classes/CameraUtils.m b/ios/Classes/CameraUtils.m new file mode 120000 index 0000000000..336e1ea963 --- /dev/null +++ b/ios/Classes/CameraUtils.m @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.m \ No newline at end of file diff --git a/ios/Classes/FlutterRPScreenRecorder.h b/ios/Classes/FlutterRPScreenRecorder.h deleted file mode 100644 index f951a3d4ba..0000000000 --- a/ios/Classes/FlutterRPScreenRecorder.h +++ /dev/null @@ -1,9 +0,0 @@ -#import - -@interface FlutterRPScreenRecorder : RTCVideoCapturer - --(void)startCapture; - --(void)stopCapture; - -@end diff --git a/ios/Classes/FlutterRPScreenRecorder.h b/ios/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 0000000000..a34a3193c9 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of 
file diff --git a/ios/Classes/FlutterRPScreenRecorder.m b/ios/Classes/FlutterRPScreenRecorder.m deleted file mode 100644 index aa61614915..0000000000 --- a/ios/Classes/FlutterRPScreenRecorder.m +++ /dev/null @@ -1,72 +0,0 @@ -#import "FlutterRPScreenRecorder.h" -#import - -//See: https://developer.apple.com/videos/play/wwdc2017/606/ - -@implementation FlutterRPScreenRecorder { - RPScreenRecorder *screenRecorder; - RTCVideoSource *source; -} - -- (instancetype)initWithDelegate:(__weak id)delegate { - source = delegate; - return [super initWithDelegate:delegate]; -} - --(void)startCapture -{ - if(screenRecorder == NULL) - screenRecorder = [RPScreenRecorder sharedRecorder]; - - [screenRecorder setMicrophoneEnabled:NO]; - - if (![screenRecorder isAvailable]) { - NSLog(@"Screen recorder is not available!"); - return; - } - - [screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) { - if (bufferType == RPSampleBufferTypeVideo) {// We want video only now - [self handleSourceBuffer:sampleBuffer sampleType:bufferType]; - } - } completionHandler:^(NSError * _Nullable error) { - if (error != nil) - NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); - }]; -} - --(void)stopCapture -{ - [screenRecorder stopCaptureWithHandler:^(NSError * _Nullable error) { - if (error != nil) - NSLog(@"!!! 
stopCaptureWithHandler/completionHandler %@ !!!", error); - }]; -} - --(void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer sampleType:(RPSampleBufferType)sampleType -{ - if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || - !CMSampleBufferDataIsReady(sampleBuffer)) { - return; - } - - CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - if (pixelBuffer == nil) { - return; - } - - size_t width = CVPixelBufferGetWidth(pixelBuffer); - size_t height = CVPixelBufferGetHeight(pixelBuffer); - - [source adaptOutputFormatToWidth:width/2 height:height/2 fps:8]; - - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; - int64_t timeStampNs = - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; - RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer - rotation:RTCVideoRotation_0 - timeStampNs:timeStampNs]; - [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; -} - -@end diff --git a/ios/Classes/FlutterRPScreenRecorder.m b/ios/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCAudioSink-Interface.h b/ios/Classes/FlutterRTCAudioSink-Interface.h new file mode 120000 index 0000000000..940c06d646 --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink-Interface.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink-Interface.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCAudioSink.h b/ios/Classes/FlutterRTCAudioSink.h new file mode 120000 index 0000000000..5242de9e22 --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink.h \ No newline at end of file diff --git 
a/ios/Classes/FlutterRTCAudioSink.mm b/ios/Classes/FlutterRTCAudioSink.mm new file mode 120000 index 0000000000..c15372c4ed --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink.mm @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink.mm \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h deleted file mode 100755 index c2e039f072..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import - -@interface RTCDataChannel (Flutter) -@property (nonatomic, strong) NSString *peerConnectionId; -@property (nonatomic, strong) NSNumber *flutterChannelId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCDataChannel) - - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(nonnull NSString *)label - config:(nonnull RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger; - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId; - - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(nonnull NSString *)data - type:(nonnull NSString *)type; - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h new file mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m deleted file mode 100755 index e408c53076..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.m +++ /dev/null @@ -1,165 +0,0 @@ -#import -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCPeerConnection.h" -#import - -@implementation RTCDataChannel (Flutter) 
- -- (NSString *)peerConnectionId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setPeerConnectionId:(NSString *)peerConnectionId -{ - objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink )eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSNumber *)flutterChannelId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterChannelId:(NSNumber *)flutterChannelId -{ - objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (RTCDataChannel) - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(NSString *)label - config:(RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; - - if (nil != dataChannel) { - dataChannel.peerConnectionId = peerConnectionId; - NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; - peerConnection.dataChannels[dataChannelId] = 
dataChannel; - dataChannel.flutterChannelId = dataChannelId; - dataChannel.delegate = self; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] - binaryMessenger:messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - } -} - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - RTCDataChannel *dataChannel = dataChannels[dataChannelId]; - FlutterEventChannel *eventChannel = dataChannel.eventChannel; - [eventChannel setStreamHandler:nil]; - dataChannel.eventChannel = nil; - [dataChannel close]; - [dataChannels removeObjectForKey:dataChannelId]; -} - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(id)data - type:(NSString *)type -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; - - NSData *bytes = [type isEqualToString:@"binary"] ? - ((FlutterStandardTypedData*)data).data : - [data dataUsingEncoding:NSUTF8StringEncoding]; - - RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; - [dataChannel sendData:buffer]; -} - -- (NSString *)stringForDataChannelState:(RTCDataChannelState)state -{ - switch (state) { - case RTCDataChannelStateConnecting: return @"connecting"; - case RTCDataChannelStateOpen: return @"open"; - case RTCDataChannelStateClosing: return @"closing"; - case RTCDataChannelStateClosed: return @"closed"; - } - return nil; -} - -#pragma mark - RTCDataChannelDelegate methods - -// Called when the data channel state has changed. 
-- (void)dataChannelDidChangeState:(RTCDataChannel*)channel -{ - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelStateChanged", - @"id": channel.flutterChannelId, - @"state": [self stringForDataChannelState:channel.readyState]}); - } -} - -// Called when a data buffer was successfully received. -- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer -{ - NSString *type; - id data; - if (buffer.isBinary) { - type = @"binary"; - data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; - } else { - type = @"text"; - data = [[NSString alloc] initWithData:buffer.data - encoding:NSUTF8StringEncoding]; - } - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelReceiveMessage", - @"id": channel.flutterChannelId, - @"type": type, - @"data": (data ? 
data : [NSNull null])}); - } -} - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDesktopCapturer.h b/ios/Classes/FlutterRTCDesktopCapturer.h new file mode 120000 index 0000000000..eff4773160 --- /dev/null +++ b/ios/Classes/FlutterRTCDesktopCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDesktopCapturer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDesktopCapturer.m b/ios/Classes/FlutterRTCDesktopCapturer.m new file mode 120000 index 0000000000..5388e628f4 --- /dev/null +++ b/ios/Classes/FlutterRTCDesktopCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDesktopCapturer.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.h b/ios/Classes/FlutterRTCFrameCapturer.h deleted file mode 100644 index 03e7655595..0000000000 --- a/ios/Classes/FlutterRTCFrameCapturer.h +++ /dev/null @@ -1,8 +0,0 @@ -#import -#import - -@interface FlutterRTCFrameCapturer : NSObject - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; - -@end diff --git a/ios/Classes/FlutterRTCFrameCapturer.h b/ios/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.m b/ios/Classes/FlutterRTCFrameCapturer.m deleted file mode 100644 index 34ae78f1a8..0000000000 --- a/ios/Classes/FlutterRTCFrameCapturer.m +++ /dev/null @@ -1,82 +0,0 @@ -#import - -#import "FlutterRTCFrameCapturer.h" - -#include "libyuv.h" - -@import CoreImage; -@import CoreVideo; - -@implementation 
FlutterRTCFrameCapturer { - RTCVideoTrack* _track; - NSString* _path; - FlutterResult _result; - bool _gotFrame; -} - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result -{ - self = [super init]; - if (self) { - _gotFrame = false; - _track = track; - _path = path; - _result = result; - [track addRenderer:self]; - } - return self; -} - -- (void)setSize:(CGSize)size -{ -} - -- (void)renderFrame:(nullable RTCVideoFrame *)frame -{ - if (_gotFrame || frame == nil) return; - _gotFrame = true; - - id buffer = frame.buffer; - CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; - - CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; - CIContext *context = [CIContext contextWithOptions:nil]; - CGImageRef cgImage = [context createCGImage:ciImage - fromRect:CGRectMake(0, 0, frame.width, frame.height)]; - - UIImageOrientation orientation; - switch (frame.rotation) { - case RTCVideoRotation_90: - orientation = UIImageOrientationRight; - break; - case RTCVideoRotation_180: - orientation = UIImageOrientationDown; - break; - case RTCVideoRotation_270: - orientation = UIImageOrientationLeft; - default: - orientation = UIImageOrientationUp; - break; - } - - UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; - CGImageRelease(cgImage); - NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); - - if ([jpgData writeToFile:_path atomically:NO]) { - NSLog(@"File writed successfully to %@", _path); - _result(nil); - } else { - NSLog(@"Failed to write to file"); - _result([FlutterError errorWithCode:@"CaptureFrameFailed" - message:@"Failed to write JPEG data to file" - details:nil]); - } - - dispatch_async(dispatch_get_main_queue(), ^{ - [self->_track removeRenderer:self]; - self->_track = nil; - }); -} - -@end diff --git a/ios/Classes/FlutterRTCFrameCapturer.m b/ios/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- 
/dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCryptor.h b/ios/Classes/FlutterRTCFrameCryptor.h new file mode 120000 index 0000000000..ad3e0de33e --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCryptor.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCryptor.m b/ios/Classes/FlutterRTCFrameCryptor.m new file mode 120000 index 0000000000..bd62d3db56 --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCryptor.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaRecorder.h b/ios/Classes/FlutterRTCMediaRecorder.h new file mode 120000 index 0000000000..31ca7e3b5f --- /dev/null +++ b/ios/Classes/FlutterRTCMediaRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaRecorder.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaRecorder.m b/ios/Classes/FlutterRTCMediaRecorder.m new file mode 120000 index 0000000000..1c2b1bf1a8 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaRecorder.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h deleted file mode 100644 index 12f1633cde..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.h +++ /dev/null @@ -1,29 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" - -@interface FlutterWebRTCPlugin (RTCMediaStream) - --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)createLocalMediaStream:(FlutterResult)result; - --(void)getSources:(FlutterResult)result; - --(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; 
- --(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track - torch:(BOOL) torch - result:(FlutterResult) result; - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track - toPath:(NSString *) path - result:(FlutterResult) result; -@end diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m deleted file mode 100755 index 322766b39a..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.m +++ /dev/null @@ -1,613 +0,0 @@ -#import - -#import - -#import "FlutterRTCFrameCapturer.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRPScreenRecorder.h" - -@implementation AVCaptureDevice (Flutter) - -- (NSString*)positionString { - switch (self.position) { - case AVCaptureDevicePositionUnspecified: return @"unspecified"; - case AVCaptureDevicePositionBack: return @"back"; - case AVCaptureDevicePositionFront: return @"front"; - } - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCMediaStream) - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} - */ -typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} - */ -typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); - -- (RTCMediaConstraints *)defaultMediaStreamConstraints { - NSDictionary *mandatoryConstraints - = @{ @"minWidth" : @"1280", - @"minHeight" : @"720", - @"minFrameRate" : @"30" }; - RTCMediaConstraints* constraints = - 
[[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; -} - -/** - * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the audio-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCAudioTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCAudioTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserAudio:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - NSString *trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack *audioTrack - = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; - - [mediaStream addAudioTrack:audioTrack]; - - successCallback(mediaStream); -} - -// TODO: Use RCTConvert for constraints ... --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult) result { - // Initialize RTCMediaStream with a unique label in order to allow multiple - // RTCMediaStream instances initialized by multiple getUserMedia calls to be - // added to 1 RTCPeerConnection instance. As suggested by - // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good - // practice, use a UUID (conforming to RFC4122). 
- NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream - = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - [self - getUserMedia:constraints - successCallback:^ (RTCMediaStream *mediaStream) { - NSString *mediaStreamId = mediaStream.streamId; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in mediaStream.audioTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); - } - errorCallback:^ (NSString *errorType, NSString *errorMessage) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] - message:errorMessage - details:nil]); - } - mediaStream:mediaStream]; -} - -/** - * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which - * satisfies specific constraints and adds it to a specific - * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track - * of the respective media type and the specified {@code constraints} specify - * that a track of the respective media type is required; otherwise, reports - * success for the specified {@code mediaStream} to a specific - * {@link NavigatorUserMediaSuccessCallback}. 
In other words, implements a media - * type-specific iteration of or successfully concludes the - * {@code getUserMedia()} algorithm. The method will be recursively invoked to - * conclude the whole {@code getUserMedia()} algorithm either with (successful) - * satisfaction of the specified {@code constraints} or with failure. - * - * @param constraints The {@code MediaStreamConstraints} which specifies the - * requested media types and which the new {@code RTCAudioTrack} or - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm. - */ -- (void)getUserMedia:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // If mediaStream contains no audioTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local audio content. - if (mediaStream.audioTracks.count == 0) { - // constraints.audio - id audioConstraints = constraints[@"audio"]; - BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; - if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { - [self requestAccessForMediaType:AVMediaTypeAudio - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } - } - - // If mediaStream contains no videoTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local video content. 
- if (mediaStream.videoTracks.count == 0) { - // constraints.video - id videoConstraints = constraints[@"video"]; - if (videoConstraints) { - BOOL requestAccessForVideo - = [videoConstraints isKindOfClass:[NSNumber class]] - ? [videoConstraints boolValue] - : [videoConstraints isKindOfClass:[NSDictionary class]]; -#if !TARGET_IPHONE_SIMULATOR - if (requestAccessForVideo) { - [self requestAccessForMediaType:AVMediaTypeVideo - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } -#endif - } - } - - // There are audioTracks and/or videoTracks in mediaStream as requested by - // constraints so the getUserMedia() is to conclude with success. - successCallback(mediaStream); -} - -/** - * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the video-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCVideoTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. 
- */ -- (void)getUserVideo:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - id videoConstraints = constraints[@"video"]; - AVCaptureDevice *videoDevice; - if ([videoConstraints isKindOfClass:[NSDictionary class]]) { - // constraints.video.optional - id optionalVideoConstraints = videoConstraints[@"optional"]; - if (optionalVideoConstraints - && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { - NSArray *options = optionalVideoConstraints; - for (id item in options) { - if ([item isKindOfClass:[NSDictionary class]]) { - NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; - if (sourceId) { - videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; - if (videoDevice) { - break; - } - } - } - } - } - if (!videoDevice) { - // constraints.video.facingMode - // - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - id facingMode = videoConstraints[@"facingMode"]; - if (facingMode && [facingMode isKindOfClass:[NSString class]]) { - AVCaptureDevicePosition position; - if ([facingMode isEqualToString:@"environment"]) { - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionBack; - } else if ([facingMode isEqualToString:@"user"]) { - self._usingFrontCamera = YES; - position = AVCaptureDevicePositionFront; - } else { - // If the specified facingMode value is not supported, fall back to - // the default video device. 
- self._usingFrontCamera = NO; - position = AVCaptureDevicePositionUnspecified; - } - videoDevice = [self findDeviceForPosition:position]; - } - } - if (!videoDevice) { - videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - } - - //TODO(rostopira): refactor to separate function and add support for max - - self._targetWidth = 1280; - self._targetHeight = 720; - self._targetFps = 30; - - id mandatory = videoConstraints[@"mandatory"]; - // constraints.video.mandatory - if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) - { - id widthConstraint = mandatory[@"minWidth"]; - if ([widthConstraint isKindOfClass:[NSString class]]) { - int possibleWidth = [widthConstraint intValue]; - if (possibleWidth != 0) { - self._targetWidth = possibleWidth; - } - } - id heightConstraint = mandatory[@"minHeight"]; - if ([heightConstraint isKindOfClass:[NSString class]]) { - int possibleHeight = [heightConstraint intValue]; - if (possibleHeight != 0) { - self._targetHeight = possibleHeight; - } - } - id fpsConstraint = mandatory[@"minFrameRate"]; - if ([fpsConstraint isKindOfClass:[NSString class]]) { - int possibleFps = [fpsConstraint intValue]; - if (possibleFps != 0) { - self._targetFps = possibleFps; - } - } - } - - if (videoDevice) { - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - if (self.videoCapturer) { - [self.videoCapturer stopCapture]; - } - self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { - if (error) { - NSLog(@"Start capture error: %@", [error localizedDescription]); - } - }]; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - successCallback(mediaStream); - } else { - // According to step 6.2.3 of the getUserMedia() algorithm, if there is no - // source, fail with a new OverconstrainedError. - errorCallback(@"OverconstrainedError", /* errorMessage */ nil); - } -} - --(void)mediaStreamRelease:(RTCMediaStream *)stream -{ - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:stream.streamId]; - } -} - - -/** - * Obtains local media content of a specific type. Requests access for the - * specified {@code mediaType} if necessary. In other words, implements a media - * type-specific iteration of the {@code getUserMedia()} algorithm. - * - * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} - * which specifies the type of the local media content to obtain. - * @param constraints The {@code MediaStreamConstraints} which are to be - * satisfied by the obtained local media content. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is to collect the - * obtained local media content of the specified {@code mediaType}. 
- */ -- (void)requestAccessForMediaType:(NSString *)mediaType - constraints:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // According to step 6.2.1 of the getUserMedia() algorithm, if there is no - // source, fail "with a new DOMException object whose name attribute has the - // value NotFoundError." - // XXX The following approach does not work for audio in Simulator. That is - // because audio capture is done using AVAudioSession which does not use - // AVCaptureDevice there. Anyway, Simulator will not (visually) request access - // for audio. - if (mediaType == AVMediaTypeVideo - && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { - // Since successCallback and errorCallback are asynchronously invoked - // elsewhere, make sure that the invocation here is consistent. - dispatch_async(dispatch_get_main_queue(), ^ { - errorCallback(@"DOMException", @"NotFoundError"); - }); - return; - } - - [AVCaptureDevice - requestAccessForMediaType:mediaType - completionHandler:^ (BOOL granted) { - dispatch_async(dispatch_get_main_queue(), ^ { - if (granted) { - NavigatorUserMediaSuccessCallback scb - = ^ (RTCMediaStream *mediaStream) { - [self getUserMedia:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - }; - - if (mediaType == AVMediaTypeAudio) { - [self getUserAudio:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } else if (mediaType == AVMediaTypeVideo) { - [self getUserVideo:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } - } else { - // According to step 10 Permission Failure of the getUserMedia() - // algorithm, if the user has denied permission, fail "with a new - // DOMException object whose name attribute has the value - // NotAllowedError." 
- errorCallback(@"DOMException", @"NotAllowedError"); - } - }); - }]; -} - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result { - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; - - [screenCapturer startCapture]; - - //TODO: - self.videoCapturer = screenCapturer; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); -} - --(void)createLocalMediaStream:(FlutterResult)result{ - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": [mediaStream streamId] }); -} - --(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - 
@"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"audioinput", - }]; - } - result(@{@"sources": sources}); -} - --(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track -{ - // what's different to mediaStreamTrackStop? only call mediaStream explicitly? - if (mediaStream && track) { - track.isEnabled = NO; - // FIXME this is called when track is removed from the MediaStream, - // but it doesn't mean it can not be added back using MediaStream.addTrack - //TODO: [self.localTracks removeObjectForKey:trackID]; - if ([track.kind isEqualToString:@"audio"]) { - [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; - } else if([track.kind isEqualToString:@"video"]) { - [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; - } - } -} - --(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled -{ - if (track && track.isEnabled != enabled) { - track.isEnabled = enabled; - } -} - --(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track result:(FlutterResult) result -{ - if (!self.videoCapturer) { - result(@NO); - return; - } - if (self.videoCapturer.captureSession.inputs.count == 0) { - result(@NO); - return; - } - - AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; - AVCaptureDevice *device = deviceInput.device; - - result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); -} - --(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track torch:(BOOL)torch result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't set torch"); - return; - } - if (self.videoCapturer.captureSession.inputs.count == 0) { - NSLog(@"Video capturer is missing an input. 
Can't set torch"); - return; - } - - AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; - AVCaptureDevice *device = deviceInput.device; - - if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { - NSLog(@"Current capture device does not support torch. Can't set torch"); - return; - } - - NSError *error; - if ([device lockForConfiguration:&error] == NO) { - NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); - return; - } - - device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; - [device unlockForConfiguration]; - - result(nil); -} - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't switch camera"); - return; - } - self._usingFrontCamera = !self._usingFrontCamera; - AVCaptureDevicePosition position = self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; - AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ - if (error != nil) { - result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); - } else { - result([NSNumber numberWithBool:self._usingFrontCamera]); - } - }]; -} - --(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. 
Can't capture frame."); - return; - } - - FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; -} - --(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track -{ - if (track) { - track.isEnabled = NO; - [self.localTracks removeObjectForKey:track.trackId]; - } -} - -- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { - if (position == AVCaptureDevicePositionUnspecified) { - return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; - for (AVCaptureDevice *device in captureDevices) { - if (device.position == position) { - return device; - } - } - return captureDevices[0]; -} - -- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { - NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; - AVCaptureDeviceFormat *selectedFormat = nil; - int currentDiff = INT_MAX; - for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); - if (diff < currentDiff) { - selectedFormat = format; - currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { - selectedFormat = format; - } - } - return selectedFormat; -} - -- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { - Float64 maxSupportedFramerate = 0; - for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { - maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); - } - return fmin(maxSupportedFramerate, self._targetFps); -} - -@end diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m new file 
mode 120000 index 0000000000..2e988ad614 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h deleted file mode 100755 index b99f885b0a..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.h +++ /dev/null @@ -1,43 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface RTCPeerConnection (Flutter) -@property (nonatomic, strong) NSMutableDictionary *dataChannels; -@property (nonatomic, strong) NSMutableDictionary *remoteStreams; -@property (nonatomic, strong) NSMutableDictionary *remoteTracks; -@property (nonatomic, strong) NSString *flutterId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result; - --(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; - --(void) 
peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection; - -@end diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m deleted file mode 100755 index 6517f85ffe..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.m +++ /dev/null @@ -1,504 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCDataChannel.h" - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import - -@implementation RTCPeerConnection (Flutter) - -@dynamic eventSink; - -- (NSString *)flutterId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterId:(NSString *)flutterId -{ - objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink)eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)dataChannels -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setDataChannels:(NSMutableDictionary *)dataChannels -{ - objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteStreams 
-{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams -{ - objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteTracks -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks -{ - objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection -{ - [peerConnection setConfiguration:configuration]; -} - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result -{ - [peerConnection - offerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateOfferFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection - answerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if 
(error) { - result([FlutterError errorWithCode:@"CreateAnswerFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection addIceCandidate:candidate]; - result(nil); - //NSLog(@"addICECandidateresult: %@", candidate); -} - --(void) peerConnectionClose:(RTCPeerConnection *)peerConnection -{ - [peerConnection close]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. 
- NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSString *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; -} - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result -{ - RTCMediaStreamTrack *track = nil; - if (!trackID - || !trackID.length - || (track = self.localTracks[trackID]) - || (track = peerConnection.remoteTracks[trackID])) { - [peerConnection statsForTrack:track - statsOutputLevel:RTCStatsOutputLevelStandard - completionHandler:^(NSArray *reports) { - - NSMutableArray *stats = [NSMutableArray array]; - - for (RTCLegacyStatsReport *report in reports) { - [stats addObject:@{@"id": report.reportId, - @"type": report.type, - @"timestamp": @(report.timestamp), - @"values": report.values - }]; - } - - result(@{@"stats": stats}); - }]; - }else{ - result([FlutterError errorWithCode:@"GetStatsFailed" - message:[NSString stringWithFormat:@"Error %@", @""] - details:nil]); - } -} - -- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { - switch (state) { - case RTCIceConnectionStateNew: return @"new"; - case RTCIceConnectionStateChecking: return @"checking"; - case RTCIceConnectionStateConnected: return @"connected"; - case RTCIceConnectionStateCompleted: return @"completed"; - case RTCIceConnectionStateFailed: return @"failed"; - case RTCIceConnectionStateDisconnected: return @"disconnected"; - case RTCIceConnectionStateClosed: return @"closed"; - case RTCIceConnectionStateCount: return @"count"; - } - return nil; -} - -- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { - switch (state) { - case RTCIceGatheringStateNew: return @"new"; - case RTCIceGatheringStateGathering: return @"gathering"; 
- case RTCIceGatheringStateComplete: return @"complete"; - } - return nil; -} - -- (NSString *)stringForSignalingState:(RTCSignalingState)state { - switch (state) { - case RTCSignalingStateStable: return @"stable"; - case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; - case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; - case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; - case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; - case RTCSignalingStateClosed: return @"closed"; - } - return nil; -} - - -/** - * Parses the constraint keys and values of a specific JavaScript object into - * a specific NSMutableDictionary in a format suitable for the - * initialization of a RTCMediaConstraints instance. - * - * @param src The JavaScript object which defines constraint keys and values and - * which is to be parsed into the specified dst. - * @param dst The NSMutableDictionary into which the constraint keys - * and values defined by src are to be written in a format suitable for - * the initialization of a RTCMediaConstraints instance. - */ -- (void)parseJavaScriptConstraints:(NSDictionary *)src - intoWebRTCConstraints:(NSMutableDictionary *)dst { - for (id srcKey in src) { - id srcValue = src[srcKey]; - NSString *dstValue; - - if ([srcValue isKindOfClass:[NSNumber class]]) { - dstValue = [srcValue boolValue] ? @"true" : @"false"; - } else { - dstValue = [srcValue description]; - } - dst[[srcKey description]] = dstValue; - } -} - -/** - * Parses a JavaScript object into a new RTCMediaConstraints instance. - * - * @param constraints The JavaScript object to parse into a new - * RTCMediaConstraints instance. - * @returns A new RTCMediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. 
- */ -- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { - id mandatory = constraints[@"mandatory"]; - NSMutableDictionary *mandatory_ - = [NSMutableDictionary new]; - - if ([mandatory isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)mandatory - intoWebRTCConstraints:mandatory_]; - } - - id optional = constraints[@"optional"]; - NSMutableDictionary *optional_ - = [NSMutableDictionary new]; - - if ([optional isKindOfClass:[NSArray class]]) { - for (id o in (NSArray *)optional) { - if ([o isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)o - intoWebRTCConstraints:optional_]; - } - } - } - - return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ - optionalConstraints:optional_]; -} - -#pragma mark - RTCPeerConnectionDelegate methods - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"signalingState", - @"state" : [self stringForSignalingState:newState]}); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{ - - peerConnection.remoteTracks[track.trackId] = track; - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ - [peerConnection.remoteTracks 
removeObjectForKey:track.trackId]; - NSString *streamId = stream.streamId; - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in stream.audioTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in stream.videoTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddStream", - @"streamId": streamId, - @"audioTracks": audioTracks, - @"videoTracks": videoTracks, - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream { - NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; - // We assume there can be only one object for 1 key - if (keysArray.count > 1) { - NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); - } - NSString *streamId = stream.streamId; - - for (RTCVideoTrack *track in 
stream.videoTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - [peerConnection.remoteStreams removeObjectForKey:streamId]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveStream", - @"streamId": streamId, - }); - } -} - -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{@"event" : @"onRenegotiationNeeded",}); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceConnectionState", - @"state" : [self stringForICEConnectionState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceGatheringState", - @"state" : [self stringForICEGatheringState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onCandidate", - @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { - if (-1 == dataChannel.channelId) { - return; - } - - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - dataChannel.peerConnectionId = peerConnection.flutterId; - dataChannel.delegate = self; - 
peerConnection.dataChannels[dataChannelId] = dataChannel; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] - binaryMessenger:self.messenger]; - - dataChannel.eventChannel = eventChannel; - dataChannel.flutterChannelId = dataChannelId; - [eventChannel setStreamHandler:dataChannel]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"didOpenDataChannel", - @"id": dataChannelId, - @"label": dataChannel.label - }); - } -} - -@end - diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoPlatformView.h b/ios/Classes/FlutterRTCVideoPlatformView.h new file mode 100644 index 0000000000..01e1215ea8 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformView.h @@ -0,0 +1,17 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import + +@interface FlutterRTCVideoPlatformView : UIView + +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +- (instancetype _Nonnull)initWithFrame:(CGRect)frame; + +- (void)setSize:(CGSize)size; + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformView.m b/ios/Classes/FlutterRTCVideoPlatformView.m new file mode 100644 index 0000000000..2f44ea47d2 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformView.m @@ -0,0 +1,135 @@ +#import "FlutterRTCVideoPlatformView.h" + +@implementation FlutterRTCVideoPlatformView { + CGSize _videoSize; + AVSampleBufferDisplayLayer* _videoLayer; + CGSize _remoteVideoSize; + CATransform3D _bufferTransform; + RTCVideoRotation _lastVideoRotation; +} + +- 
(instancetype)initWithFrame:(CGRect)frame { + if (self = [super initWithFrame:frame]) { + _videoLayer = [[AVSampleBufferDisplayLayer alloc] init]; + _videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; + _videoLayer.frame = CGRectZero; + _bufferTransform = CATransform3DIdentity; + _lastVideoRotation = RTCVideoRotation_0; + [self.layer addSublayer:_videoLayer]; + self.opaque = NO; + } + return self; +} + +- (void)layoutSubviews { + _videoLayer.frame = self.bounds; + [_videoLayer removeAllAnimations]; +} + +- (void)setSize:(CGSize)size { + _remoteVideoSize = size; +} + +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + + CVPixelBufferRef pixelBuffer = nil; + if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBuffer = ((RTCCVPixelBuffer*)frame.buffer).pixelBuffer; + CFRetain(pixelBuffer); + } else if ([frame.buffer isKindOfClass:[RTCI420Buffer class]]) { + pixelBuffer = [self toCVPixelBuffer:frame]; + } + + if (_lastVideoRotation != frame.rotation) { + _bufferTransform = [self fromFrameRotation:frame.rotation]; + _videoLayer.transform = _bufferTransform; + [_videoLayer layoutIfNeeded]; + _lastVideoRotation = frame.rotation; + } + + CMSampleBufferRef sampleBuffer = [self sampleBufferFromPixelBuffer:pixelBuffer]; + if (sampleBuffer) { + if (@available(iOS 14.0, *)) { + if([_videoLayer requiresFlushToResumeDecoding]) { + [_videoLayer flushAndRemoveImage]; + } + } else { + // Fallback on earlier versions + } + [_videoLayer enqueueSampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + } + + if (pixelBuffer) CFRelease(pixelBuffer); +} + +- (CVPixelBufferRef)toCVPixelBuffer:(RTCVideoFrame*)frame { + CVPixelBufferRef outputPixelBuffer; + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, frame.width, frame.height, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + (__bridge CFDictionaryRef)(pixelAttributes), &outputPixelBuffer); + id i420Buffer = 
(RTCI420Buffer*)frame.buffer; + + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); + return outputPixelBuffer; +} + +- (CMSampleBufferRef)sampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer { + CMSampleBufferRef sampleBuffer = NULL; + OSStatus err = noErr; + CMVideoFormatDescriptionRef formatDesc = NULL; + err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc); + if (err != noErr) { + return nil; + } + CMSampleTimingInfo sampleTimingInfo = kCMTimingInfoInvalid; + err = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDesc, + &sampleTimingInfo, &sampleBuffer); + if (sampleBuffer) { + CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); + CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0); + CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); + } + if (err != noErr) { + return nil; + } + formatDesc = nil; + return sampleBuffer; +} + +- (CATransform3D)fromFrameRotation:(RTCVideoRotation)rotation { + switch (rotation) { + case RTCVideoRotation_0: + return CATransform3DIdentity; + case RTCVideoRotation_90: + return CATransform3DMakeRotation(M_PI / 2.0, 0, 0, 1); + case 
RTCVideoRotation_180: + return CATransform3DMakeRotation(M_PI, 0, 0, 1); + case RTCVideoRotation_270: + return CATransform3DMakeRotation(-M_PI / 2.0, 0, 0, 1); + } + return CATransform3DIdentity; +} + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.h b/ios/Classes/FlutterRTCVideoPlatformViewController.h new file mode 100644 index 0000000000..cb15ed7b35 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewController.h @@ -0,0 +1,23 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import + +@interface FlutterRTCVideoPlatformViewController + : NSObject + +@property(nonatomic, strong) NSObject* _Nonnull messenger; +@property(nonatomic, strong) FlutterEventSink _Nonnull eventSink; +@property(nonatomic) int64_t viewId; +@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack; + +- (instancetype _Nullable)initWithMessenger:(NSObject* _Nonnull)messenger + viewIdentifier:(int64_t)viewId + frame:(CGRect)frame; + +- (UIView* _Nonnull)view; + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.m b/ios/Classes/FlutterRTCVideoPlatformViewController.m new file mode 100644 index 0000000000..1f227ee1d5 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewController.m @@ -0,0 +1,116 @@ +#import "FlutterRTCVideoPlatformViewController.h" +#import "FlutterRTCVideoPlatformView.h" +#import "FlutterWebRTCPlugin.h" + +@implementation FlutterRTCVideoPlatformViewController { + FlutterRTCVideoPlatformView* _videoView; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; + CGSize _renderSize; + RTCVideoRotation _rotation; +} + +@synthesize messenger = _messenger; +@synthesize eventSink = _eventSink; +@synthesize viewId = _viewId; + +- (instancetype)initWithMessenger:(NSObject*)messenger + viewIdentifier:(int64_t)viewId + frame:(CGRect)frame { + self = [super init]; + if (self) { + _isFirstFrameRendered = false; + _renderSize = CGSizeZero; + _rotation = -1; + _messenger = messenger; + _videoView = 
[[FlutterRTCVideoPlatformView alloc] initWithFrame:frame]; + _viewId = viewId; + /*Create Event Channel.*/ + _eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/PlatformViewId%lld", viewId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + + return self; +} + +- (UIView*)view { + return _videoView; +} + +- (void)setVideoTrack:(RTCVideoTrack*)videoTrack { + RTCVideoTrack* oldValue = self.videoTrack; + if (oldValue == videoTrack) { + return; + } + _videoTrack = videoTrack; + _isFirstFrameRendered = false; + if (oldValue) { + [oldValue removeRenderer:(id)self]; + _videoView.frame = CGRectZero; + } + if (videoTrack) { + [videoTrack addRenderer:(id)self]; + } +} + +#pragma mark - RTCVideoRenderer methods +- (void)renderFrame:(RTCVideoFrame*)frame { + if (_renderSize.width != frame.width || _renderSize.height != frame.height || + !_isFirstFrameRendered) { + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"didPlatformViewChangeVideoSize", + @"id" : @(self.viewId), + @"width" : @(frame.width), + @"height" : @(frame.height), + }); + } + _renderSize = CGSizeMake(frame.width, frame.height); + } + + if (frame.rotation != _rotation || !_isFirstFrameRendered) { + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"didPlatformViewChangeRotation", + @"id" : @(self.viewId), + @"rotation" : @(frame.rotation), + }); + } + _rotation = frame.rotation; + } + + if (!_isFirstFrameRendered) { + if (self.eventSink) { + postEvent(self.eventSink, @{@"event" : @"didFirstFrameRendered"}); + } + self->_isFirstFrameRendered = true; + } + + [_videoView renderFrame:frame]; +} + +/** + * Sets the size of the video frame to render. + * + * @param size The size of the video frame to render. 
+ */ +- (void)setSize:(CGSize)size { + [_videoView setSize:size]; +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.h b/ios/Classes/FlutterRTCVideoPlatformViewFactory.h new file mode 100644 index 0000000000..55e43b944c --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewFactory.h @@ -0,0 +1,19 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#define FLutterRTCVideoPlatformViewFactoryID @"rtc_video_platform_view" + +@class FlutterRTCVideoPlatformViewController; + +@interface FLutterRTCVideoPlatformViewFactory : NSObject + +@property(nonatomic, strong) NSObject* _Nonnull messenger; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable renders; + +- (_Nonnull instancetype)initWithMessenger:(NSObject* _Nonnull)messenger; + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.m b/ios/Classes/FlutterRTCVideoPlatformViewFactory.m new file mode 100644 index 0000000000..c70bd68ab4 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewFactory.m @@ -0,0 +1,34 @@ +#import "FlutterRTCVideoPlatformViewFactory.h" +#import "FlutterRTCVideoPlatformViewController.h" + +@implementation FLutterRTCVideoPlatformViewFactory { +} + +@synthesize messenger = _messenger; + +- (instancetype)initWithMessenger:(NSObject*)messenger { + self = [super init]; + if (self) { + _messenger = messenger; + self.renders = [NSMutableDictionary new]; + } + + return self; +} + +- (NSObject*)createArgsCodec { + return [FlutterStandardMessageCodec sharedInstance]; +} + +- (NSObject*)createWithFrame:(CGRect)frame + viewIdentifier:(int64_t)viewId + arguments:(id _Nullable)args { + 
FlutterRTCVideoPlatformViewController* render = + [[FlutterRTCVideoPlatformViewController alloc] initWithMessenger:_messenger + viewIdentifier:viewId + frame:frame]; + self.renders[@(viewId)] = render; + return render; +} + +@end diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h deleted file mode 100755 index d1bd7b8675..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.h +++ /dev/null @@ -1,33 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -#import -#import -#import -#import - -@interface FlutterRTCVideoRenderer : NSObject - -/** - * The {@link RTCVideoTrack}, if any, which this instance renders. - */ -@property (nonatomic, strong) RTCVideoTrack *videoTrack; -@property (nonatomic) int64_t textureId; -@property (nonatomic, weak) id registry; -@property (nonatomic, strong) FlutterEventSink eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - -- (void)dispose; - -@end - - -@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m deleted file mode 100755 index 1f112b24ae..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.m +++ /dev/null @@ -1,282 +0,0 @@ -#import "FlutterRTCVideoRenderer.h" - -#import -#import -#import -#import - -#import -#include "libyuv.h" - -#import "FlutterWebRTCPlugin.h" - -@implementation FlutterRTCVideoRenderer { - CGSize _frameSize; - 
CGSize _renderSize; - CVPixelBufferRef _pixelBufferRef; - RTCVideoRotation _rotation; - FlutterEventChannel* _eventChannel; - bool _isFirstFrameRendered; -} - -@synthesize textureId = _textureId; -@synthesize registry = _registry; -@synthesize eventSink = _eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - self = [super init]; - if (self){ - _isFirstFrameRendered = false; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - _registry = registry; - _pixelBufferRef = nil; - _eventSink = nil; - _rotation = -1; - _textureId = [registry registerTexture:self]; - /*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - return self; -} - --(void)dealloc { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } -} - -- (CVPixelBufferRef)copyPixelBuffer { - if(_pixelBufferRef != nil){ - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; - } - return nil; -} - --(void)dispose{ - [_registry unregisterTexture:_textureId]; -} - -- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { - RTCVideoTrack *oldValue = self.videoTrack; - - if (oldValue != videoTrack) { - _isFirstFrameRendered = false; - if (oldValue) { - [oldValue removeRenderer:self]; - } - _videoTrack = videoTrack; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - if (videoTrack) { - [videoTrack addRenderer:self]; - } - } -} - - --(id) correctRotation:(const id) src - withRotation:(RTCVideoRotation) rotation -{ - - int rotated_width = src.width; - int rotated_height = src.height; - - if (rotation == RTCVideoRotation_90 || - rotation == RTCVideoRotation_270) { - int temp = rotated_width; - rotated_width = rotated_height; - rotated_height = temp; - } - - id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width 
height:rotated_height]; - - I420Rotate(src.dataY, src.strideY, - src.dataU, src.strideU, - src.dataV, src.strideV, - (uint8_t*)buffer.dataY, buffer.strideY, - (uint8_t*)buffer.dataU,buffer.strideU, - (uint8_t*)buffer.dataV, buffer.strideV, - src.width, src.height, - (RotationModeEnum)rotation); - - return buffer; -} - --(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame -{ - id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; - CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); - - const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); - if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || - pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { - // NV12 - uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); - const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); - uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); - const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); - - I420ToNV12(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dstY, - (int)dstYStride, - dstUV, - (int)dstUVStride, - i420Buffer.width, - i420Buffer.height); - } else { - uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); - const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); - - if (pixelFormat == kCVPixelFormatType_32BGRA) { - // Corresponds to libyuv::FOURCC_ARGB - I420ToARGB(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } else if (pixelFormat == kCVPixelFormatType_32ARGB) { - // Corresponds to libyuv::FOURCC_BGRA - I420ToBGRA(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - 
i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } - } - - CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); -} - -#pragma mark - RTCVideoRenderer methods -- (void)renderFrame:(RTCVideoFrame *)frame { - - [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; - - __weak FlutterRTCVideoRenderer *weakSelf = self; - if(_renderSize.width != frame.width || _renderSize.height != frame.height){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeVideoSize", - @"id": @(strongSelf.textureId), - @"width": @(frame.width), - @"height": @(frame.height), - }); - } - }); - _renderSize = CGSizeMake(frame.width, frame.height); - } - - if(frame.rotation != _rotation){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeRotation", - @"id": @(strongSelf.textureId), - @"rotation": @(frame.rotation), - }); - } - }); - - _rotation = frame.rotation; - } - - //Notify the Flutter new pixelBufferRef to be ready. - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; - if (!strongSelf->_isFirstFrameRendered) { - if (strongSelf.eventSink) { - strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); - strongSelf->_isFirstFrameRendered = true; - } - } - }); -} - -/** - * Sets the size of the video frame to render. - * - * @param size The size of the video frame to render. 
- */ -- (void)setSize:(CGSize)size { - if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) - { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } - NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; - CVPixelBufferCreate(kCFAllocatorDefault, - size.width, size.height, - kCVPixelFormatType_32BGRA, - (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); - - _frameSize = size; - } -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; -} - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId{ - - RTCVideoTrack *videoTrack; - RTCMediaStream *stream = [self streamForId:streamId peerConnectionId:peerConnectionId]; - if(stream){ - NSArray *videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - if (!videoTrack) { - NSLog(@"No video track for RTCMediaStream: %@", streamId); - } - } else { - videoTrack = nil; - } - - view.videoTrack = videoTrack; -} - -@end - diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h deleted file mode 100644 index 3be67f3518..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.h +++ /dev/null @@ -1,30 +0,0 @@ -#import -#import - -#import -#import -#import -#import -#import -#import - -@class FlutterRTCVideoRenderer; - -@interface FlutterWebRTCPlugin : NSObject - -@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property (nonatomic, strong) NSMutableDictionary *peerConnections; -@property (nonatomic, strong) NSMutableDictionary *localStreams; -@property (nonatomic, strong) NSMutableDictionary *localTracks; -@property (nonatomic, strong) NSMutableDictionary *renders; -@property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */ -@property (nonatomic, strong) NSObject* messenger; -@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; -@property (nonatomic) BOOL _usingFrontCamera; -@property (nonatomic) int _targetWidth; -@property (nonatomic) int _targetHeight; -@property (nonatomic) int _targetFps; - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git 
a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m deleted file mode 100644 index c515f3b381..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.m +++ /dev/null @@ -1,825 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCVideoRenderer.h" - -#import -#import - - - -@implementation FlutterWebRTCPlugin { - FlutterMethodChannel *_methodChannel; - id _registry; - id _messenger; - id _textures; - BOOL _speakerOn; -} - -@synthesize messenger = _messenger; - -+ (void)registerWithRegistrar:(NSObject*)registrar { - - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"FlutterWebRTC.Method" - binaryMessenger:[registrar messenger]]; - UIViewController *viewController = (UIViewController *)registrar.messenger; - FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel - registrar:registrar - messenger:[registrar messenger] - viewController:viewController - withTextures:[registrar textures]]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel - registrar:(NSObject*)registrar - messenger:(NSObject*)messenger - viewController:(UIViewController *)viewController - withTextures:(NSObject *)textures{ - - self = [super init]; - - if (self) { - _methodChannel = channel; - _registry = registrar; - _textures = textures; - _messenger = messenger; - _speakerOn = NO; - self.viewController = viewController; - } - - RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; - RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] - initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; - - - self.peerConnections = [NSMutableDictionary new]; - self.localStreams = 
[NSMutableDictionary new]; - self.localTracks = [NSMutableDictionary new]; - self.renders = [[NSMutableDictionary alloc] init]; - - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) name:AVAudioSessionRouteChangeNotification object:nil]; - - return self; -} - - -- (void)didSessionRouteChange:(NSNotification *)notification { - NSDictionary *interuptionDict = notification.userInfo; - NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; - - switch (routeChangeReason) { - case AVAudioSessionRouteChangeReasonCategoryChange: { - NSError* error; - [[AVAudioSession sharedInstance] overrideOutputAudioPort:_speakerOn? AVAudioSessionPortOverrideSpeaker : AVAudioSessionPortOverrideNone error:&error]; - } - break; - - default: - break; - } -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { - - if ([@"createPeerConnection" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* configuration = argsMap[@"configuration"]; - NSDictionary* constraints = argsMap[@"constraints"]; - - RTCPeerConnection *peerConnection = [self.peerConnectionFactory - peerConnectionWithConfiguration:[self RTCConfiguration:configuration] - constraints:[self parseMediaConstraints:constraints] - delegate:self]; - - peerConnection.remoteStreams = [NSMutableDictionary new]; - peerConnection.remoteTracks = [NSMutableDictionary new]; - peerConnection.dataChannels = [NSMutableDictionary new]; - - NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; - peerConnection.flutterId = peerConnectionId; - - /*Create Event Channel.*/ - peerConnection.eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] - binaryMessenger:_messenger]; - [peerConnection.eventChannel setStreamHandler:peerConnection]; - - self.peerConnections[peerConnectionId] = 
peerConnection; - result(@{ @"peerConnectionId" : peerConnectionId}); - } else if ([@"getUserMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getUserMedia:constraints result:result]; - } else if ([@"getDisplayMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getDisplayMedia:constraints result:result]; - } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { - [self createLocalMediaStream:result]; - } else if ([@"getSources" isEqualToString:call.method]) { - [self getSources:result]; - } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - [self mediaStreamGetTracks:streamId result:result]; - } else if ([@"createOffer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createAnswer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary * constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateAnswer:constraints - peerConnection:peerConnection - result:result]; - }else{ - result([FlutterError errorWithCode:[NSString 
stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection addStream:stream]; - result(@""); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"removeStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection removeStream:stream]; - result(nil); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"captureFrame" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* path = argsMap[@"path"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStreamTrack *track = [self trackForId: trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil 
details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - if(peerConnection) - { - [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - - if(peerConnection) - { - [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"sendDtmf" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* 
peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* tone = argsMap[@"tone"]; - int duration = ((NSNumber*)argsMap[@"duration"]).intValue; - int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - - RTCRtpSender* audioSender = nil ; - for( RTCRtpSender *rtpSender in peerConnection.senders){ - if([[[rtpSender track] kind] isEqualToString:@"audio"]) { - audioSender = rtpSender; - } - } - if(audioSender){ - NSOperationQueue *queue = [[NSOperationQueue alloc] init]; - [queue addOperationWithBlock:^{ - double durationMs = duration / 1000.0; - double interToneGapMs = interToneGap / 1000.0; - [audioSender.dtmfSender insertDtmf :(NSString *)tone - duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; - NSLog(@"DTMF Tone played "); - }]; - } - - result(@{@"result": @"success"}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addCandidate" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* candMap = argsMap[@"candidate"]; - NSString *sdp = candMap[@"candidate"]; - int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; - NSString *sdpMid = candMap[@"sdpMid"]; - - RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection) - { - [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - 
} else if ([@"getStats" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; - result(nil); - } else if ([@"createDataChannel" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* label = argsMap[@"label"]; - NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; - [self createDataChannel:peerConnectionId - label:label - config:[self RTCDataChannelConfiguration:dataChannelDict] - messenger:_messenger]; - result(nil); - } else if ([@"dataChannelSend" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - NSString* type = argsMap[@"type"]; - id data = argsMap[@"data"]; - - [self dataChannelSend:peerConnectionId - dataChannelId:dataChannelId - data:data - type:type]; - result(nil); - } else if ([@"dataChannelClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - [self dataChannelClose:peerConnectionId - dataChannelId:dataChannelId]; - result(nil); - } else if ([@"streamDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - 
if(source){ - [self.videoCapturer stopCapture]; - self.videoCapturer = nil; - } - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:streamId]; - } - result(nil); - } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* enabled = argsMap[@"enabled"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil){ - track.isEnabled = enabled.boolValue; - } - result(nil); - } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil) { - if([track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - [stream addAudioTrack:audioTrack]; - } else if ([track isKindOfClass:[RTCVideoTrack class]]){ - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [stream addVideoTrack:videoTrack]; - } - } else { - result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" message:nil details:nil]); - } - } else { - result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" message:nil details:nil]); - } - result(nil); - } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil) { - if([track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - [stream removeAudioTrack:audioTrack]; - 
} else if ([track isKindOfClass:[RTCVideoTrack class]]){ - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [stream removeVideoTrack:videoTrack]; - } - } else { - result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" message:nil details:nil]); - } - } else { - result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" message:nil details:nil]); - } - result(nil); - } else if ([@"trackDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - [self.localTracks removeObjectForKey:trackId]; - result(nil); - } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if (peerConnection) { - [peerConnection close]; - [self.peerConnections removeObjectForKey:peerConnectionId]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - for (NSNumber *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. 
- } - [dataChannels removeAllObjects]; - } - result(nil); - } else if ([@"createVideoRenderer" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures - messenger:_messenger]; - self.renders[@(render.textureId)] = render; - result(@{@"textureId": @(render.textureId)}); - } else if ([@"videoRendererDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; - result(nil); - } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - NSString *streamId = argsMap[@"streamId"]; - NSString *peerConnectionId = argsMap[@"ownerTag"]; - if(render){ - [self setStreamId:streamId view:render peerConnectionId:peerConnectionId]; - } - result(nil); - } else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackHasTorch:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - BOOL torch = [argsMap[@"torch"] 
boolValue]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSwitchCamera:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setVolume" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* volume = argsMap[@"volume"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - RTCAudioSource *audioSource = audioTrack.source; - audioSource.volume = [volume doubleValue]; - } - result(nil); - } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* mute = argsMap[@"mute"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track 
isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - audioTrack.isEnabled = !mute.boolValue; - } - result(nil); - } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* enable = argsMap[@"enable"]; - _speakerOn = enable.boolValue; - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:_speakerOn ? AVAudioSessionCategoryOptionDefaultToSpeaker : 0 - error:nil]; - [audioSession setActive:YES error:nil]; - result(nil); - } else if ([@"getLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.localDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.remoteDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setConfiguration" isEqualToString:call.method]){ - NSDictionary* argsMap = 
call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* configuration = argsMap[@"configuration"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; - result(nil); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else { - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc -{ - [_localTracks removeAllObjects]; - _localTracks = nil; - [_localStreams removeAllObjects]; - _localStreams = nil; - - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - peerConnection.delegate = nil; - [peerConnection close]; - } - [_peerConnections removeAllObjects]; - _peerConnectionFactory = nil; -} - - --(void)mediaStreamGetTracks:(NSString*)streamId - result:(FlutterResult)result { - RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; - if(stream){ - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack *track in stream.audioTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [audioTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - for (RTCMediaStreamTrack *track in stream.videoTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [videoTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - result(@{@"audioTracks": 
audioTracks, @"videoTracks" : videoTracks }); - }else{ - result(nil); - } -} - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId -{ - RTCMediaStream *stream = _localStreams[streamId]; - if (!stream) { - if (peerConnectionId.length > 0) { - RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - } else { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } - } - } - } - return stream; -} - -- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId -{ - RTCMediaStreamTrack *track = _localTracks[trackId]; - if (!track) { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - track = peerConnection.remoteTracks[trackId]; - if (track) { - break; - } - } - } - - return track; -} - -- (RTCIceServer *)RTCIceServer:(id)json -{ - if (!json) { - NSLog(@"a valid iceServer value"); - return nil; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return nil; - } - - NSArray *urls; - if ([json[@"url"] isKindOfClass:[NSString class]]) { - // TODO: 'url' is non-standard - urls = @[json[@"url"]]; - } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { - urls = @[json[@"urls"]]; - } else { - urls = (NSArray*)json[@"urls"]; - } - - if (json[@"username"] != nil || json[@"credential"] != nil) { - return [[RTCIceServer alloc]initWithURLStrings:urls - username:json[@"username"] - credential:json[@"credential"]]; - } - - return [[RTCIceServer alloc] initWithURLStrings:urls]; -} - - -- (nonnull RTCConfiguration *)RTCConfiguration:(id)json -{ - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - - if (!json) { - return config; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return config; - } - - if (json[@"audioJitterBufferMaxPackets"] != nil && 
[json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { - config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; - } - - if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { - NSString *bundlePolicy = json[@"bundlePolicy"]; - if ([bundlePolicy isEqualToString:@"balanced"]) { - config.bundlePolicy = RTCBundlePolicyBalanced; - } else if ([bundlePolicy isEqualToString:@"max-compat"]) { - config.bundlePolicy = RTCBundlePolicyMaxCompat; - } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { - config.bundlePolicy = RTCBundlePolicyMaxBundle; - } - } - - if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { - config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; - } - - if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { - config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; - } - - if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { - NSMutableArray *iceServers = [NSMutableArray new]; - for (id server in json[@"iceServers"]) { - RTCIceServer *convert = [self RTCIceServer:server]; - if (convert != nil) { - [iceServers addObject:convert]; - } - } - config.iceServers = iceServers; - } - - if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { - NSString *iceTransportPolicy = json[@"iceTransportPolicy"]; - if ([iceTransportPolicy isEqualToString:@"all"]) { - config.iceTransportPolicy = RTCIceTransportPolicyAll; - } else if ([iceTransportPolicy isEqualToString:@"none"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNone; - } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNoHost; - } else if 
([iceTransportPolicy isEqualToString:@"relay"]) { - config.iceTransportPolicy = RTCIceTransportPolicyRelay; - } - } - - if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { - NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; - if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; - } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; - } - } - - if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { - NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; - if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; - } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; - } - } - - if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { - NSString *sdpSemantics = json[@"sdpSemantics"]; - if ([sdpSemantics isEqualToString:@"plan-b"]) { - config.sdpSemantics = RTCSdpSemanticsPlanB; - } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { - config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - } - } - - return config; -} - -- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json -{ - if (!json) { - return nil; - } - if ([json isKindOfClass:[NSDictionary class]]) { - RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; - - if (json[@"id"]) { - [init setChannelId:(int)[json[@"id"] integerValue]]; - } - if (json[@"ordered"]) { - init.isOrdered = [json[@"ordered"] boolValue]; - } - if (json[@"maxRetransmitTime"]) { - init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; - } - if (json[@"maxRetransmits"]) { - init.maxRetransmits = [json[@"maxRetransmits"] intValue]; - } - if (json[@"negotiated"]) { - init.isNegotiated = [json[@"negotiated"] boolValue]; - } - if 
(json[@"protocol"]) { - init.protocol = json[@"protocol"]; - } - return init; - } - return nil; -} - -- (CGRect)parseRect:(NSDictionary *)rect { - return CGRectMake([[rect valueForKey:@"left"] doubleValue], - [[rect valueForKey:@"top"] doubleValue], - [[rect valueForKey:@"width"] doubleValue], - [[rect valueForKey:@"height"] doubleValue]); -} - -@end diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m new file mode 120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline at end of file diff --git a/ios/Classes/LocalAudioTrack.h b/ios/Classes/LocalAudioTrack.h new file mode 120000 index 0000000000..421b56b2af --- /dev/null +++ b/ios/Classes/LocalAudioTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.h \ No newline at end of file diff --git a/ios/Classes/LocalAudioTrack.m b/ios/Classes/LocalAudioTrack.m new file mode 120000 index 0000000000..71fa724d15 --- /dev/null +++ b/ios/Classes/LocalAudioTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.m \ No newline at end of file diff --git a/ios/Classes/LocalTrack.h b/ios/Classes/LocalTrack.h new file mode 120000 index 0000000000..7d41789949 --- /dev/null +++ b/ios/Classes/LocalTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalTrack.h \ No newline at end of file diff --git a/ios/Classes/LocalVideoTrack.h b/ios/Classes/LocalVideoTrack.h new file mode 120000 index 0000000000..5069f7dd17 --- /dev/null +++ b/ios/Classes/LocalVideoTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.h \ No newline at end of file diff --git a/ios/Classes/LocalVideoTrack.m b/ios/Classes/LocalVideoTrack.m new file mode 120000 index 0000000000..182490a4fb --- /dev/null +++ b/ios/Classes/LocalVideoTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.m \ No newline at end of file diff --git a/ios/Classes/RTCAudioSource+Private.h 
b/ios/Classes/RTCAudioSource+Private.h new file mode 120000 index 0000000000..7ce3b77fd6 --- /dev/null +++ b/ios/Classes/RTCAudioSource+Private.h @@ -0,0 +1 @@ +../../common/darwin/Classes/RTCAudioSource+Private.h \ No newline at end of file diff --git a/ios/Classes/VideoProcessingAdapter.h b/ios/Classes/VideoProcessingAdapter.h new file mode 120000 index 0000000000..d93141230c --- /dev/null +++ b/ios/Classes/VideoProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.h \ No newline at end of file diff --git a/ios/Classes/VideoProcessingAdapter.m b/ios/Classes/VideoProcessingAdapter.m new file mode 120000 index 0000000000..c80ad1ca73 --- /dev/null +++ b/ios/Classes/VideoProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.m \ No newline at end of file diff --git a/ios/Classes/audio_sink_bridge.cpp b/ios/Classes/audio_sink_bridge.cpp new file mode 120000 index 0000000000..13215e8454 --- /dev/null +++ b/ios/Classes/audio_sink_bridge.cpp @@ -0,0 +1 @@ +../../common/darwin/Classes/audio_sink_bridge.cpp \ No newline at end of file diff --git a/ios/Classes/media_stream_interface.h b/ios/Classes/media_stream_interface.h new file mode 120000 index 0000000000..5810a86316 --- /dev/null +++ b/ios/Classes/media_stream_interface.h @@ -0,0 +1 @@ +../../common/darwin/Classes/media_stream_interface.h \ No newline at end of file diff --git a/ios/flutter_webrtc.podspec b/ios/flutter_webrtc.podspec index aa19eea685..778368a8a0 100644 --- a/ios/flutter_webrtc.podspec +++ b/ios/flutter_webrtc.podspec @@ -3,7 +3,7 @@ # Pod::Spec.new do |s| s.name = 'flutter_webrtc' - s.version = '0.2.2' + s.version = '0.14.0' s.summary = 'Flutter WebRTC plugin for iOS.' s.description = <<-DESC A new flutter plugin project. @@ -15,9 +15,12 @@ A new flutter plugin project. 
s.source_files = 'Classes/**/*' s.public_header_files = 'Classes/**/*.h' s.dependency 'Flutter' - s.dependency 'Libyuv', '1703' - s.dependency 'GoogleWebRTC', '1.1.29400' - s.ios.deployment_target = '10.0' + s.dependency 'WebRTC-SDK', '125.6422.07' + s.ios.deployment_target = '13.0' s.static_framework = true + s.pod_target_xcconfig = { + 'CLANG_CXX_LANGUAGE_STANDARD' => 'c++14', + 'USER_HEADER_SEARCH_PATHS' => 'Classes/**/*.h' + } + s.libraries = 'c++' end - diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart index a0a39116ff..b7dd3a8fc6 100644 --- a/lib/flutter_webrtc.dart +++ b/lib/flutter_webrtc.dart @@ -1,27 +1,25 @@ library flutter_webrtc; -export 'src/enums.dart'; -export 'src/get_user_media.dart' - if (dart.library.html) 'src/web/get_user_media.dart'; -export 'src/media_recorder.dart' - if (dart.library.html) 'src/web/media_recorder.dart'; -export 'src/media_stream.dart' - if (dart.library.html) 'src/web/media_stream.dart'; -export 'src/media_stream_track.dart' - if (dart.library.html) 'src/web/media_stream_track.dart'; -export 'src/rtc_data_channel.dart' - if (dart.library.html) 'src/web/rtc_data_channel.dart'; -export 'src/rtc_dtmf_sender.dart' - if (dart.library.html) 'src/web/rtc_dtmf_sender.dart'; -export 'src/rtc_ice_candidate.dart' - if (dart.library.html) 'src/web/rtc_ice_candidate.dart'; -export 'src/rtc_peerconnection.dart' - if (dart.library.html) 'src/web/rtc_peerconnection.dart'; -export 'src/rtc_peerconnection_factory.dart' - if (dart.library.html) 'src/web/rtc_peerconnection_factory.dart'; -export 'src/rtc_session_description.dart' - if (dart.library.html) 'src/web/rtc_session_description.dart'; -export 'src/rtc_stats_report.dart'; -export 'src/rtc_video_view.dart' - if (dart.library.html) 'src/web/rtc_video_view.dart'; -export 'src/utils.dart' if (dart.library.html) 'src/web/utils.dart'; +export 'package:webrtc_interface/webrtc_interface.dart' + hide MediaDevices, MediaRecorder, Navigator; + +export 'src/helper.dart'; +export 
'src/desktop_capturer.dart'; +export 'src/media_devices.dart'; +export 'src/media_recorder.dart'; +export 'src/video_renderer_extension.dart'; +export 'src/native/factory_impl.dart' + if (dart.library.js_interop) 'src/web/factory_impl.dart'; +export 'src/native/rtc_video_renderer_impl.dart' + if (dart.library.js_interop) 'src/web/rtc_video_renderer_impl.dart'; +export 'src/native/rtc_video_view_impl.dart' + if (dart.library.js_interop) 'src/web/rtc_video_view_impl.dart'; +export 'src/native/utils.dart' + if (dart.library.js_interop) 'src/web/utils.dart'; +export 'src/native/adapter_type.dart'; +export 'src/native/camera_utils.dart'; +export 'src/native/audio_management.dart'; +export 'src/native/android/audio_configuration.dart'; +export 'src/native/ios/audio_configuration.dart'; +export 'src/native/rtc_video_platform_view_controller.dart'; +export 'src/native/rtc_video_platform_view.dart'; diff --git a/lib/src/desktop_capturer.dart b/lib/src/desktop_capturer.dart new file mode 100644 index 0000000000..1780cb593c --- /dev/null +++ b/lib/src/desktop_capturer.dart @@ -0,0 +1,71 @@ +import 'dart:async'; +import 'dart:typed_data'; + +enum SourceType { + Screen, + Window, +} + +final desktopSourceTypeToString = { + SourceType.Screen: 'screen', + SourceType.Window: 'window', +}; + +final tringToDesktopSourceType = { + 'screen': SourceType.Screen, + 'window': SourceType.Window, +}; + +class ThumbnailSize { + ThumbnailSize(this.width, this.height); + factory ThumbnailSize.fromMap(Map map) { + return ThumbnailSize(map['width'], map['height']); + } + int width; + int height; + + Map toMap() => {'width': width, 'height': height}; +} + +abstract class DesktopCapturerSource { + /// The identifier of a window or screen that can be used as a + /// chromeMediaSourceId constraint when calling + String get id; + + /// A screen source will be named either Entire Screen or Screen , + /// while the name of a window source will match the window title. 
+ String get name; + + ///A thumbnail image of the source. jpeg encoded. + Uint8List? get thumbnail; + + /// specified in the options passed to desktopCapturer.getSources. + /// The actual size depends on the scale of the screen or window. + ThumbnailSize get thumbnailSize; + + /// The type of the source. + SourceType get type; + + StreamController get onNameChanged => throw UnimplementedError(); + + StreamController get onThumbnailChanged => + throw UnimplementedError(); +} + +abstract class DesktopCapturer { + StreamController get onAdded => + throw UnimplementedError(); + StreamController get onRemoved => + throw UnimplementedError(); + StreamController get onNameChanged => + throw UnimplementedError(); + StreamController get onThumbnailChanged => + throw UnimplementedError(); + + ///Get the screen source of the specified types + Future> getSources( + {required List types, ThumbnailSize? thumbnailSize}); + + /// Updates the list of screen sources of the specified types + Future updateSources({required List types}); +} diff --git a/lib/src/enums.dart b/lib/src/enums.dart deleted file mode 100644 index 7bc1721d20..0000000000 --- a/lib/src/enums.dart +++ /dev/null @@ -1,108 +0,0 @@ -enum RecorderAudioChannel { INPUT, OUTPUT } - -/// RTCDataChannelMessage type -enum MessageType { text, binary } - -enum RTCDataChannelState { - RTCDataChannelConnecting, - RTCDataChannelOpen, - RTCDataChannelClosing, - RTCDataChannelClosed, -} - -enum RTCSignalingState { - RTCSignalingStateStable, - RTCSignalingStateHaveLocalOffer, - RTCSignalingStateHaveRemoteOffer, - RTCSignalingStateHaveLocalPrAnswer, - RTCSignalingStateHaveRemotePrAnswer, - RTCSignalingStateClosed -} - -enum RTCIceGatheringState { - RTCIceGatheringStateNew, - RTCIceGatheringStateGathering, - RTCIceGatheringStateComplete -} - -enum RTCIceConnectionState { - RTCIceConnectionStateNew, - RTCIceConnectionStateChecking, - RTCIceConnectionStateCompleted, - RTCIceConnectionStateConnected, - RTCIceConnectionStateCount, - 
RTCIceConnectionStateFailed, - RTCIceConnectionStateDisconnected, - RTCIceConnectionStateClosed, -} - -enum RTCVideoViewObjectFit { - RTCVideoViewObjectFitContain, - RTCVideoViewObjectFitCover, -} - -RTCIceConnectionState iceConnectionStateForString(String state) { - switch (state) { - case 'new': - return RTCIceConnectionState.RTCIceConnectionStateNew; - case 'checking': - return RTCIceConnectionState.RTCIceConnectionStateChecking; - case 'connected': - return RTCIceConnectionState.RTCIceConnectionStateConnected; - case 'completed': - return RTCIceConnectionState.RTCIceConnectionStateCompleted; - case 'failed': - return RTCIceConnectionState.RTCIceConnectionStateFailed; - case 'disconnected': - return RTCIceConnectionState.RTCIceConnectionStateDisconnected; - case 'closed': - return RTCIceConnectionState.RTCIceConnectionStateClosed; - case 'count': - return RTCIceConnectionState.RTCIceConnectionStateCount; - } - return RTCIceConnectionState.RTCIceConnectionStateClosed; -} - -RTCIceGatheringState iceGatheringStateforString(String state) { - switch (state) { - case 'new': - return RTCIceGatheringState.RTCIceGatheringStateNew; - case 'gathering': - return RTCIceGatheringState.RTCIceGatheringStateGathering; - case 'complete': - return RTCIceGatheringState.RTCIceGatheringStateComplete; - } - return RTCIceGatheringState.RTCIceGatheringStateNew; -} - -RTCSignalingState signalingStateForString(String state) { - switch (state) { - case 'stable': - return RTCSignalingState.RTCSignalingStateStable; - case 'have-local-offer': - return RTCSignalingState.RTCSignalingStateHaveLocalOffer; - case 'have-local-pranswer': - return RTCSignalingState.RTCSignalingStateHaveLocalPrAnswer; - case 'have-remote-offer': - return RTCSignalingState.RTCSignalingStateHaveRemoteOffer; - case 'have-remote-pranswer': - return RTCSignalingState.RTCSignalingStateHaveRemotePrAnswer; - case 'closed': - return RTCSignalingState.RTCSignalingStateClosed; - } - return 
RTCSignalingState.RTCSignalingStateClosed; -} - -RTCDataChannelState rtcDataChannelStateForString(String state) { - switch (state) { - case 'connecting': - return RTCDataChannelState.RTCDataChannelConnecting; - case 'open': - return RTCDataChannelState.RTCDataChannelOpen; - case 'closing': - return RTCDataChannelState.RTCDataChannelClosing; - case 'closed': - return RTCDataChannelState.RTCDataChannelClosed; - } - return RTCDataChannelState.RTCDataChannelClosed; -} diff --git a/lib/src/get_user_media.dart b/lib/src/get_user_media.dart deleted file mode 100644 index 2c387a3950..0000000000 --- a/lib/src/get_user_media.dart +++ /dev/null @@ -1,58 +0,0 @@ -import 'dart:async'; -import 'package:flutter/services.dart'; -import 'media_stream.dart'; -import 'utils.dart'; - -class MediaDevices { - static Future getUserMedia( - Map mediaConstraints) async { - var channel = WebRTC.methodChannel(); - try { - final response = await channel.invokeMethod>( - 'getUserMedia', - {'constraints': mediaConstraints}, - ); - String streamId = response['streamId']; - var stream = MediaStream(streamId, 'local'); - stream.setMediaTracks(response['audioTracks'], response['videoTracks']); - return stream; - } on PlatformException catch (e) { - throw 'Unable to getUserMedia: ${e.message}'; - } - } - -/* Implement screen sharing, - * use MediaProjection for Android and use ReplayKit for iOS - * TODO(cloudwebrtc): implement for native layer. 
- * */ - static Future getDisplayMedia( - Map mediaConstraints) async { - var channel = WebRTC.methodChannel(); - try { - final response = await channel.invokeMethod>( - 'getDisplayMedia', - {'constraints': mediaConstraints}, - ); - String streamId = response['streamId']; - var stream = MediaStream(streamId, 'local'); - stream.setMediaTracks(response['audioTracks'], response['videoTracks']); - return stream; - } on PlatformException catch (e) { - throw 'Unable to getDisplayMedia: ${e.message}'; - } - } - - static Future> getSources() async { - var channel = WebRTC.methodChannel(); - try { - final response = await channel.invokeMethod>( - 'getSources', - {}, - ); - List sources = response['sources']; - return sources; - } on PlatformException catch (e) { - throw 'Unable to getSources: ${e.message}'; - } - } -} diff --git a/lib/src/helper.dart b/lib/src/helper.dart new file mode 100644 index 0000000000..6f1e9666bf --- /dev/null +++ b/lib/src/helper.dart @@ -0,0 +1,182 @@ +import 'dart:math'; + +import 'package:flutter/foundation.dart'; + +import '../flutter_webrtc.dart'; + +class Helper { + static Future> enumerateDevices(String type) async { + var devices = await navigator.mediaDevices.enumerateDevices(); + return devices.where((d) => d.kind == type).toList(); + } + + /// Return the available cameras + /// + /// Note: Make sure to call this gettet after + /// navigator.mediaDevices.getUserMedia(), otherwise the devices will not be + /// listed. + static Future> get cameras => + enumerateDevices('videoinput'); + + /// Return the available audiooutputs + /// + /// Note: Make sure to call this gettet after + /// navigator.mediaDevices.getUserMedia(), otherwise the devices will not be + /// listed. + static Future> get audiooutputs => + enumerateDevices('audiooutput'); + + /// For web implementation, make sure to pass the target deviceId + static Future switchCamera(MediaStreamTrack track, + [String? deviceId, MediaStream? 
stream]) async { + if (track.kind != 'video') { + throw 'The is not a video track => $track'; + } + + if (!kIsWeb) { + return WebRTC.invokeMethod( + 'mediaStreamTrackSwitchCamera', + {'trackId': track.id}, + ).then((value) => value ?? false); + } + + if (deviceId == null) throw 'You need to specify the deviceId'; + if (stream == null) throw 'You need to specify the stream'; + + var cams = await cameras; + if (!cams.any((e) => e.deviceId == deviceId)) { + throw 'The provided deviceId is not available, make sure to retreive the deviceId from Helper.cammeras()'; + } + + // stop only video tracks + // so that we can recapture video track + stream.getVideoTracks().forEach((track) { + track.stop(); + stream.removeTrack(track); + }); + + var mediaConstraints = { + 'audio': false, // NO need to capture audio again + 'video': {'deviceId': deviceId} + }; + + var newStream = await openCamera(mediaConstraints); + var newCamTrack = newStream.getVideoTracks()[0]; + + await stream.addTrack(newCamTrack, addToNative: true); + + return Future.value(true); + } + + static Future setZoom(MediaStreamTrack videoTrack, double zoomLevel) => + CameraUtils.setZoom(videoTrack, zoomLevel); + + static Future setFocusMode( + MediaStreamTrack videoTrack, CameraFocusMode focusMode) => + CameraUtils.setFocusMode(videoTrack, focusMode); + + static Future setFocusPoint( + MediaStreamTrack videoTrack, Point? point) => + CameraUtils.setFocusPoint(videoTrack, point); + + static Future setExposureMode( + MediaStreamTrack videoTrack, CameraExposureMode exposureMode) => + CameraUtils.setExposureMode(videoTrack, exposureMode); + + static Future setExposurePoint( + MediaStreamTrack videoTrack, Point? point) => + CameraUtils.setExposurePoint(videoTrack, point); + + /// Used to select a specific audio output device. + /// + /// Note: This method is only used for Flutter native, + /// supported on iOS/Android/macOS/Windows. + /// + /// Android/macOS/Windows: Can be used to switch all output devices. 
+ /// iOS: you can only switch directly between the + /// speaker and the preferred device + /// web: flutter web can use RTCVideoRenderer.audioOutput instead + static Future selectAudioOutput(String deviceId) async { + await navigator.mediaDevices + .selectAudioOutput(AudioOutputOptions(deviceId: deviceId)); + } + + /// Set audio input device for Flutter native + /// Note: The usual practice in flutter web is to use deviceId as the + /// `getUserMedia` parameter to get a new audio track and replace it with the + /// audio track in the original rtpsender. + static Future selectAudioInput(String deviceId) => + NativeAudioManagement.selectAudioInput(deviceId); + + /// Enable or disable speakerphone + /// for iOS/Android only + static Future setSpeakerphoneOn(bool enable) => + NativeAudioManagement.setSpeakerphoneOn(enable); + + /// Ensure audio session + /// for iOS only + static Future ensureAudioSession() => + NativeAudioManagement.ensureAudioSession(); + + /// Enable speakerphone, but use bluetooth if audio output device available + /// for iOS/Android only + static Future setSpeakerphoneOnButPreferBluetooth() => + NativeAudioManagement.setSpeakerphoneOnButPreferBluetooth(); + + /// To select a a specific camera, you need to set constraints + /// eg. + /// var constraints = { + /// 'audio': true, + /// 'video': { + /// 'deviceId': Helper.cameras[0].deviceId, + /// } + /// }; + /// + /// var stream = await Helper.openCamera(constraints); + /// + static Future openCamera(Map mediaConstraints) { + return navigator.mediaDevices.getUserMedia(mediaConstraints); + } + + /// Set the volume for Flutter native + static Future setVolume(double volume, MediaStreamTrack track) => + NativeAudioManagement.setVolume(volume, track); + + /// Set the microphone mute/unmute for Flutter native + static Future setMicrophoneMute(bool mute, MediaStreamTrack track) => + NativeAudioManagement.setMicrophoneMute(mute, track); + + /// Set the audio configuration to for Android. 
+ /// Must be set before initiating a WebRTC session and cannot be changed + /// mid session. + static Future setAndroidAudioConfiguration( + AndroidAudioConfiguration androidAudioConfiguration) => + AndroidNativeAudioManagement.setAndroidAudioConfiguration( + androidAudioConfiguration); + + /// After Android app finishes a session, on audio focus loss, clear the active communication device. + static Future clearAndroidCommunicationDevice() => + WebRTC.invokeMethod('clearAndroidCommunicationDevice'); + + /// Set the audio configuration for iOS + static Future setAppleAudioConfiguration( + AppleAudioConfiguration appleAudioConfiguration) => + AppleNativeAudioManagement.setAppleAudioConfiguration( + appleAudioConfiguration); + + /// Set the audio configuration for iOS + static Future setAppleAudioIOMode(AppleAudioIOMode mode, + {bool preferSpeakerOutput = false}) => + AppleNativeAudioManagement.setAppleAudioConfiguration( + AppleNativeAudioManagement.getAppleAudioConfigurationForMode(mode, + preferSpeakerOutput: preferSpeakerOutput)); + + /// Request capture permission for Android + static Future requestCapturePermission() async { + if (WebRTC.platformIsAndroid) { + return await WebRTC.invokeMethod('requestCapturePermission'); + } else { + throw Exception('requestCapturePermission only support for Android'); + } + } +} diff --git a/lib/src/media_devices.dart b/lib/src/media_devices.dart new file mode 100644 index 0000000000..3b2f643ddf --- /dev/null +++ b/lib/src/media_devices.dart @@ -0,0 +1,23 @@ +import '../flutter_webrtc.dart'; + +class MediaDevices { + @Deprecated( + 'Use the navigator.mediaDevices.getUserMedia(Map) provide from the factory instead') + static Future getUserMedia( + Map mediaConstraints) async { + return navigator.mediaDevices.getUserMedia(mediaConstraints); + } + + @Deprecated( + 'Use the navigator.mediaDevices.getDisplayMedia(Map) provide from the factory instead') + static Future getDisplayMedia( + Map mediaConstraints) async { + return 
navigator.mediaDevices.getDisplayMedia(mediaConstraints); + } + + @Deprecated( + 'Use the navigator.mediaDevices.getSources() provide from the factory instead') + static Future> getSources() { + return navigator.mediaDevices.getSources(); + } +} diff --git a/lib/src/media_recorder.dart b/lib/src/media_recorder.dart index 4fe8355f2f..370cfa9c78 100644 --- a/lib/src/media_recorder.dart +++ b/lib/src/media_recorder.dart @@ -1,41 +1,47 @@ -import 'dart:async'; -import 'dart:math'; +import 'package:flutter/foundation.dart'; -import 'enums.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; -import 'utils.dart'; +import 'package:webrtc_interface/webrtc_interface.dart' as rtc; -class MediaRecorder { - static final _random = Random(); - final _recorderId = _random.nextInt(0x7FFFFFFF); +import '../flutter_webrtc.dart'; +import 'native/media_recorder_impl.dart' show MediaRecorderNative; - Future start(String path, - {MediaStreamTrack videoTrack, RecorderAudioChannel audioChannel - // TODO(cloudwebrtc): add codec/quality options - }) async { - if (path == null) { - throw ArgumentError.notNull('path'); - } +class MediaRecorder extends rtc.MediaRecorder { + MediaRecorder({ + String? albumName, + }) : _delegate = (kIsWeb || kIsWasm) + ? mediaRecorder() + : MediaRecorderNative(albumName: albumName); - if (audioChannel == null && videoTrack == null) { - throw Exception('Neither audio nor video track were provided'); - } + final rtc.MediaRecorder _delegate; - await WebRTC.methodChannel().invokeMethod('startRecordToFile', { - 'path': path, - 'audioChannel': audioChannel?.index, - 'videoTrackId': videoTrack?.id, - 'recorderId': _recorderId - }); + @override + Future start( + String path, { + MediaStreamTrack? videoTrack, + RecorderAudioChannel? 
audioChannel, + int rotationDegrees = 0, + }) { + return _delegate.start( + path, + videoTrack: videoTrack, + audioChannel: audioChannel, + ); } - void startWeb(MediaStream stream, - {Function(dynamic blob, bool isLastOne) onDataChunk, - String mimeType = 'video/mp4;codecs=h264'}) { - throw 'It\'s for Flutter Web only'; - } + @override + Future stop() => _delegate.stop(); - Future stop() async => await WebRTC.methodChannel() - .invokeMethod('stopRecordToFile', {'recorderId': _recorderId}); + @override + void startWeb( + MediaStream stream, { + Function(dynamic blob, bool isLastOne)? onDataChunk, + String? mimeType, + int timeSlice = 1000, + }) => + _delegate.startWeb( + stream, + onDataChunk: onDataChunk, + mimeType: mimeType ?? 'video/webm', + timeSlice: timeSlice, + ); } diff --git a/lib/src/media_stream.dart b/lib/src/media_stream.dart deleted file mode 100644 index 37625e0f3d..0000000000 --- a/lib/src/media_stream.dart +++ /dev/null @@ -1,80 +0,0 @@ -import 'dart:async'; -import 'media_stream_track.dart'; -import 'utils.dart'; - -class MediaStream { - MediaStream(this._streamId, this._ownerTag); - final _channel = WebRTC.methodChannel(); - final String _streamId; - final String _ownerTag; - final _audioTracks = []; - final _videoTracks = []; - String get ownerTag => _ownerTag; - String get id => _streamId; - - void setMediaTracks(List audioTracks, List videoTracks) { - _audioTracks.clear(); - audioTracks.forEach((track) { - _audioTracks.add(MediaStreamTrack( - track['id'], track['label'], track['kind'], track['enabled'])); - }); - - _videoTracks.clear(); - videoTracks.forEach((track) { - _videoTracks.add(MediaStreamTrack( - track['id'], track['label'], track['kind'], track['enabled'])); - }); - } - - Future getMediaTracks() async { - final response = await _channel.invokeMethod>( - 'mediaStreamGetTracks', - {'streamId': _streamId}, - ); - - setMediaTracks(response['audioTracks'], response['videoTracks']); - } - - Future addTrack(MediaStreamTrack track, - {bool 
addToNative = true}) async { - if (track.kind == 'audio') { - _audioTracks.add(track); - } else { - _videoTracks.add(track); - } - - if (addToNative) { - await _channel.invokeMethod('mediaStreamAddTrack', - {'streamId': _streamId, 'trackId': track.id}); - } - } - - Future removeTrack(MediaStreamTrack track, - {bool removeFromNative = true}) async { - if (track.kind == 'audio') { - _audioTracks.removeWhere((it) => it.id == track.id); - } else { - _videoTracks.removeWhere((it) => it.id == track.id); - } - - if (removeFromNative) { - await _channel.invokeMethod('mediaStreamRemoveTrack', - {'streamId': _streamId, 'trackId': track.id}); - } - } - - List getAudioTracks() { - return _audioTracks; - } - - List getVideoTracks() { - return _videoTracks; - } - - Future dispose() async { - await _channel.invokeMethod( - 'streamDispose', - {'streamId': _streamId}, - ); - } -} diff --git a/lib/src/media_stream_track.dart b/lib/src/media_stream_track.dart deleted file mode 100644 index fa41a9e258..0000000000 --- a/lib/src/media_stream_track.dart +++ /dev/null @@ -1,80 +0,0 @@ -import 'dart:async'; -import 'utils.dart'; - -class MediaStreamTrack { - MediaStreamTrack(this._trackId, this._label, this._kind, this._enabled); - final _channel = WebRTC.methodChannel(); - final String _trackId; - final String _label; - final String _kind; - bool _enabled; - - set enabled(bool enabled) { - _channel.invokeMethod('mediaStreamTrackSetEnable', - {'trackId': _trackId, 'enabled': enabled}); - _enabled = enabled; - } - - bool get enabled => _enabled; - - String get label => _label; - - String get kind => _kind; - - String get id => _trackId; - - Future hasTorch() => _channel.invokeMethod( - 'mediaStreamTrackHasTorch', - {'trackId': _trackId}, - ); - - Future setTorch(bool torch) => _channel.invokeMethod( - 'mediaStreamTrackSetTorch', - {'trackId': _trackId, 'torch': torch}, - ); - - ///Future contains isFrontCamera - ///Throws error if switching camera failed - Future switchCamera() => 
_channel.invokeMethod( - 'mediaStreamTrackSwitchCamera', - {'trackId': _trackId}, - ); - - void setVolume(double volume) async { - await _channel.invokeMethod( - 'setVolume', - {'trackId': _trackId, 'volume': volume}, - ); - } - - void setMicrophoneMute(bool mute) async { - print('MediaStreamTrack:setMicrophoneMute $mute'); - await _channel.invokeMethod( - 'setMicrophoneMute', - {'trackId': _trackId, 'mute': mute}, - ); - } - - void enableSpeakerphone(bool enable) async { - print('MediaStreamTrack:enableSpeakerphone $enable'); - await _channel.invokeMethod( - 'enableSpeakerphone', - {'trackId': _trackId, 'enable': enable}, - ); - } - - /// On Flutter Web returns Future which contains data url on success - Future captureFrame([String filePath]) { - return _channel.invokeMethod( - 'captureFrame', - {'trackId': _trackId, 'path': filePath}, - ); - } - - Future dispose() async { - await _channel.invokeMethod( - 'trackDispose', - {'trackId': _trackId}, - ); - } -} diff --git a/lib/src/native/adapter_type.dart b/lib/src/native/adapter_type.dart new file mode 100644 index 0000000000..fa5f85a833 --- /dev/null +++ b/lib/src/native/adapter_type.dart @@ -0,0 +1,9 @@ +enum AdapterType { + adapterTypeUnknown, + adapterTypeEthernet, + adapterTypeWifi, + adapterTypeCellular, + adapterTypeVpn, + adapterTypeLoopback, + adapterTypeAny +} diff --git a/lib/src/native/android/audio_configuration.dart b/lib/src/native/android/audio_configuration.dart new file mode 100644 index 0000000000..150962b6a0 --- /dev/null +++ b/lib/src/native/android/audio_configuration.dart @@ -0,0 +1,158 @@ +import '../utils.dart'; + +enum AndroidAudioMode { + normal, + callScreening, + inCall, + inCommunication, + ringtone, +} + +extension AndroidAudioModeEnumEx on String { + AndroidAudioMode toAndroidAudioMode() => + AndroidAudioMode.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioFocusMode { + gain, + gainTransient, + gainTransientExclusive, + gainTransientMayDuck +} + +extension 
AndroidAudioFocusModeEnumEx on String { + AndroidAudioFocusMode toAndroidAudioFocusMode() => + AndroidAudioFocusMode.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioStreamType { + accessibility, + alarm, + dtmf, + music, + notification, + ring, + system, + voiceCall +} + +extension AndroidAudioStreamTypeEnumEx on String { + AndroidAudioStreamType toAndroidAudioStreamType() => + AndroidAudioStreamType.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioAttributesUsageType { + alarm, + assistanceAccessibility, + assistanceNavigationGuidance, + assistanceSonification, + assistant, + game, + media, + notification, + notificationEvent, + notificationRingtone, + unknown, + voiceCommunication, + voiceCommunicationSignalling +} + +extension AndroidAudioAttributesUsageTypeEnumEx on String { + AndroidAudioAttributesUsageType toAndroidAudioAttributesUsageType() => + AndroidAudioAttributesUsageType.values + .firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioAttributesContentType { + movie, + music, + sonification, + speech, + unknown +} + +extension AndroidAudioAttributesContentTypeEnumEx on String { + AndroidAudioAttributesContentType toAndroidAudioAttributesContentType() => + AndroidAudioAttributesContentType.values + .firstWhere((d) => d.name == toLowerCase()); +} + +class AndroidAudioConfiguration { + AndroidAudioConfiguration({ + this.manageAudioFocus, + this.androidAudioMode, + this.androidAudioFocusMode, + this.androidAudioStreamType, + this.androidAudioAttributesUsageType, + this.androidAudioAttributesContentType, + this.forceHandleAudioRouting, + }); + + /// Controls whether audio focus should be automatically managed during + /// a WebRTC session. + final bool? manageAudioFocus; + final AndroidAudioMode? androidAudioMode; + final AndroidAudioFocusMode? androidAudioFocusMode; + final AndroidAudioStreamType? androidAudioStreamType; + final AndroidAudioAttributesUsageType? 
androidAudioAttributesUsageType; + final AndroidAudioAttributesContentType? androidAudioAttributesContentType; + + /// On certain Android devices, audio routing does not function properly and + /// bluetooth microphones will not work unless audio mode is set to + /// `inCommunication` or `inCall`. Audio routing is turned off those cases. + /// + /// If this set to true, will attempt to do audio routing regardless of audio mode. + final bool? forceHandleAudioRouting; + + Map toMap() => { + if (manageAudioFocus != null) 'manageAudioFocus': manageAudioFocus!, + if (androidAudioMode != null) + 'androidAudioMode': androidAudioMode!.name, + if (androidAudioFocusMode != null) + 'androidAudioFocusMode': androidAudioFocusMode!.name, + if (androidAudioStreamType != null) + 'androidAudioStreamType': androidAudioStreamType!.name, + if (androidAudioAttributesUsageType != null) + 'androidAudioAttributesUsageType': + androidAudioAttributesUsageType!.name, + if (androidAudioAttributesContentType != null) + 'androidAudioAttributesContentType': + androidAudioAttributesContentType!.name, + if (forceHandleAudioRouting != null) + 'forceHandleAudioRouting': forceHandleAudioRouting!, + }; + + /// A pre-configured AndroidAudioConfiguration for media playback. + static final media = AndroidAudioConfiguration( + manageAudioFocus: true, + androidAudioMode: AndroidAudioMode.normal, + androidAudioFocusMode: AndroidAudioFocusMode.gain, + androidAudioStreamType: AndroidAudioStreamType.music, + androidAudioAttributesUsageType: AndroidAudioAttributesUsageType.media, + androidAudioAttributesContentType: + AndroidAudioAttributesContentType.unknown, + ); + + /// A pre-configured AndroidAudioConfiguration for voice communication. 
+ static final communication = AndroidAudioConfiguration( + manageAudioFocus: true, + androidAudioMode: AndroidAudioMode.inCommunication, + androidAudioFocusMode: AndroidAudioFocusMode.gain, + androidAudioStreamType: AndroidAudioStreamType.voiceCall, + androidAudioAttributesUsageType: + AndroidAudioAttributesUsageType.voiceCommunication, + androidAudioAttributesContentType: AndroidAudioAttributesContentType.speech, + ); +} + +class AndroidNativeAudioManagement { + static Future setAndroidAudioConfiguration( + AndroidAudioConfiguration config) async { + if (WebRTC.platformIsAndroid) { + await WebRTC.invokeMethod( + 'setAndroidAudioConfiguration', + {'configuration': config.toMap()}, + ); + } + } +} diff --git a/lib/src/native/audio_management.dart b/lib/src/native/audio_management.dart new file mode 100644 index 0000000000..dabf4d5b0f --- /dev/null +++ b/lib/src/native/audio_management.dart @@ -0,0 +1,69 @@ +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class NativeAudioManagement { + static Future selectAudioInput(String deviceId) async { + await WebRTC.invokeMethod( + 'selectAudioInput', + {'deviceId': deviceId}, + ); + } + + static Future setSpeakerphoneOn(bool enable) async { + await WebRTC.invokeMethod( + 'enableSpeakerphone', + {'enable': enable}, + ); + } + + static Future ensureAudioSession() async { + await WebRTC.invokeMethod('ensureAudioSession'); + } + + static Future setSpeakerphoneOnButPreferBluetooth() async { + await WebRTC.invokeMethod('enableSpeakerphoneButPreferBluetooth'); + } + + static Future setVolume(double volume, MediaStreamTrack track) async { + if (track.kind == 'audio') { + if (kIsWeb) { + final constraints = track.getConstraints(); + constraints['volume'] = volume; + await track.applyConstraints(constraints); + } else { + await WebRTC.invokeMethod('setVolume', { + 'trackId': 
track.id, + 'volume': volume, + 'peerConnectionId': + track is MediaStreamTrackNative ? track.peerConnectionId : null + }); + } + } + + return Future.value(); + } + + static Future setMicrophoneMute( + bool mute, MediaStreamTrack track) async { + if (track.kind != 'audio') { + throw 'The is not an audio track => $track'; + } + + if (!kIsWeb) { + try { + await WebRTC.invokeMethod( + 'setMicrophoneMute', + {'trackId': track.id, 'mute': mute}, + ); + } on PlatformException catch (e) { + throw 'Unable to MediaStreamTrack::setMicrophoneMute: ${e.message}'; + } + } + track.enabled = !mute; + } +} diff --git a/lib/src/native/camera_utils.dart b/lib/src/native/camera_utils.dart new file mode 100644 index 0000000000..3557a64e46 --- /dev/null +++ b/lib/src/native/camera_utils.dart @@ -0,0 +1,93 @@ +import 'dart:math'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'utils.dart'; + +enum CameraFocusMode { auto, locked } + +enum CameraExposureMode { auto, locked } + +class CameraUtils { + static Future setZoom( + MediaStreamTrack videoTrack, double zoomLevel) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetZoom', + {'trackId': videoTrack.id, 'zoomLevel': zoomLevel}, + ); + } else { + throw Exception('setZoom only support for mobile devices!'); + } + } + + /// Set the exposure point for the camera, focusMode can be: + /// 'auto', 'locked' + static Future setFocusMode( + MediaStreamTrack videoTrack, CameraFocusMode focusMode) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetFocusMode', + { + 'trackId': videoTrack.id, + 'focusMode': focusMode.name, + }, + ); + } else { + throw Exception('setFocusMode only support for mobile devices!'); + } + } + + static Future setFocusPoint( + MediaStreamTrack videoTrack, Point? 
point) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetFocusPoint', + { + 'trackId': videoTrack.id, + 'focusPoint': { + 'reset': point == null, + 'x': point?.x, + 'y': point?.y, + }, + }, + ); + } else { + throw Exception('setFocusPoint only support for mobile devices!'); + } + } + + static Future setExposureMode( + MediaStreamTrack videoTrack, CameraExposureMode exposureMode) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetExposureMode', + { + 'trackId': videoTrack.id, + 'exposureMode': exposureMode.name, + }, + ); + } else { + throw Exception('setExposureMode only support for mobile devices!'); + } + } + + static Future setExposurePoint( + MediaStreamTrack videoTrack, Point? point) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetExposurePoint', + { + 'trackId': videoTrack.id, + 'exposurePoint': { + 'reset': point == null, + 'x': point?.x, + 'y': point?.y, + }, + }, + ); + } else { + throw Exception('setExposurePoint only support for mobile devices!'); + } + } +} diff --git a/lib/src/native/desktop_capturer_impl.dart b/lib/src/native/desktop_capturer_impl.dart new file mode 100644 index 0000000000..f6cc4aab29 --- /dev/null +++ b/lib/src/native/desktop_capturer_impl.dart @@ -0,0 +1,195 @@ +import 'dart:async'; +import 'dart:typed_data'; + +import '../desktop_capturer.dart'; +import 'event_channel.dart'; +import 'utils.dart'; + +class DesktopCapturerSourceNative extends DesktopCapturerSource { + DesktopCapturerSourceNative( + this._id, this._name, this._thumbnailSize, this._type); + factory DesktopCapturerSourceNative.fromMap(Map map) { + var sourceType = (map['type'] as String) == 'window' + ? 
SourceType.Window + : SourceType.Screen; + var source = DesktopCapturerSourceNative(map['id'], map['name'], + ThumbnailSize.fromMap(map['thumbnailSize']), sourceType); + if (map['thumbnail'] != null) { + source.thumbnail = map['thumbnail'] as Uint8List; + } + return source; + } + + //ignore: close_sinks + final StreamController _onNameChanged = + StreamController.broadcast(sync: true); + + @override + StreamController get onNameChanged => _onNameChanged; + + final StreamController _onThumbnailChanged = + StreamController.broadcast(sync: true); + + @override + StreamController get onThumbnailChanged => _onThumbnailChanged; + + Uint8List? _thumbnail; + String _name; + final String _id; + final ThumbnailSize _thumbnailSize; + final SourceType _type; + + set thumbnail(Uint8List? value) { + _thumbnail = value; + } + + set name(String name) { + _name = name; + } + + @override + String get id => _id; + + @override + String get name => _name; + + @override + Uint8List? get thumbnail => _thumbnail; + + @override + ThumbnailSize get thumbnailSize => _thumbnailSize; + + @override + SourceType get type => _type; +} + +class DesktopCapturerNative extends DesktopCapturer { + DesktopCapturerNative._internal() { + FlutterWebRTCEventChannel.instance.handleEvents.stream.listen((data) { + var event = data.keys.first; + Map map = data[event]; + handleEvent(event, map); + }); + } + static final DesktopCapturerNative instance = + DesktopCapturerNative._internal(); + + @override + StreamController get onAdded => _onAdded; + final StreamController _onAdded = + StreamController.broadcast(sync: true); + + @override + StreamController get onRemoved => _onRemoved; + final StreamController _onRemoved = + StreamController.broadcast(sync: true); + + @override + StreamController get onNameChanged => _onNameChanged; + final StreamController _onNameChanged = + StreamController.broadcast(sync: true); + + @override + StreamController get onThumbnailChanged => + _onThumbnailChanged; + final 
StreamController _onThumbnailChanged = + StreamController.broadcast(sync: true); + + final Map _sources = {}; + + void handleEvent(String event, Map map) async { + switch (event) { + case 'desktopSourceAdded': + final source = DesktopCapturerSourceNative.fromMap(map); + if (_sources[source.id] == null) { + _sources[source.id] = source; + _onAdded.add(source); + } + break; + case 'desktopSourceRemoved': + final id = map['id'] as String; + if (_sources[id] != null) { + _onRemoved.add(_sources.remove(id)!); + } + break; + case 'desktopSourceThumbnailChanged': + final source = _sources[map['id'] as String]; + if (source != null) { + try { + source.thumbnail = map['thumbnail'] as Uint8List; + _onThumbnailChanged.add(source); + source.onThumbnailChanged.add(source.thumbnail!); + } catch (e) { + print('desktopSourceThumbnailChanged: $e'); + } + } + break; + case 'desktopSourceNameChanged': + final source = _sources[map['id'] as String]; + if (source != null) { + source.name = map['name']; + _onNameChanged.add(source); + source.onNameChanged.add(source.name); + } + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + Future> getSources( + {required List types, ThumbnailSize? 
thumbnailSize}) async { + _sources.clear(); + final response = await WebRTC.invokeMethod( + 'getDesktopSources', + { + 'types': types.map((type) => desktopSourceTypeToString[type]).toList(), + if (thumbnailSize != null) 'thumbnailSize': thumbnailSize.toMap(), + }, + ); + if (response == null) { + throw Exception('getDesktopSources return null, something wrong'); + } + for (var source in response['sources']) { + var desktopSource = DesktopCapturerSourceNative.fromMap(source); + _sources[desktopSource.id] = desktopSource; + } + return _sources.values.toList(); + } + + @override + Future updateSources({required List types}) async { + final response = await WebRTC.invokeMethod( + 'updateDesktopSources', + { + 'types': types.map((type) => desktopSourceTypeToString[type]).toList(), + }, + ); + if (response == null) { + throw Exception('updateSources return null, something wrong'); + } + return response['result'] as bool; + } + + Future getThumbnail(DesktopCapturerSourceNative source) async { + final response = await WebRTC.invokeMethod( + 'getDesktopSourceThumbnail', + { + 'sourceId': source.id, + 'thumbnailSize': { + 'width': source.thumbnailSize.width, + 'height': source.thumbnailSize.height + } + }, + ); + if (response == null || !response is Uint8List?) 
{ + throw Exception('getDesktopSourceThumbnail return null, something wrong'); + } + return response as Uint8List?; + } +} diff --git a/lib/src/native/event_channel.dart b/lib/src/native/event_channel.dart new file mode 100644 index 0000000000..de43755a0a --- /dev/null +++ b/lib/src/native/event_channel.dart @@ -0,0 +1,28 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +class FlutterWebRTCEventChannel { + FlutterWebRTCEventChannel._internal() { + EventChannel('FlutterWebRTC.Event') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + static final FlutterWebRTCEventChannel instance = + FlutterWebRTCEventChannel._internal(); + + final StreamController> handleEvents = + StreamController.broadcast(); + + void eventListener(dynamic event) async { + final Map map = event; + handleEvents.add({map['event'] as String: map}); + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } +} diff --git a/lib/src/native/factory_impl.dart b/lib/src/native/factory_impl.dart new file mode 100644 index 0000000000..676e8c67da --- /dev/null +++ b/lib/src/native/factory_impl.dart @@ -0,0 +1,122 @@ +import 'dart:async'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import '../desktop_capturer.dart'; +import 'desktop_capturer_impl.dart'; +import 'frame_cryptor_impl.dart'; +import 'media_recorder_impl.dart'; +import 'media_stream_impl.dart'; +import 'mediadevices_impl.dart'; +import 'navigator_impl.dart'; +import 'rtc_peerconnection_impl.dart'; +import 'rtc_video_renderer_impl.dart'; +import 'utils.dart'; + +class RTCFactoryNative extends RTCFactory { + RTCFactoryNative._internal(); + + static final RTCFactory instance = RTCFactoryNative._internal(); + + @override + Future createLocalMediaStream(String label) async { + final response = await WebRTC.invokeMethod('createLocalMediaStream'); + if (response == null) { + throw Exception('createLocalMediaStream return null, something wrong'); + } + 
return MediaStreamNative(response['streamId'], label); + } + + @override + Future createPeerConnection( + Map configuration, + [Map constraints = const {}]) async { + var defaultConstraints = { + 'mandatory': {}, + 'optional': [ + {'DtlsSrtpKeyAgreement': true}, + ], + }; + + final response = await WebRTC.invokeMethod( + 'createPeerConnection', + { + 'configuration': configuration, + 'constraints': constraints.isEmpty ? defaultConstraints : constraints + }, + ); + + String peerConnectionId = response['peerConnectionId']; + return RTCPeerConnectionNative(peerConnectionId, configuration); + } + + @override + MediaRecorder mediaRecorder() { + return MediaRecorderNative(); + } + + @override + VideoRenderer videoRenderer() { + return RTCVideoRenderer(); + } + + @override + Navigator get navigator => NavigatorNative.instance; + + @override + FrameCryptorFactory get frameCryptorFactory => + FrameCryptorFactoryImpl.instance; + + @override + Future getRtpReceiverCapabilities(String kind) async { + final response = await WebRTC.invokeMethod( + 'getRtpReceiverCapabilities', + { + 'kind': kind, + }, + ); + return RTCRtpCapabilities.fromMap(response); + } + + @override + Future getRtpSenderCapabilities(String kind) async { + final response = await WebRTC.invokeMethod( + 'getRtpSenderCapabilities', + { + 'kind': kind, + }, + ); + return RTCRtpCapabilities.fromMap(response); + } +} + +Future createPeerConnection( + Map configuration, + [Map constraints = const {}]) async { + return RTCFactoryNative.instance + .createPeerConnection(configuration, constraints); +} + +Future createLocalMediaStream(String label) async { + return RTCFactoryNative.instance.createLocalMediaStream(label); +} + +Future getRtpReceiverCapabilities(String kind) async { + return RTCFactoryNative.instance.getRtpReceiverCapabilities(kind); +} + +Future getRtpSenderCapabilities(String kind) async { + return RTCFactoryNative.instance.getRtpSenderCapabilities(kind); +} + +MediaRecorder mediaRecorder() { + return 
RTCFactoryNative.instance.mediaRecorder(); +} + +Navigator get navigator => RTCFactoryNative.instance.navigator; + +DesktopCapturer get desktopCapturer => DesktopCapturerNative.instance; + +MediaDevices get mediaDevices => MediaDeviceNative.instance; + +FrameCryptorFactory get frameCryptorFactory => FrameCryptorFactoryImpl.instance; diff --git a/lib/src/native/frame_cryptor_impl.dart b/lib/src/native/frame_cryptor_impl.dart new file mode 100644 index 0000000000..715e294cf1 --- /dev/null +++ b/lib/src/native/frame_cryptor_impl.dart @@ -0,0 +1,339 @@ +import 'dart:async'; +import 'dart:core'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 'utils.dart'; + +class KeyProviderImpl implements KeyProvider { + KeyProviderImpl(this._id); + final String _id; + @override + String get id => _id; + + @override + Future setSharedKey({required Uint8List key, int index = 0}) async { + try { + await WebRTC.invokeMethod('keyProviderSetSharedKey', { + 'keyProviderId': _id, + 'keyIndex': index, + 'key': key, + }); + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::setSharedKey: ${e.message}'; + } + } + + @override + Future ratchetSharedKey({int index = 0}) async { + try { + final response = await WebRTC.invokeMethod( + 'keyProviderRatchetSharedKey', { + 'keyProviderId': _id, + 'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::ratchetSharedKey: ${e.message}'; + } + } + + @override + Future exportSharedKey({int index = 0}) async { + try { + final response = await WebRTC.invokeMethod( + 'keyProviderExportSharedKey', { + 'keyProviderId': _id, + 'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::exportSharedKey: ${e.message}'; + } + } + + @override + Future setKey({ + required 
String participantId, + required int index, + required Uint8List key, + }) async { + try { + final response = + await WebRTC.invokeMethod('keyProviderSetKey', { + 'keyProviderId': _id, + 'keyIndex': index, + 'key': key, + 'participantId': participantId, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::setKey: ${e.message}'; + } + } + + @override + Future ratchetKey({ + required String participantId, + required int index, + }) async { + try { + final response = + await WebRTC.invokeMethod('keyProviderRatchetKey', { + 'keyProviderId': _id, + 'keyIndex': index, + 'participantId': participantId, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::ratchetKey: ${e.message}'; + } + } + + @override + Future exportKey({ + required String participantId, + required int index, + }) async { + try { + final response = + await WebRTC.invokeMethod('keyProviderExportKey', { + 'keyProviderId': _id, + 'participantId': participantId, + 'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::exportSharedKey: ${e.message}'; + } + } + + @override + Future setSifTrailer({required Uint8List trailer}) async { + try { + await WebRTC.invokeMethod('keyProviderSetSifTrailer', { + 'keyProviderId': _id, + 'sifTrailer': trailer, + }); + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::setSifTrailer: ${e.message}'; + } + } + + @override + Future dispose() async { + try { + await WebRTC.invokeMethod('keyProviderDispose', { + 'keyProviderId': _id, + }); + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::dispose: ${e.message}'; + } + } +} + +class FrameCryptorFactoryImpl implements FrameCryptorFactory { + FrameCryptorFactoryImpl._internal(); + + static final FrameCryptorFactoryImpl instance = + FrameCryptorFactoryImpl._internal(); + + @override + Future 
createFrameCryptorForRtpSender({ + required String participantId, + required RTCRtpSender sender, + required Algorithm algorithm, + required KeyProvider keyProvider, + }) async { + RTCRtpSenderNative nativeSender = sender as RTCRtpSenderNative; + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorFactoryCreateFrameCryptor', { + 'peerConnectionId': nativeSender.peerConnectionId, + 'rtpSenderId': sender.senderId, + 'participantId': participantId, + 'keyProviderId': keyProvider.id, + 'algorithm': algorithm.index, + 'type': 'sender', + }); + var frameCryptorId = response['frameCryptorId']; + return FrameCryptorImpl(frameCryptorId, participantId); + } on PlatformException catch (e) { + throw 'Unable to FrameCryptorFactory::createFrameCryptorForRtpSender: ${e.message}'; + } + } + + @override + Future createFrameCryptorForRtpReceiver({ + required String participantId, + required RTCRtpReceiver receiver, + required Algorithm algorithm, + required KeyProvider keyProvider, + }) async { + RTCRtpReceiverNative nativeReceiver = receiver as RTCRtpReceiverNative; + + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorFactoryCreateFrameCryptor', { + 'peerConnectionId': nativeReceiver.peerConnectionId, + 'rtpReceiverId': nativeReceiver.receiverId, + 'participantId': participantId, + 'keyProviderId': keyProvider.id, + 'algorithm': algorithm.index, + 'type': 'receiver', + }); + var frameCryptorId = response['frameCryptorId']; + return FrameCryptorImpl(frameCryptorId, participantId); + } on PlatformException catch (e) { + throw 'Unable to FrameCryptorFactory::createFrameCryptorForRtpReceiver: ${e.message}'; + } + } + + @override + Future createDefaultKeyProvider( + KeyProviderOptions options) async { + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorFactoryCreateKeyProvider', { + 'keyProviderOptions': options.toJson(), + }); + String keyProviderId = response['keyProviderId']; + return KeyProviderImpl(keyProviderId); + } on 
PlatformException catch (e) { + throw 'Unable to FrameCryptorFactory::createKeyProvider: ${e.message}'; + } + } +} + +class FrameCryptorImpl extends FrameCryptor { + FrameCryptorImpl(this._frameCryptorId, this._participantId) { + _eventSubscription = _eventChannelFor(_frameCryptorId) + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + final String _frameCryptorId; + final String _participantId; + @override + String get participantId => _participantId; + + StreamSubscription? _eventSubscription; + + EventChannel _eventChannelFor(String peerConnectionId) { + return EventChannel('FlutterWebRTC/frameCryptorEvent$_frameCryptorId'); + } + + void errorListener(Object obj) { + if (obj is Exception) throw obj; + } + + FrameCryptorState _cryptorStateFromString(String str) { + switch (str) { + case 'new': + return FrameCryptorState.FrameCryptorStateNew; + case 'ok': + return FrameCryptorState.FrameCryptorStateOk; + case 'decryptionFailed': + return FrameCryptorState.FrameCryptorStateDecryptionFailed; + case 'encryptionFailed': + return FrameCryptorState.FrameCryptorStateEncryptionFailed; + case 'internalError': + return FrameCryptorState.FrameCryptorStateInternalError; + case "keyRatcheted": + return FrameCryptorState.FrameCryptorStateKeyRatcheted; + case 'missingKey': + return FrameCryptorState.FrameCryptorStateMissingKey; + default: + throw 'Unknown FrameCryptorState: $str'; + } + } + + void eventListener(dynamic event) { + final Map map = event; + switch (map['event']) { + case 'frameCryptionStateChanged': + var state = _cryptorStateFromString(map['state']); + var participantId = map['participantId']; + onFrameCryptorStateChanged?.call(participantId, state); + break; + } + } + + @override + Future updateCodec(String codec) async { + /// only needs for flutter web + } + + @override + Future setKeyIndex(int index) async { + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorSetKeyIndex', { + 'frameCryptorId': _frameCryptorId, + 
'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setKeyIndex: ${e.message}'; + } + } + + @override + Future get keyIndex async { + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorGetKeyIndex', { + 'frameCryptorId': _frameCryptorId, + }); + return response['keyIndex']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::keyIndex: ${e.message}'; + } + } + + @override + Future setEnabled(bool enabled) async { + try { + final response = + await WebRTC.invokeMethod('frameCryptorSetEnabled', { + 'frameCryptorId': _frameCryptorId, + 'enabled': enabled, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setEnabled: ${e.message}'; + } + } + + @override + Future get enabled async { + try { + final response = + await WebRTC.invokeMethod('frameCryptorGetEnabled', { + 'frameCryptorId': _frameCryptorId, + }); + return response['enabled']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::enabled: ${e.message}'; + } + } + + @override + Future dispose() async { + _eventSubscription?.cancel(); + _eventSubscription = null; + try { + final response = + await WebRTC.invokeMethod('frameCryptorDispose', { + 'frameCryptorId': _frameCryptorId, + }); + var res = response['result']; + print('res $res'); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::dispose: ${e.message}'; + } + } +} diff --git a/lib/src/native/ios/audio_configuration.dart b/lib/src/native/ios/audio_configuration.dart new file mode 100644 index 0000000000..b840a0a8ae --- /dev/null +++ b/lib/src/native/ios/audio_configuration.dart @@ -0,0 +1,123 @@ +import '../utils.dart'; + +enum AppleAudioMode { + default_, + gameChat, + measurement, + moviePlayback, + spokenAudio, + videoChat, + videoRecording, + voiceChat, + voicePrompt, +} + +extension AppleAudioModeEnumEx on String { + AppleAudioMode 
toAppleAudioMode() => + AppleAudioMode.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AppleAudioCategory { + soloAmbient, + playback, + record, + playAndRecord, + multiRoute, +} + +extension AppleAudioCategoryEnumEx on String { + AppleAudioCategory toAppleAudioCategory() => + AppleAudioCategory.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AppleAudioCategoryOption { + mixWithOthers, + duckOthers, + interruptSpokenAudioAndMixWithOthers, + allowBluetooth, + allowBluetoothA2DP, + allowAirPlay, + defaultToSpeaker, +} + +extension AppleAudioCategoryOptionEnumEx on String { + AppleAudioCategoryOption toAppleAudioCategoryOption() => + AppleAudioCategoryOption.values + .firstWhere((d) => d.name == toLowerCase()); +} + +class AppleAudioConfiguration { + AppleAudioConfiguration({ + this.appleAudioCategory, + this.appleAudioCategoryOptions, + this.appleAudioMode, + }); + final AppleAudioCategory? appleAudioCategory; + final Set? appleAudioCategoryOptions; + final AppleAudioMode? 
appleAudioMode; + + Map toMap() => { + if (appleAudioCategory != null) + 'appleAudioCategory': appleAudioCategory!.name, + if (appleAudioCategoryOptions != null) + 'appleAudioCategoryOptions': + appleAudioCategoryOptions!.map((e) => e.name).toList(), + if (appleAudioMode != null) 'appleAudioMode': appleAudioMode!.name, + }; +} + +enum AppleAudioIOMode { + none, + remoteOnly, + localOnly, + localAndRemote, +} + +class AppleNativeAudioManagement { + static AppleAudioIOMode currentMode = AppleAudioIOMode.none; + + static AppleAudioConfiguration getAppleAudioConfigurationForMode( + AppleAudioIOMode mode, + {bool preferSpeakerOutput = false}) { + currentMode = mode; + if (mode == AppleAudioIOMode.remoteOnly) { + return AppleAudioConfiguration( + appleAudioCategory: AppleAudioCategory.playback, + appleAudioCategoryOptions: { + AppleAudioCategoryOption.mixWithOthers, + }, + appleAudioMode: AppleAudioMode.spokenAudio, + ); + } else if ([ + AppleAudioIOMode.localOnly, + AppleAudioIOMode.localAndRemote, + ].contains(mode)) { + return AppleAudioConfiguration( + appleAudioCategory: AppleAudioCategory.playAndRecord, + appleAudioCategoryOptions: { + AppleAudioCategoryOption.allowBluetooth, + AppleAudioCategoryOption.mixWithOthers, + }, + appleAudioMode: preferSpeakerOutput + ? 
AppleAudioMode.videoChat + : AppleAudioMode.voiceChat, + ); + } + + return AppleAudioConfiguration( + appleAudioCategory: AppleAudioCategory.soloAmbient, + appleAudioCategoryOptions: {}, + appleAudioMode: AppleAudioMode.default_, + ); + } + + static Future setAppleAudioConfiguration( + AppleAudioConfiguration config) async { + if (WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'setAppleAudioConfiguration', + {'configuration': config.toMap()}, + ); + } + } +} diff --git a/lib/src/native/media_recorder_impl.dart b/lib/src/native/media_recorder_impl.dart new file mode 100644 index 0000000000..15f4ae22dd --- /dev/null +++ b/lib/src/native/media_recorder_impl.dart @@ -0,0 +1,58 @@ +import 'dart:async'; +import 'dart:math'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class MediaRecorderNative extends MediaRecorder { + MediaRecorderNative({ + String? albumName = 'FlutterWebRTC', + }) : _albumName = albumName; + static final _random = Random(); + final _recorderId = _random.nextInt(0x7FFFFFFF); + var _isStarted = false; + final String? _albumName; + + @override + Future start( + String path, { + MediaStreamTrack? videoTrack, + RecorderAudioChannel? audioChannel, + }) async { + if (audioChannel == null && videoTrack == null) { + throw Exception('Neither audio nor video track were provided'); + } + + await WebRTC.invokeMethod('startRecordToFile', { + 'path': path, + if (audioChannel != null) 'audioChannel': audioChannel.index, + if (videoTrack != null) 'videoTrackId': videoTrack.id, + 'recorderId': _recorderId, + 'peerConnectionId': videoTrack is MediaStreamTrackNative + ? videoTrack.peerConnectionId + : null + }); + _isStarted = true; + } + + @override + void startWeb(MediaStream stream, + {Function(dynamic blob, bool isLastOne)? onDataChunk, + String? 
mimeType, + int timeSlice = 1000}) { + throw 'It\'s for Flutter Web only'; + } + + @override + Future stop() async { + if (!_isStarted) { + throw "Media recorder not started!"; + } + return await WebRTC.invokeMethod('stopRecordToFile', { + 'recorderId': _recorderId, + 'albumName': _albumName, + }); + } +} diff --git a/lib/src/native/media_stream_impl.dart b/lib/src/native/media_stream_impl.dart new file mode 100644 index 0000000000..49f27a7e5a --- /dev/null +++ b/lib/src/native/media_stream_impl.dart @@ -0,0 +1,110 @@ +import 'dart:async'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'factory_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class MediaStreamNative extends MediaStream { + MediaStreamNative(super.streamId, super.ownerTag); + + factory MediaStreamNative.fromMap(Map map) { + return MediaStreamNative(map['streamId'], map['ownerTag']) + ..setMediaTracks(map['audioTracks'], map['videoTracks']); + } + + final _audioTracks = []; + final _videoTracks = []; + + void setMediaTracks(List audioTracks, List videoTracks) { + _audioTracks.clear(); + + for (var track in audioTracks) { + _audioTracks.add(MediaStreamTrackNative(track['id'], track['label'], + track['kind'], track['enabled'], ownerTag, track['settings'] ?? {})); + } + + _videoTracks.clear(); + for (var track in videoTracks) { + _videoTracks.add(MediaStreamTrackNative(track['id'], track['label'], + track['kind'], track['enabled'], ownerTag, track['settings'] ?? 
{})); + } + } + + @override + List getTracks() { + return [..._audioTracks, ..._videoTracks]; + } + + @override + Future getMediaTracks() async { + final response = await WebRTC.invokeMethod( + 'mediaStreamGetTracks', + {'streamId': id}, + ); + + setMediaTracks(response['audioTracks'], response['videoTracks']); + } + + @override + Future addTrack(MediaStreamTrack track, + {bool addToNative = true}) async { + if (track.kind == 'audio') { + _audioTracks.add(track); + } else { + _videoTracks.add(track); + } + + if (addToNative) { + await WebRTC.invokeMethod('mediaStreamAddTrack', + {'streamId': id, 'trackId': track.id}); + } + } + + @override + Future removeTrack(MediaStreamTrack track, + {bool removeFromNative = true}) async { + if (track.kind == 'audio') { + _audioTracks.removeWhere((it) => it.id == track.id); + } else { + _videoTracks.removeWhere((it) => it.id == track.id); + } + + if (removeFromNative) { + await WebRTC.invokeMethod('mediaStreamRemoveTrack', + {'streamId': id, 'trackId': track.id}); + } + } + + @override + List getAudioTracks() { + return _audioTracks; + } + + @override + List getVideoTracks() { + return _videoTracks; + } + + @override + Future dispose() async { + await WebRTC.invokeMethod( + 'streamDispose', + {'streamId': id}, + ); + } + + @override + // TODO(cloudwebrtc): Implement + bool get active => throw UnimplementedError(); + + @override + Future clone() async { + final cloneStream = await createLocalMediaStream(id); + for (var track in [..._audioTracks, ..._videoTracks]) { + await cloneStream.addTrack(track); + } + return cloneStream; + } +} diff --git a/lib/src/native/media_stream_track_impl.dart b/lib/src/native/media_stream_track_impl.dart new file mode 100644 index 0000000000..e5c714f6f4 --- /dev/null +++ b/lib/src/native/media_stream_track_impl.dart @@ -0,0 +1,132 @@ +import 'dart:async'; +import 'dart:io'; +import 'dart:typed_data'; + +import 'package:path_provider/path_provider.dart'; +import 
'package:webrtc_interface/webrtc_interface.dart'; + +import '../helper.dart'; +import 'utils.dart'; + +class MediaStreamTrackNative extends MediaStreamTrack { + MediaStreamTrackNative(this._trackId, this._label, this._kind, this._enabled, + this._peerConnectionId, + [this.settings_ = const {}]); + + factory MediaStreamTrackNative.fromMap( + Map map, String peerConnectionId) { + return MediaStreamTrackNative(map['id'], map['label'], map['kind'], + map['enabled'], peerConnectionId, map['settings'] ?? {}); + } + final String _trackId; + final String _label; + final String _kind; + final String _peerConnectionId; + final Map settings_; + + bool _enabled; + + bool _muted = false; + + String get peerConnectionId => _peerConnectionId; + + @override + set enabled(bool enabled) { + WebRTC.invokeMethod('mediaStreamTrackSetEnable', { + 'trackId': _trackId, + 'enabled': enabled, + 'peerConnectionId': _peerConnectionId + }); + _enabled = enabled; + + if (kind == 'audio') { + _muted = !enabled; + muted ? onMute?.call() : onUnMute?.call(); + } + } + + @override + bool get enabled => _enabled; + + @override + String get label => _label; + + @override + String get kind => _kind; + + @override + String get id => _trackId; + + @override + bool get muted => _muted; + + @override + Future hasTorch() => WebRTC.invokeMethod( + 'mediaStreamTrackHasTorch', + {'trackId': _trackId}, + ).then((value) => value ?? 
false); + + @override + Future setTorch(bool torch) => WebRTC.invokeMethod( + 'mediaStreamTrackSetTorch', + {'trackId': _trackId, 'torch': torch}, + ); + + @override + Future switchCamera() => Helper.switchCamera(this); + + Future setZoom(double zoomLevel) => Helper.setZoom(this, zoomLevel); + + @Deprecated('Use Helper.setSpeakerphoneOn instead') + @override + void enableSpeakerphone(bool enable) async { + return Helper.setSpeakerphoneOn(enable); + } + + @override + Future captureFrame() async { + var filePath = await getTemporaryDirectory(); + await WebRTC.invokeMethod( + 'captureFrame', + { + 'trackId': _trackId, + 'peerConnectionId': _peerConnectionId, + 'path': '${filePath.path}/captureFrame.png' + }, + ); + return File('${filePath.path}/captureFrame.png') + .readAsBytes() + .then((value) => value.buffer); + } + + @override + Future applyConstraints([Map? constraints]) { + if (constraints == null) return Future.value(); + + var current = getConstraints(); + if (constraints.containsKey('volume') && + current['volume'] != constraints['volume']) { + Helper.setVolume(constraints['volume'], this); + } + + return Future.value(); + } + + @override + Map getSettings() { + return settings_.map((key, value) => MapEntry(key.toString(), value)); + } + + @override + Future dispose() async { + return stop(); + } + + @override + Future stop() async { + await WebRTC.invokeMethod( + 'trackDispose', + {'trackId': _trackId}, + ); + } +} diff --git a/lib/src/native/mediadevices_impl.dart b/lib/src/native/mediadevices_impl.dart new file mode 100644 index 0000000000..47f585615c --- /dev/null +++ b/lib/src/native/mediadevices_impl.dart @@ -0,0 +1,111 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'event_channel.dart'; +import 'media_stream_impl.dart'; +import 'utils.dart'; + +class MediaDeviceNative extends MediaDevices { + MediaDeviceNative._internal() { + 
FlutterWebRTCEventChannel.instance.handleEvents.stream.listen((data) { + var event = data.keys.first; + Map map = data.values.first; + handleEvent(event, map); + }); + } + + static final MediaDeviceNative instance = MediaDeviceNative._internal(); + + void handleEvent(String event, final Map map) async { + switch (map['event']) { + case 'onDeviceChange': + ondevicechange?.call(null); + break; + } + } + + @override + Future getUserMedia( + Map mediaConstraints) async { + try { + final response = await WebRTC.invokeMethod( + 'getUserMedia', + {'constraints': mediaConstraints}, + ); + if (response == null) { + throw Exception('getUserMedia return null, something wrong'); + } + + String streamId = response['streamId']; + var stream = MediaStreamNative(streamId, 'local'); + stream.setMediaTracks( + response['audioTracks'] ?? [], response['videoTracks'] ?? []); + return stream; + } on PlatformException catch (e) { + throw 'Unable to getUserMedia: ${e.message}'; + } + } + + @override + Future getDisplayMedia( + Map mediaConstraints) async { + try { + final response = await WebRTC.invokeMethod( + 'getDisplayMedia', + {'constraints': mediaConstraints}, + ); + if (response == null) { + throw Exception('getDisplayMedia return null, something wrong'); + } + String streamId = response['streamId']; + var stream = MediaStreamNative(streamId, 'local'); + stream.setMediaTracks(response['audioTracks'], response['videoTracks']); + return stream; + } on PlatformException catch (e) { + throw 'Unable to getDisplayMedia: ${e.message}'; + } + } + + @override + Future> getSources() async { + try { + final response = await WebRTC.invokeMethod( + 'getSources', + {}, + ); + + List sources = response['sources']; + + return sources; + } on PlatformException catch (e) { + throw 'Unable to getSources: ${e.message}'; + } + } + + @override + Future> enumerateDevices() async { + var source = await getSources(); + return source + .map( + (e) => MediaDeviceInfo( + deviceId: e['deviceId'], + groupId: 
e['groupId'], + kind: e['kind'], + label: e['label']), + ) + .toList(); + } + + @override + Future selectAudioOutput( + [AudioOutputOptions? options]) async { + await WebRTC.invokeMethod('selectAudioOutput', { + 'deviceId': options?.deviceId, + }); + // TODO(cloudwebrtc): return the selected device + return MediaDeviceInfo(label: 'label', deviceId: options!.deviceId); + } +} diff --git a/lib/src/native/navigator_impl.dart b/lib/src/native/navigator_impl.dart new file mode 100644 index 0000000000..cafe6d7dce --- /dev/null +++ b/lib/src/native/navigator_impl.dart @@ -0,0 +1,27 @@ +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'mediadevices_impl.dart'; + +class NavigatorNative extends Navigator { + NavigatorNative._internal(); + + static final NavigatorNative instance = NavigatorNative._internal(); + + @override + Future getDisplayMedia(Map mediaConstraints) { + return mediaDevices.getDisplayMedia(mediaConstraints); + } + + @override + Future getSources() { + return mediaDevices.enumerateDevices(); + } + + @override + Future getUserMedia(Map mediaConstraints) { + return mediaDevices.getUserMedia(mediaConstraints); + } + + @override + MediaDevices get mediaDevices => MediaDeviceNative.instance; +} diff --git a/lib/src/native/rtc_data_channel_impl.dart b/lib/src/native/rtc_data_channel_impl.dart new file mode 100644 index 0000000000..2c3c84b676 --- /dev/null +++ b/lib/src/native/rtc_data_channel_impl.dart @@ -0,0 +1,143 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'utils.dart'; + +final _typeStringToMessageType = { + 'text': MessageType.text, + 'binary': MessageType.binary +}; + +/// A class that represents a WebRTC datachannel. +/// Can send and receive text and binary messages. +class RTCDataChannelNative extends RTCDataChannel { + RTCDataChannelNative( + this._peerConnectionId, this._label, this._dataChannelId, this._flutterId, + {RTCDataChannelState? 
state}) { + stateChangeStream = _stateChangeController.stream; + messageStream = _messageController.stream; + if (state != null) { + _state = state; + } + _eventSubscription = _eventChannelFor(_peerConnectionId, _flutterId) + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + final String _peerConnectionId; + final String _label; + int _bufferedAmount = 0; + @override + // ignore: overridden_fields + int? bufferedAmountLowThreshold; + + /// Id for the datachannel in the Flutter <-> Native layer. + final String _flutterId; + + int? _dataChannelId; + RTCDataChannelState? _state; + StreamSubscription? _eventSubscription; + + @override + RTCDataChannelState? get state => _state; + + @override + int? get id => _dataChannelId; + + /// Get label. + @override + String? get label => _label; + + @override + int? get bufferedAmount => _bufferedAmount; + + final _stateChangeController = + StreamController.broadcast(sync: true); + final _messageController = + StreamController.broadcast(sync: true); + + /// RTCDataChannel event listener. + void eventListener(dynamic event) { + final Map map = event; + switch (map['event']) { + case 'dataChannelStateChanged': + _dataChannelId = map['id']; + _state = rtcDataChannelStateForString(map['state']); + onDataChannelState?.call(_state!); + + _stateChangeController.add(_state!); + break; + case 'dataChannelReceiveMessage': + _dataChannelId = map['id']; + + var type = _typeStringToMessageType[map['type']]; + dynamic data = map['data']; + RTCDataChannelMessage message; + if (type == MessageType.binary) { + message = RTCDataChannelMessage.fromBinary(data); + } else { + message = RTCDataChannelMessage(data); + } + + onMessage?.call(message); + + _messageController.add(message); + break; + + case 'dataChannelBufferedAmountChange': + _bufferedAmount = map['bufferedAmount']; + if (bufferedAmountLowThreshold != null) { + if (_bufferedAmount < bufferedAmountLowThreshold!) 
{ + onBufferedAmountLow?.call(_bufferedAmount); + } + } + onBufferedAmountChange?.call(_bufferedAmount, map['changedAmount']); + break; + } + } + + EventChannel _eventChannelFor(String peerConnectionId, String flutterId) { + return EventChannel( + 'FlutterWebRTC/dataChannelEvent$peerConnectionId$flutterId'); + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + Future getBufferedAmount() async { + final Map response = await WebRTC.invokeMethod( + 'dataChannelGetBufferedAmount', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _flutterId + }); + _bufferedAmount = response['bufferedAmount']; + return _bufferedAmount; + } + + @override + Future send(RTCDataChannelMessage message) async { + await WebRTC.invokeMethod('dataChannelSend', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _flutterId, + 'type': message.isBinary ? 'binary' : 'text', + 'data': message.isBinary ? message.binary : message.text, + }); + } + + @override + Future close() async { + await _stateChangeController.close(); + await _messageController.close(); + await _eventSubscription?.cancel(); + await WebRTC.invokeMethod('dataChannelClose', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _flutterId + }); + } +} diff --git a/lib/src/native/rtc_dtmf_sender_impl.dart b/lib/src/native/rtc_dtmf_sender_impl.dart new file mode 100644 index 0000000000..4eb7fd9878 --- /dev/null +++ b/lib/src/native/rtc_dtmf_sender_impl.dart @@ -0,0 +1,30 @@ +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'utils.dart'; + +class RTCDTMFSenderNative extends RTCDTMFSender { + RTCDTMFSenderNative(this._peerConnectionId, this._rtpSenderId); + // peer connection Id must be defined as a variable where this function will be called. 
+ final String _peerConnectionId; + final String _rtpSenderId; + + @override + Future insertDTMF(String tones, + {int duration = 100, int interToneGap = 70}) async { + await WebRTC.invokeMethod('sendDtmf', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _rtpSenderId, + 'tone': tones, + 'duration': duration, + 'gap': interToneGap, + }); + } + + @override + Future canInsertDtmf() async { + return await WebRTC.invokeMethod('canInsertDtmf', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _rtpSenderId + }); + } +} diff --git a/lib/src/native/rtc_peerconnection_impl.dart b/lib/src/native/rtc_peerconnection_impl.dart new file mode 100644 index 0000000000..3fe69ab63e --- /dev/null +++ b/lib/src/native/rtc_peerconnection_impl.dart @@ -0,0 +1,622 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_data_channel_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 'rtc_rtp_transceiver_impl.dart'; +import 'utils.dart'; + +/* + * PeerConnection + */ +class RTCPeerConnectionNative extends RTCPeerConnection { + RTCPeerConnectionNative(this._peerConnectionId, this._configuration) { + _eventSubscription = _eventChannelFor(_peerConnectionId) + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + // private: + final String _peerConnectionId; + StreamSubscription? _eventSubscription; + final _localStreams = []; + final _remoteStreams = []; + RTCDataChannelNative? _dataChannel; + Map _configuration; + RTCSignalingState? _signalingState; + RTCIceGatheringState? _iceGatheringState; + RTCIceConnectionState? _iceConnectionState; + RTCPeerConnectionState? 
_connectionState; + + final Map defaultSdpConstraints = { + 'mandatory': { + 'OfferToReceiveAudio': true, + 'OfferToReceiveVideo': true, + }, + 'optional': [], + }; + + @override + RTCSignalingState? get signalingState => _signalingState; + + @override + Future getSignalingState() async { + try { + final response = + await WebRTC.invokeMethod('getSignalingState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _signalingState = signalingStateForString(response['state']); + return _signalingState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getSignalingState: ${e.message}'; + } + } + + @override + RTCIceGatheringState? get iceGatheringState => _iceGatheringState; + + @override + Future getIceGatheringState() async { + try { + final response = + await WebRTC.invokeMethod('getIceGatheringState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _iceGatheringState = iceGatheringStateforString(response['state']); + return _iceGatheringState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getIceGatheringState: ${e.message}'; + } + } + + @override + RTCIceConnectionState? get iceConnectionState => _iceConnectionState; + + @override + Future getIceConnectionState() async { + try { + final response = + await WebRTC.invokeMethod('getIceConnectionState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _iceConnectionState = iceConnectionStateForString(response['state']); + return _iceConnectionState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getIceConnectionState: ${e.message}'; + } + } + + @override + RTCPeerConnectionState? 
get connectionState => _connectionState; + + @override + Future getConnectionState() async { + try { + final response = + await WebRTC.invokeMethod('getConnectionState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _connectionState = peerConnectionStateForString(response['state']); + return _connectionState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getConnectionState: ${e.message}'; + } + } + + Future get localDescription => getLocalDescription(); + + Future get remoteDescription => + getRemoteDescription(); + + /* + * PeerConnection event listener. + */ + void eventListener(dynamic event) { + final Map map = event; + + switch (map['event']) { + case 'signalingState': + _signalingState = signalingStateForString(map['state']); + onSignalingState?.call(_signalingState!); + break; + case 'peerConnectionState': + _connectionState = peerConnectionStateForString(map['state']); + onConnectionState?.call(_connectionState!); + break; + case 'iceGatheringState': + _iceGatheringState = iceGatheringStateforString(map['state']); + onIceGatheringState?.call(_iceGatheringState!); + break; + case 'iceConnectionState': + _iceConnectionState = iceConnectionStateForString(map['state']); + onIceConnectionState?.call(_iceConnectionState!); + break; + case 'onCandidate': + Map cand = map['candidate']; + var candidate = RTCIceCandidate( + cand['candidate'], cand['sdpMid'], cand['sdpMLineIndex']); + onIceCandidate?.call(candidate); + break; + case 'onAddStream': + String streamId = map['streamId']; + + var stream = + _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { + var newStream = MediaStreamNative(streamId, _peerConnectionId); + newStream.setMediaTracks(map['audioTracks'], map['videoTracks']); + return newStream; + }); + + onAddStream?.call(stream); + _remoteStreams.add(stream); + break; + case 'onRemoveStream': + String streamId = map['streamId']; + + for (var item in 
_remoteStreams) { + if (item.id == streamId) { + onRemoveStream?.call(item); + break; + } + } + _remoteStreams.removeWhere((it) => it.id == streamId); + break; + case 'onAddTrack': + String streamId = map['streamId']; + Map track = map['track']; + + var newTrack = MediaStreamTrackNative( + track['id'], + track['label'], + track['kind'], + track['enabled'], + _peerConnectionId, + track['settings'] ?? {}); + String kind = track['kind']; + + var stream = + _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { + var newStream = MediaStreamNative(streamId, _peerConnectionId); + _remoteStreams.add(newStream); + return newStream; + }); + + var oldTracks = (kind == 'audio') + ? stream.getAudioTracks() + : stream.getVideoTracks(); + var oldTrack = oldTracks.isNotEmpty ? oldTracks[0] : null; + if (oldTrack != null) { + stream.removeTrack(oldTrack, removeFromNative: false); + onRemoveTrack?.call(stream, oldTrack); + } + + stream.addTrack(newTrack, addToNative: false); + onAddTrack?.call(stream, newTrack); + break; + case 'onRemoveTrack': + String trackId = map['trackId']; + for (var stream in _remoteStreams) { + stream.getTracks().forEach((track) { + if (track.id == trackId) { + onRemoveTrack?.call(stream, track); + stream.removeTrack(track, removeFromNative: false); + return; + } + }); + } + break; + case 'didOpenDataChannel': + int dataChannelId = map['id']; + String label = map['label']; + String flutterId = map['flutterId']; + _dataChannel = RTCDataChannelNative( + _peerConnectionId, label, dataChannelId, flutterId, + state: RTCDataChannelState.RTCDataChannelOpen); + onDataChannel?.call(_dataChannel!); + break; + case 'onRenegotiationNeeded': + onRenegotiationNeeded?.call(); + break; + + /// Unified-Plan + case 'onTrack': + var params = map['streams'] as List; + var streams = params.map((e) => MediaStreamNative.fromMap(e)).toList(); + var transceiver = map['transceiver'] != null + ? 
RTCRtpTransceiverNative.fromMap(map['transceiver'], + peerConnectionId: _peerConnectionId) + : null; + onTrack?.call(RTCTrackEvent( + receiver: RTCRtpReceiverNative.fromMap(map['receiver'], + peerConnectionId: _peerConnectionId), + streams: streams, + track: + MediaStreamTrackNative.fromMap(map['track'], _peerConnectionId), + transceiver: transceiver)); + break; + + /// Other + case 'onSelectedCandidatePairChanged': + + /// class RTCIceCandidatePair { + /// RTCIceCandidatePair(this.local, this.remote, this.lastReceivedMs, this.reason); + /// factory RTCIceCandidatePair.fromMap(Map map) { + /// return RTCIceCandidatePair( + /// RTCIceCandidate.fromMap(map['local']), + /// RTCIceCandidate.fromMap(map['remote']), + /// map['lastReceivedMs'], + /// map['reason']); + /// } + /// RTCIceCandidate local; + /// RTCIceCandidate remote; + /// int lastReceivedMs; + /// String reason; + /// } + /// + /// typedef SelectedCandidatePairChangedCallback = void Function(RTCIceCandidatePair pair); + /// SelectedCandidatePairChangedCallback onSelectedCandidatePairChanged; + /// + /// RTCIceCandidatePair iceCandidatePair = RTCIceCandidatePair.fromMap(map); + /// onSelectedCandidatePairChanged?.call(iceCandidatePair); + + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) throw obj; + } + + @override + Future dispose() async { + await _eventSubscription?.cancel(); + await WebRTC.invokeMethod( + 'peerConnectionDispose', + {'peerConnectionId': _peerConnectionId}, + ); + } + + EventChannel _eventChannelFor(String peerConnectionId) { + return EventChannel('FlutterWebRTC/peerConnectionEvent$peerConnectionId'); + } + + @override + Map get getConfiguration => _configuration; + + @override + Future setConfiguration(Map configuration) async { + _configuration = configuration; + try { + await WebRTC.invokeMethod('setConfiguration', { + 'peerConnectionId': _peerConnectionId, + 'configuration': configuration, + }); + } on PlatformException catch (e) { + throw 'Unable to 
RTCPeerConnection::setConfiguration: ${e.message}'; + } + } + + @override + Future createOffer( + [Map? constraints]) async { + try { + final response = + await WebRTC.invokeMethod('createOffer', { + 'peerConnectionId': _peerConnectionId, + 'constraints': constraints ?? defaultSdpConstraints + }); + + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createOffer: ${e.message}'; + } + } + + @override + Future createAnswer( + [Map? constraints]) async { + try { + final response = + await WebRTC.invokeMethod('createAnswer', { + 'peerConnectionId': _peerConnectionId, + 'constraints': constraints ?? defaultSdpConstraints + }); + + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createAnswer: ${e.message}'; + } + } + + @override + Future addStream(MediaStream stream) async { + _localStreams.add(stream); + await WebRTC.invokeMethod('addStream', { + 'peerConnectionId': _peerConnectionId, + 'streamId': stream.id, + }); + } + + @override + Future removeStream(MediaStream stream) async { + _localStreams.removeWhere((it) => it.id == stream.id); + await WebRTC.invokeMethod('removeStream', { + 'peerConnectionId': _peerConnectionId, + 'streamId': stream.id, + }); + } + + @override + Future setLocalDescription(RTCSessionDescription description) async { + try { + await WebRTC.invokeMethod('setLocalDescription', { + 'peerConnectionId': _peerConnectionId, + 'description': description.toMap(), + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::setLocalDescription: ${e.message}'; + } + } + + @override + Future setRemoteDescription(RTCSessionDescription description) async { + try { + await WebRTC.invokeMethod('setRemoteDescription', { + 'peerConnectionId': _peerConnectionId, + 'description': 
description.toMap(), + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::setRemoteDescription: ${e.message}'; + } + } + + @override + Future getLocalDescription() async { + try { + final response = + await WebRTC.invokeMethod('getLocalDescription', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getLocalDescription: ${e.message}'; + } + } + + @override + Future getRemoteDescription() async { + try { + final response = + await WebRTC.invokeMethod('getRemoteDescription', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getRemoteDescription: ${e.message}'; + } + } + + @override + Future addCandidate(RTCIceCandidate candidate) async { + try { + await WebRTC.invokeMethod('addCandidate', { + 'peerConnectionId': _peerConnectionId, + 'candidate': candidate.toMap(), + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addCandidate: ${e.message}'; + } + } + + @override + Future> getStats([MediaStreamTrack? 
track]) async { + try { + final response = await WebRTC.invokeMethod('getStats', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track?.id + }); + + var stats = []; + if (response != null) { + List reports = response['stats']; + for (var report in reports) { + stats.add(StatsReport(report['id'], report['type'], + (report['timestamp'] as num).toDouble(), report['values'])); + } + } + return stats; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getStats: ${e.message}'; + } + } + + @override + List getLocalStreams() { + return _localStreams; + } + + @override + List getRemoteStreams() { + return _remoteStreams; + } + + @override + Future createDataChannel( + String label, RTCDataChannelInit dataChannelDict) async { + try { + final response = + await WebRTC.invokeMethod('createDataChannel', { + 'peerConnectionId': _peerConnectionId, + 'label': label, + 'dataChannelDict': dataChannelDict.toMap() + }); + + _dataChannel = RTCDataChannelNative( + _peerConnectionId, label, response['id'], response['flutterId']); + return _dataChannel!; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createDataChannel: ${e.message}'; + } + } + + @override + RTCDTMFSender createDtmfSender(MediaStreamTrack track) { + return RTCDTMFSenderNative(_peerConnectionId, ''); + } + + @override + Future restartIce() async { + try { + await WebRTC.invokeMethod('restartIce', { + 'peerConnectionId': _peerConnectionId, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::restartIce: ${e.message}'; + } + } + + @override + Future close() async { + try { + await WebRTC.invokeMethod('peerConnectionClose', { + 'peerConnectionId': _peerConnectionId, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::close: ${e.message}'; + } + } + + /// Unified-Plan. 
+ @override + Future> getSenders() async { + try { + final response = await WebRTC.invokeMethod('getSenders', + {'peerConnectionId': _peerConnectionId}); + return RTCRtpSenderNative.fromMaps(response['senders'], + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getSenders: ${e.message}'; + } + } + + @override + Future> getReceivers() async { + try { + final response = await WebRTC.invokeMethod('getReceivers', + {'peerConnectionId': _peerConnectionId}); + return RTCRtpReceiverNative.fromMaps(response['receivers'], + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getReceivers: ${e.message}'; + } + } + + @override + Future> getTransceivers() async { + try { + final response = await WebRTC.invokeMethod('getTransceivers', + {'peerConnectionId': _peerConnectionId}); + return RTCRtpTransceiverNative.fromMaps(response['transceivers'], + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getTransceivers: ${e.message}'; + } + } + + @override + Future addTrack(MediaStreamTrack track, + [MediaStream?
stream]) async { + try { + final response = await WebRTC.invokeMethod('addTrack', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track.id, + 'streamIds': [stream?.id] + }); + return RTCRtpSenderNative.fromMap(response, + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTrack: ${e.message}'; + } + } + + @override + Future removeTrack(RTCRtpSender sender) async { + try { + final response = await WebRTC.invokeMethod( + 'removeTrack', { + 'peerConnectionId': _peerConnectionId, + 'senderId': sender.senderId + }); + bool result = response['result']; + + if (result && (sender is RTCRtpSenderNative)) { + sender.removeTrackReference(); + } + + return result; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::removeTrack: ${e.message}'; + } + } + + @override + Future addTransceiver( + {MediaStreamTrack? track, + RTCRtpMediaType? kind, + RTCRtpTransceiverInit? init}) async { + try { + final response = + await WebRTC.invokeMethod('addTransceiver', { + 'peerConnectionId': _peerConnectionId, + if (track != null) 'trackId': track.id, + if (kind != null) 'mediaType': typeRTCRtpMediaTypetoString[kind], + if (init != null) + 'transceiverInit': RTCRtpTransceiverInitNative.initToMap(init) + }); + return RTCRtpTransceiverNative.fromMap(response, + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTransceiver: ${e.message}'; + } + } +} diff --git a/lib/src/native/rtc_rtp_receiver_impl.dart b/lib/src/native/rtc_rtp_receiver_impl.dart new file mode 100644 index 0000000000..4b3e8a6101 --- /dev/null +++ b/lib/src/native/rtc_rtp_receiver_impl.dart @@ -0,0 +1,68 @@ +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class RTCRtpReceiverNative extends RTCRtpReceiver { + RTCRtpReceiverNative( + this._id, this._track, 
this._parameters, this._peerConnectionId); + + factory RTCRtpReceiverNative.fromMap(Map map, + {required String peerConnectionId}) { + var track = MediaStreamTrackNative.fromMap(map['track'], peerConnectionId); + var parameters = RTCRtpParameters.fromMap(map['rtpParameters']); + return RTCRtpReceiverNative( + map['receiverId'], track, parameters, peerConnectionId); + } + + static List fromMaps(List map, + {required String peerConnectionId}) { + return map + .map((e) => + RTCRtpReceiverNative.fromMap(e, peerConnectionId: peerConnectionId)) + .toList(); + } + + @override + Future> getStats() async { + try { + final response = await WebRTC.invokeMethod('getStats', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track.id + }); + var stats = []; + if (response != null) { + List reports = response['stats']; + for (var report in reports) { + stats.add(StatsReport(report['id'], report['type'], + report['timestamp'], report['values'])); + } + } + return stats; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpReceiverNative::getStats: ${e.message}'; + } + } + + /// private: + String _id; + String _peerConnectionId; + MediaStreamTrack _track; + RTCRtpParameters _parameters; + + /// The WebRTC specification only defines RTCRtpParameters in terms of senders, + /// but this API also applies them to receivers, similar to ORTC: + /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 
+ @override + RTCRtpParameters get parameters => _parameters; + + @override + MediaStreamTrack get track => _track; + + @override + String get receiverId => _id; + + String get peerConnectionId => _peerConnectionId; +} diff --git a/lib/src/native/rtc_rtp_sender_impl.dart b/lib/src/native/rtc_rtp_sender_impl.dart new file mode 100644 index 0000000000..81fb450307 --- /dev/null +++ b/lib/src/native/rtc_rtp_sender_impl.dart @@ -0,0 +1,159 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; +import 'utils.dart'; + +class RTCRtpSenderNative extends RTCRtpSender { + RTCRtpSenderNative(this._id, this._track, this._dtmf, this._parameters, + this._ownsTrack, this._peerConnectionId); + + factory RTCRtpSenderNative.fromMap(Map map, + {required String peerConnectionId}) { + Map trackMap = map['track']; + return RTCRtpSenderNative( + map['senderId'], + (trackMap.isNotEmpty) + ? MediaStreamTrackNative.fromMap(map['track'], peerConnectionId) + : null, + RTCDTMFSenderNative(peerConnectionId, map['senderId']), + RTCRtpParameters.fromMap(map['rtpParameters']), + map['ownsTrack'], + peerConnectionId); + } + + static List fromMaps(List map, + {required String peerConnectionId}) { + return map + .map((e) => + RTCRtpSenderNative.fromMap(e, peerConnectionId: peerConnectionId)) + .toList(); + } + + String _peerConnectionId; + String _id; + MediaStreamTrack? 
_track; + final Set _streams = {}; + RTCDTMFSender _dtmf; + RTCRtpParameters _parameters; + bool _ownsTrack = false; + + @override + Future> getStats() async { + try { + final response = await WebRTC.invokeMethod('getStats', { + 'peerConnectionId': _peerConnectionId, + if (track != null) 'trackId': track!.id, + }); + var stats = []; + if (response != null) { + List reports = response['stats']; + for (var report in reports) { + stats.add(StatsReport(report['id'], report['type'], + report['timestamp'], report['values'])); + } + } + return stats; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::getStats: ${e.message}'; + } + } + + @override + Future setParameters(RTCRtpParameters parameters) async { + _parameters = parameters; + try { + final response = + await WebRTC.invokeMethod('rtpSenderSetParameters', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'parameters': parameters.toMap() + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setParameters: ${e.message}'; + } + } + + @override + Future replaceTrack(MediaStreamTrack? track) async { + try { + await WebRTC.invokeMethod('rtpSenderReplaceTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track != null ? track.id : '' + }); + + // change reference of associated MediaTrack + _track = track; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::replaceTrack: ${e.message}'; + } + } + + @override + Future setTrack(MediaStreamTrack? track, + {bool takeOwnership = true}) async { + try { + await WebRTC.invokeMethod('rtpSenderSetTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track != null ? 
track.id : '', + 'takeOwnership': takeOwnership, + }); + + // change reference of associated MediaTrack + _track = track; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setTrack: ${e.message}'; + } + } + + @override + Future setStreams(List streams) async { + try { + await WebRTC.invokeMethod('rtpSenderSetStreams', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'streamIds': streams.map((e) => e.id).toList(), + }); + + // change reference of associated MediaTrack + _streams.addAll(streams); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::setTrack: ${e.message}'; + } + } + + void removeTrackReference() { + _track = null; + } + + @override + RTCRtpParameters get parameters => _parameters; + + @override + MediaStreamTrack? get track => _track; + + @override + String get senderId => _id; + + @override + bool get ownsTrack => _ownsTrack; + + @override + RTCDTMFSender get dtmfSender => _dtmf; + + String get peerConnectionId => _peerConnectionId; + + @Deprecated( + 'No need to dispose rtpSender as it is handled by peerConnection.') + @override + @mustCallSuper + Future dispose() async {} +} diff --git a/lib/src/native/rtc_rtp_transceiver_impl.dart b/lib/src/native/rtc_rtp_transceiver_impl.dart new file mode 100644 index 0000000000..7bc4e19090 --- /dev/null +++ b/lib/src/native/rtc_rtp_transceiver_impl.dart @@ -0,0 +1,183 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 'utils.dart'; + +List listToRtpEncodings(List> list) { + return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); +} + +class RTCRtpTransceiverInitNative extends RTCRtpTransceiverInit { + RTCRtpTransceiverInitNative(TransceiverDirection direction, + List streams, List sendEncodings) + : super( + direction: direction, + streams: streams, + 
sendEncodings: sendEncodings); + + factory RTCRtpTransceiverInitNative.fromMap(Map map) { + return RTCRtpTransceiverInitNative( + typeStringToRtpTransceiverDirection[map['direction']]!, + (map['streams'] as List) + .map((e) => MediaStreamNative.fromMap(map)) + .toList(), + listToRtpEncodings(map['sendEncodings'])); + } + + Map toMap() { + return { + 'direction': typeRtpTransceiverDirectionToString[direction], + if (streams != null) 'streamIds': streams!.map((e) => e.id).toList(), + if (sendEncodings != null) + 'sendEncodings': sendEncodings!.map((e) => e.toMap()).toList(), + }; + } + + static Map initToMap(RTCRtpTransceiverInit init) { + return { + 'direction': typeRtpTransceiverDirectionToString[init.direction], + if (init.streams != null) + 'streamIds': init.streams!.map((e) => e.id).toList(), + if (init.sendEncodings != null) + 'sendEncodings': init.sendEncodings!.map((e) => e.toMap()).toList(), + }; + } +} + +class RTCRtpTransceiverNative extends RTCRtpTransceiver { + RTCRtpTransceiverNative( + this._id, + this._direction, + this._mid, + this._sender, + this._receiver, + this._peerConnectionId, + ); + + factory RTCRtpTransceiverNative.fromMap(Map map, + {required String peerConnectionId}) { + var transceiver = RTCRtpTransceiverNative( + map['transceiverId'] ?? '', + typeStringToRtpTransceiverDirection[map['direction']]!, + map['mid'] ?? 
'', + RTCRtpSenderNative.fromMap(map['sender'], + peerConnectionId: peerConnectionId), + RTCRtpReceiverNative.fromMap(map['receiver'], + peerConnectionId: peerConnectionId), + peerConnectionId); + return transceiver; + } + + static List fromMaps(List map, + {required String peerConnectionId}) { + return map + .map((e) => RTCRtpTransceiverNative.fromMap(e, + peerConnectionId: peerConnectionId)) + .toList(); + } + + String _peerConnectionId; + String _id; + bool _stop = false; + TransceiverDirection _direction; + String _mid; + RTCRtpSender _sender; + RTCRtpReceiver _receiver; + + set peerConnectionId(String id) { + _peerConnectionId = id; + } + + @override + String get mid => _mid; + + @override + RTCRtpSender get sender => _sender; + + @override + RTCRtpReceiver get receiver => _receiver; + + @override + bool get stoped => _stop; + + @override + String get transceiverId => _id; + + @override + Future setDirection(TransceiverDirection direction) async { + try { + await WebRTC.invokeMethod('rtpTransceiverSetDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id, + 'direction': typeRtpTransceiverDirectionToString[direction] + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::setDirection: ${e.message}'; + } + } + + @override + Future getCurrentDirection() async { + try { + final response = await WebRTC.invokeMethod( + 'rtpTransceiverGetCurrentDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + return response != null + ? 
typeStringToRtpTransceiverDirection[response['result']] + : null; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::getCurrentDirection: ${e.message}'; + } + } + + @override + Future getDirection() async { + try { + final response = await WebRTC.invokeMethod( + 'rtpTransceiverGetDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + + _direction = typeStringToRtpTransceiverDirection[response['result']]!; + return _direction; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::getDirection: ${e.message}'; + } + } + + @override + Future stop() async { + try { + await WebRTC.invokeMethod('rtpTransceiverStop', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + + _stop = true; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::stop: ${e.message}'; + } + } + + @override + Future setCodecPreferences(List codecs) async { + try { + await WebRTC.invokeMethod('setCodecPreferences', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id, + 'codecs': codecs.map((e) => e.toMap()).toList() + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::setCodecPreferences: ${e.message}'; + } + } +} diff --git a/lib/src/native/rtc_track_event_impl.dart b/lib/src/native/rtc_track_event_impl.dart new file mode 100644 index 0000000000..760afb15d2 --- /dev/null +++ b/lib/src/native/rtc_track_event_impl.dart @@ -0,0 +1,30 @@ +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_transceiver_impl.dart'; + +class RTCTrackEventNative extends RTCTrackEvent { + RTCTrackEventNative(RTCRtpReceiver receiver, List streams, + MediaStreamTrack track, RTCRtpTransceiver transceiver) + : super( + receiver: receiver, + streams: streams, + track: track, + transceiver: transceiver); + + factory 
RTCTrackEventNative.fromMap( + Map map, String peerConnectionId) { + var streamsParams = map['streams'] as List>; + var streams = + streamsParams.map((e) => MediaStreamNative.fromMap(e)).toList(); + return RTCTrackEventNative( + RTCRtpReceiverNative.fromMap(map['receiver'], + peerConnectionId: peerConnectionId), + streams, + MediaStreamTrackNative.fromMap(map['track'], peerConnectionId), + RTCRtpTransceiverNative.fromMap(map['transceiver'], + peerConnectionId: peerConnectionId)); + } +} diff --git a/lib/src/native/rtc_video_platform_view.dart b/lib/src/native/rtc_video_platform_view.dart new file mode 100644 index 0000000000..0dd5cb6bcf --- /dev/null +++ b/lib/src/native/rtc_video_platform_view.dart @@ -0,0 +1,102 @@ +import 'dart:math'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter/widgets.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_video_platform_view_controller.dart'; + +class RTCVideoPlatFormView extends StatefulWidget { + const RTCVideoPlatFormView({ + super.key, + required this.onViewReady, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + }); + final void Function(RTCVideoPlatformViewController)? onViewReady; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + @override + NativeVideoPlayerViewState createState() => NativeVideoPlayerViewState(); +} + +class NativeVideoPlayerViewState extends State { + RTCVideoPlatformViewController? 
_controller; + bool _showVideoView = false; + @override + void dispose() { + _controller?.onFirstFrameRendered = null; + _controller?.onSrcObjectChange = null; + _controller?.onResize = null; + _controller = null; + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) => + _buildVideoView(context, constraints)); + } + + Widget _buildVideoView(BuildContext context, BoxConstraints constraints) { + return Center( + child: FittedBox( + clipBehavior: Clip.hardEdge, + fit: widget.objectFit == + RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? BoxFit.contain + : BoxFit.cover, + child: Center( + child: SizedBox( + width: _showVideoView + ? widget.objectFit == + RTCVideoViewObjectFit.RTCVideoViewObjectFitCover + ? constraints.maxWidth + : constraints.maxHeight * + (_controller?.value.aspectRatio ?? 1.0) + : 0.1, + height: _showVideoView ? constraints.maxHeight : 0.1, + child: Transform( + transform: Matrix4.identity()..rotateY(widget.mirror ? 
-pi : 0.0), + alignment: FractionalOffset.center, + child: _buildNativeView(), + ), + ), + ), + ), + ); + } + + Widget _buildNativeView() { + const viewType = 'rtc_video_platform_view'; + if (defaultTargetPlatform == TargetPlatform.iOS) { + return UiKitView( + viewType: viewType, + onPlatformViewCreated: onPlatformViewCreated, + creationParams: {}, + creationParamsCodec: const StandardMessageCodec(), + ); + } + return Text('RTCVideoPlatformView only support for iOS.'); + } + + void showVideoView(bool show) { + if (mounted) { + _showVideoView = show; + setState(() {}); + } + } + + Future onPlatformViewCreated(int id) async { + final controller = RTCVideoPlatformViewController(id); + _controller = controller; + controller.onFirstFrameRendered = () => showVideoView(true); + controller.onSrcObjectChange = () => showVideoView(false); + controller.onResize = () => showVideoView(true); + widget.onViewReady?.call(controller); + await _controller?.initialize(); + } +} diff --git a/lib/src/native/rtc_video_platform_view_controller.dart b/lib/src/native/rtc_video_platform_view_controller.dart new file mode 100644 index 0000000000..e9eeb1d51c --- /dev/null +++ b/lib/src/native/rtc_video_platform_view_controller.dart @@ -0,0 +1,183 @@ +import 'dart:async'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import '../helper.dart'; +import 'utils.dart'; + +class RTCVideoPlatformViewController extends ValueNotifier + implements VideoRenderer { + RTCVideoPlatformViewController(int viewId) : super(RTCVideoValue.empty) { + _viewId = viewId; + } + int? _viewId; + bool _disposed = false; + MediaStream? _srcObject; + StreamSubscription? 
_eventSubscription; + + @override + Future initialize() async { + _eventSubscription?.cancel(); + _eventSubscription = EventChannel('FlutterWebRTC/PlatformViewId$_viewId') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + @override + int get videoWidth => value.width.toInt(); + + @override + int get videoHeight => value.height.toInt(); + + @override + int? get textureId => _viewId; + + @override + MediaStream? get srcObject => _srcObject; + + @override + Function? onResize; + + @override + Function? onFirstFrameRendered; + + Function? onSrcObjectChange; + + @override + set srcObject(MediaStream? stream) { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoPlatformController is disposed'; + } + if (_viewId == null) throw 'Call initialize before setting the stream'; + if (_srcObject == stream) return; + _srcObject = stream; + onSrcObjectChange?.call(); + WebRTC.invokeMethod( + 'videoPlatformViewRendererSetSrcObject', { + 'viewId': _viewId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '' + }).then((_) { + value = (stream == null) + ? RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + }).catchError((e) { + print( + 'Got exception for RTCVideoPlatformController::setSrcObject: ${e.message}'); + }, test: (e) => e is PlatformException); + } + + Future setSrcObject({MediaStream? stream, String? trackId}) async { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoPlatformController is disposed'; + } + if (_viewId == null) throw 'Call initialize before setting the stream'; + if (_srcObject == stream) return; + _srcObject = stream; + onSrcObjectChange?.call(); + var oldviewId = _viewId; + try { + await WebRTC.invokeMethod( + 'videoPlatformViewRendererSetSrcObject', { + 'viewId': _viewId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '', + 'trackId': trackId ?? '0' + }); + value = (stream == null) + ? 
RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + } on PlatformException catch (e) { + throw 'Got exception for RTCVideoPlatformController::setSrcObject: viewId $oldviewId [disposed: $_disposed] with stream ${stream?.id}, error: ${e.message}'; + } + } + + @override + Future dispose() async { + if (_disposed) return; + await _eventSubscription?.cancel(); + _eventSubscription = null; + if (_viewId != null) { + try { + await WebRTC.invokeMethod( + 'videoPlatformViewRendererDispose', { + 'viewId': _viewId, + }); + _viewId = null; + } on PlatformException catch (e) { + throw 'Failed to RTCVideoPlatformController::dispose: ${e.message}'; + } + } + _disposed = true; + super.dispose(); + } + + void eventListener(dynamic event) { + if (_disposed) return; + final Map map = event; + switch (map['event']) { + case 'didPlatformViewChangeRotation': + value = + value.copyWith(rotation: map['rotation'], renderVideo: renderVideo); + onResize?.call(); + break; + case 'didPlatformViewChangeVideoSize': + value = value.copyWith( + width: 0.0 + map['width'], + height: 0.0 + map['height'], + renderVideo: renderVideo); + onResize?.call(); + break; + case 'didFirstFrameRendered': + value = value.copyWith(renderVideo: renderVideo); + onFirstFrameRendered?.call(); + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + bool get renderVideo => _viewId != null && _srcObject != null; + + @override + bool get muted => _srcObject?.getAudioTracks()[0].muted ?? 
true; + + @override + set muted(bool mute) { + if (_disposed) { + throw Exception( + 'Can\'t be muted: The RTCVideoPlatformController is disposed'); + } + if (_srcObject == null) { + throw Exception('Can\'t be muted: The MediaStream is null'); + } + if (_srcObject!.ownerTag != 'local') { + throw Exception( + 'You\'re trying to mute a remote track, this is not supported'); + } + if (_srcObject!.getAudioTracks().isEmpty) { + throw Exception('Can\'t be muted: The MediaStreamTrack(audio) is empty'); + } + + Helper.setMicrophoneMute(mute, _srcObject!.getAudioTracks()[0]); + } + + @override + Future audioOutput(String deviceId) async { + try { + await Helper.selectAudioOutput(deviceId); + } catch (e) { + print('Helper.selectAudioOutput ${e.toString()}'); + return false; + } + return true; + } +} diff --git a/lib/src/native/rtc_video_renderer_impl.dart b/lib/src/native/rtc_video_renderer_impl.dart new file mode 100644 index 0000000000..c2a46cba75 --- /dev/null +++ b/lib/src/native/rtc_video_renderer_impl.dart @@ -0,0 +1,179 @@ +import 'dart:async'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import '../helper.dart'; +import 'utils.dart'; + +class RTCVideoRenderer extends ValueNotifier + implements VideoRenderer { + RTCVideoRenderer() : super(RTCVideoValue.empty); + Completer? _initializing; + int? _textureId; + bool _disposed = false; + MediaStream? _srcObject; + StreamSubscription? 
_eventSubscription; + + @override + Future initialize() async { + if (_initializing != null) { + await _initializing!.future; + return; + } + _initializing = Completer(); + final response = await WebRTC.invokeMethod('createVideoRenderer', {}); + _textureId = response['textureId']; + _eventSubscription = EventChannel('FlutterWebRTC/Texture$textureId') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + _initializing!.complete(null); + } + + @override + int get videoWidth => value.width.toInt(); + + @override + int get videoHeight => value.height.toInt(); + + @override + int? get textureId => _textureId; + + @override + MediaStream? get srcObject => _srcObject; + + @override + Function? onResize; + + @override + Function? onFirstFrameRendered; + + @override + set srcObject(MediaStream? stream) { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoRenderer is disposed'; + } + if (textureId == null) throw 'Call initialize before setting the stream'; + _srcObject = stream; + WebRTC.invokeMethod('videoRendererSetSrcObject', { + 'textureId': textureId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '' + }).then((_) { + value = (stream == null) + ? RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + }).catchError((e) { + print('Got exception for RTCVideoRenderer::setSrcObject: ${e.message}'); + }, test: (e) => e is PlatformException); + } + + Future setSrcObject({MediaStream? stream, String? trackId}) async { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoRenderer is disposed'; + } + if (_textureId == null) throw 'Call initialize before setting the stream'; + _srcObject = stream; + var oldTextureId = _textureId; + try { + await WebRTC.invokeMethod('videoRendererSetSrcObject', { + 'textureId': _textureId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '', + 'trackId': trackId ?? '0' + }); + value = (stream == null) + ? 
RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + } on PlatformException catch (e) { + throw 'Got exception for RTCVideoRenderer::setSrcObject: textureId $oldTextureId [disposed: $_disposed] with stream ${stream?.id}, error: ${e.message}'; + } + } + + @override + Future dispose() async { + if (_disposed) return; + await _eventSubscription?.cancel(); + _eventSubscription = null; + if (_textureId != null) { + try { + await WebRTC.invokeMethod('videoRendererDispose', { + 'textureId': _textureId, + }); + _textureId = null; + _disposed = true; + } on PlatformException catch (e) { + throw 'Failed to RTCVideoRenderer::dispose: ${e.message}'; + } + } + + return super.dispose(); + } + + void eventListener(dynamic event) { + if (_disposed) return; + final Map map = event; + switch (map['event']) { + case 'didTextureChangeRotation': + value = + value.copyWith(rotation: map['rotation'], renderVideo: renderVideo); + onResize?.call(); + break; + case 'didTextureChangeVideoSize': + value = value.copyWith( + width: 0.0 + map['width'], + height: 0.0 + map['height'], + renderVideo: renderVideo); + onResize?.call(); + break; + case 'didFirstFrameRendered': + value = value.copyWith(renderVideo: renderVideo); + onFirstFrameRendered?.call(); + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + bool get renderVideo => _textureId != null && _srcObject != null; + + @override + bool get muted => _srcObject?.getAudioTracks()[0].muted ?? 
true; + + @override + set muted(bool mute) { + if (_disposed) { + throw Exception('Can\'t be muted: The RTCVideoRenderer is disposed'); + } + if (_srcObject == null) { + throw Exception('Can\'t be muted: The MediaStream is null'); + } + if (_srcObject!.ownerTag != 'local') { + throw Exception( + 'You\'re trying to mute a remote track, this is not supported'); + } + if (_srcObject!.getAudioTracks().isEmpty) { + throw Exception('Can\'t be muted: The MediaStreamTrack(audio) is empty'); + } + + Helper.setMicrophoneMute(mute, _srcObject!.getAudioTracks()[0]); + } + + @override + Future audioOutput(String deviceId) async { + try { + await Helper.selectAudioOutput(deviceId); + } catch (e) { + print('Helper.selectAudioOutput ${e.toString()}'); + return false; + } + return true; + } +} diff --git a/lib/src/native/rtc_video_view_impl.dart b/lib/src/native/rtc_video_view_impl.dart new file mode 100644 index 0000000000..9d236143c1 --- /dev/null +++ b/lib/src/native/rtc_video_view_impl.dart @@ -0,0 +1,71 @@ +import 'dart:math'; + +import 'package:flutter/material.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_video_renderer_impl.dart'; + +class RTCVideoView extends StatelessWidget { + RTCVideoView( + this._renderer, { + super.key, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + this.filterQuality = FilterQuality.low, + this.placeholderBuilder, + }); + + final RTCVideoRenderer _renderer; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + final FilterQuality filterQuality; + final WidgetBuilder? 
placeholderBuilder; + + RTCVideoRenderer get videoRenderer => _renderer; + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) => + _buildVideoView(context, constraints)); + } + + Widget _buildVideoView(BuildContext context, BoxConstraints constraints) { + return Center( + child: Container( + width: constraints.maxWidth, + height: constraints.maxHeight, + child: FittedBox( + clipBehavior: Clip.hardEdge, + fit: objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? BoxFit.contain + : BoxFit.cover, + child: Center( + child: ValueListenableBuilder( + valueListenable: videoRenderer, + builder: + (BuildContext context, RTCVideoValue value, Widget? child) { + return SizedBox( + width: constraints.maxHeight * value.aspectRatio, + height: constraints.maxHeight, + child: child, + ); + }, + child: Transform( + transform: Matrix4.identity()..rotateY(mirror ? -pi : 0.0), + alignment: FractionalOffset.center, + child: videoRenderer.renderVideo + ? Texture( + textureId: videoRenderer.textureId!, + filterQuality: filterQuality, + ) + : placeholderBuilder?.call(context) ?? 
Container(), + ), + ), + ), + ), + ), + ); + } +} diff --git a/lib/src/native/utils.dart b/lib/src/native/utils.dart new file mode 100644 index 0000000000..362e7917cd --- /dev/null +++ b/lib/src/native/utils.dart @@ -0,0 +1,61 @@ +import 'dart:io'; + +import 'package:flutter/services.dart'; + +class WebRTC { + static const MethodChannel _channel = MethodChannel('FlutterWebRTC.Method'); + + static bool get platformIsDesktop => + Platform.isWindows || Platform.isMacOS || Platform.isLinux; + + static bool get platformIsWindows => Platform.isWindows; + + static bool get platformIsMacOS => Platform.isMacOS; + + static bool get platformIsLinux => Platform.isLinux; + + static bool get platformIsMobile => Platform.isIOS || Platform.isAndroid; + + static bool get platformIsIOS => Platform.isIOS; + + static bool get platformIsAndroid => Platform.isAndroid; + + static bool get platformIsWeb => false; + + static Future invokeMethod(String methodName, + [dynamic param]) async { + await initialize(); + + return _channel.invokeMethod( + methodName, + param, + ); + } + + static bool initialized = false; + + /// Initialize the WebRTC plugin. If this is not manually called, will be + /// initialized with default settings. + /// + /// Params: + /// + /// "networkIgnoreMask": a list of AdapterType objects converted to string with `.value` + /// + /// Android specific params: + /// + /// "forceSWCodec": a boolean that forces software codecs to be used for video. + /// + /// "forceSWCodecList": a list of strings of software codecs that should use software. + /// + /// "androidAudioConfiguration": an AndroidAudioConfiguration object mapped with toMap() + /// + /// "bypassVoiceProcessing": a boolean that bypasses the audio processing for the audio device. + static Future initialize({Map? options}) async { + if (!initialized) { + await _channel.invokeMethod('initialize', { + 'options': options ?? 
{}, + }); + initialized = true; + } + } +} diff --git a/lib/src/rtc_data_channel.dart b/lib/src/rtc_data_channel.dart deleted file mode 100644 index b8f523cd38..0000000000 --- a/lib/src/rtc_data_channel.dart +++ /dev/null @@ -1,184 +0,0 @@ -import 'dart:async'; -import 'dart:typed_data'; - -import 'package:flutter/services.dart'; - -import 'enums.dart'; -import 'utils.dart'; - -final _typeStringToMessageType = { - 'text': MessageType.text, - 'binary': MessageType.binary -}; - -/// Initialization parameters for [RTCDataChannel]. -class RTCDataChannelInit { - bool ordered = true; - int maxRetransmitTime = -1; - int maxRetransmits = -1; - String protocol = 'sctp'; //sctp | quic - bool negotiated = false; - int id = 0; - Map toMap() { - return { - 'ordered': ordered, - 'maxRetransmitTime': maxRetransmitTime, - 'maxRetransmits': maxRetransmits, - 'protocol': protocol, - 'negotiated': negotiated, - 'id': id - }; - } -} - -/// A class that represents a datachannel message. -/// Can either contain binary data as a [Uint8List] or -/// text data as a [String]. -class RTCDataChannelMessage { - /// Construct a text message with a [String]. - RTCDataChannelMessage(String text) { - _data = text; - _isBinary = false; - } - - /// Construct a binary message with a [Uint8List]. - RTCDataChannelMessage.fromBinary(Uint8List binary) { - _data = binary; - _isBinary = true; - } - - dynamic _data; - bool _isBinary; - - /// Tells whether this message contains binary. - /// If this is false, it's a text message. - bool get isBinary => _isBinary; - - MessageType get type => isBinary ? MessageType.binary : MessageType.text; - - /// Text contents of this message as [String]. - /// Use only on text messages. - /// See: [isBinary]. - String get text => _data; - - /// Binary contents of this message as [Uint8List]. - /// Use only on binary messages. - /// See: [isBinary]. 
- Uint8List get binary => _data; -} - -typedef RTCDataChannelStateCallback = void Function(RTCDataChannelState state); -typedef RTCDataChannelOnMessageCallback = void Function( - RTCDataChannelMessage message); - -/// A class that represents a WebRTC datachannel. -/// Can send and receive text and binary messages. -class RTCDataChannel { - RTCDataChannel(this._peerConnectionId, this._label, this._dataChannelId) { - stateChangeStream = _stateChangeController.stream; - messageStream = _messageController.stream; - _eventSubscription = _eventChannelFor(_peerConnectionId, _dataChannelId) - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - final String _peerConnectionId; - final String _label; - final int _dataChannelId; - RTCDataChannelState _state; - final _channel = WebRTC.methodChannel(); - StreamSubscription _eventSubscription; - - /// Get current state. - RTCDataChannelState get state => _state; - - /// Get label. - String get label => _label; - - /// Event handler for datachannel state changes. - /// Assign this property to listen for state changes. - /// Will be passed one argument, [state], which is an [RTCDataChannelState]. - RTCDataChannelStateCallback onDataChannelState; - - /// Event handler for messages. Assign this property - /// to listen for messages from this [RTCDataChannel]. - /// Will be passed a a [message] argument, which is an [RTCDataChannelMessage] that will contain either - /// binary data as a [Uint8List] or text data as a [String]. - RTCDataChannelOnMessageCallback onMessage; - - final _stateChangeController = - StreamController.broadcast(sync: true); - final _messageController = - StreamController.broadcast(sync: true); - - /// Stream of state change events. Emits the new state on change. - /// Closes when the [RTCDataChannel] is closed. - Stream stateChangeStream; - - /// Stream of incoming messages. Emits the message. - /// Closes when the [RTCDataChannel] is closed. 
- Stream messageStream; - - /// RTCDataChannel event listener. - void eventListener(dynamic event) { - final Map map = event; - switch (map['event']) { - case 'dataChannelStateChanged': - //int dataChannelId = map['id']; - _state = rtcDataChannelStateForString(map['state']); - onDataChannelState?.call(_state); - - _stateChangeController.add(_state); - break; - case 'dataChannelReceiveMessage': - //int dataChannelId = map['id']; - - var type = _typeStringToMessageType[map['type']]; - dynamic data = map['data']; - RTCDataChannelMessage message; - if (type == MessageType.binary) { - message = RTCDataChannelMessage.fromBinary(data); - } else { - message = RTCDataChannelMessage(data); - } - - onMessage?.call(message); - - _messageController.add(message); - break; - } - } - - EventChannel _eventChannelFor(String peerConnectionId, int dataChannelId) { - return EventChannel( - 'FlutterWebRTC/dataChannelEvent$peerConnectionId$dataChannelId'); - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } - - /// Send a message to this datachannel. - /// To send a text message, use the default constructor to instantiate a text [RTCDataChannelMessage] - /// for the [message] parameter. - /// To send a binary message, pass a binary [RTCDataChannelMessage] - /// constructed with [RTCDataChannelMessage.fromBinary] - Future send(RTCDataChannelMessage message) async { - await _channel.invokeMethod('dataChannelSend', { - 'peerConnectionId': _peerConnectionId, - 'dataChannelId': _dataChannelId, - 'type': message.isBinary ? 'binary' : 'text', - 'data': message.isBinary ? 
message.binary : message.text, - }); - } - - Future close() async { - await _stateChangeController.close(); - await _messageController.close(); - await _eventSubscription?.cancel(); - await _channel.invokeMethod('dataChannelClose', { - 'peerConnectionId': _peerConnectionId, - 'dataChannelId': _dataChannelId - }); - } -} diff --git a/lib/src/rtc_dtmf_sender.dart b/lib/src/rtc_dtmf_sender.dart deleted file mode 100644 index 6c42c8992e..0000000000 --- a/lib/src/rtc_dtmf_sender.dart +++ /dev/null @@ -1,30 +0,0 @@ -import 'package:flutter/services.dart'; - -import 'utils.dart'; - -class RTCDTMFSender { - RTCDTMFSender(this._peerConnectionId); - // peer connection Id must be defined as a variable where this function will be called. - final String _peerConnectionId; - final MethodChannel _channel = WebRTC.methodChannel(); - - /// tones:A String containing the DTMF codes to be transmitted to the recipient. - /// Specifying an empty string as the tones parameter clears the tone - /// buffer, aborting any currently queued tones. A "," character inserts - /// a two second delay. - /// duration: This value must be between 40 ms and 6000 ms (6 seconds). - /// The default is 100 ms. - /// interToneGap: The length of time, in milliseconds, to wait between tones. - /// The browser will enforce a minimum value of 30 ms (that is, - /// if you specify a lower value, 30 ms will be used instead); - /// the default is 70 ms. 
- Future sendDtmf(String tones, - {int duration = 100, int interToneGap = 70}) async { - await _channel.invokeMethod('sendDtmf', { - 'peerConnectionId': _peerConnectionId, - 'tone': tones, - 'duration': duration, - 'gap': interToneGap, - }); - } -} diff --git a/lib/src/rtc_ice_candidate.dart b/lib/src/rtc_ice_candidate.dart deleted file mode 100644 index d9ba2d9606..0000000000 --- a/lib/src/rtc_ice_candidate.dart +++ /dev/null @@ -1,13 +0,0 @@ -class RTCIceCandidate { - RTCIceCandidate(this.candidate, this.sdpMid, this.sdpMlineIndex); - final String candidate; - final String sdpMid; - final int sdpMlineIndex; - dynamic toMap() { - return { - 'candidate': candidate, - 'sdpMid': sdpMid, - 'sdpMLineIndex': sdpMlineIndex - }; - } -} diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart deleted file mode 100644 index 6c090a1a13..0000000000 --- a/lib/src/rtc_peerconnection.dart +++ /dev/null @@ -1,373 +0,0 @@ -import 'dart:async'; - -import 'package:flutter/services.dart'; - -import 'enums.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; -import 'rtc_data_channel.dart'; -import 'rtc_dtmf_sender.dart'; -import 'rtc_ice_candidate.dart'; -import 'rtc_session_description.dart'; -import 'rtc_stats_report.dart'; -import 'utils.dart'; - -/* - * Delegate for PeerConnection. 
- */ -typedef SignalingStateCallback = void Function(RTCSignalingState state); -typedef IceGatheringStateCallback = void Function(RTCIceGatheringState state); -typedef IceConnectionStateCallback = void Function(RTCIceConnectionState state); -typedef IceCandidateCallback = void Function(RTCIceCandidate candidate); -typedef AddStreamCallback = void Function(MediaStream stream); -typedef RemoveStreamCallback = void Function(MediaStream stream); -typedef AddTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RemoveTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); -typedef RenegotiationNeededCallback = void Function(); - -/* - * PeerConnection - */ -class RTCPeerConnection { - RTCPeerConnection(this._peerConnectionId, this._configuration) { - _eventSubscription = _eventChannelFor(_peerConnectionId) - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - - // private: - final String _peerConnectionId; - final _channel = WebRTC.methodChannel(); - StreamSubscription _eventSubscription; - final _localStreams = []; - final _remoteStreams = []; - RTCDataChannel _dataChannel; - Map _configuration; - RTCSignalingState _signalingState; - RTCIceGatheringState _iceGatheringState; - RTCIceConnectionState _iceConnectionState; - // public: delegate - SignalingStateCallback onSignalingState; - IceGatheringStateCallback onIceGatheringState; - IceConnectionStateCallback onIceConnectionState; - IceCandidateCallback onIceCandidate; - AddStreamCallback onAddStream; - RemoveStreamCallback onRemoveStream; - AddTrackCallback onAddTrack; - RemoveTrackCallback onRemoveTrack; - RTCDataChannelCallback onDataChannel; - RenegotiationNeededCallback onRenegotiationNeeded; - - final Map defaultSdpConstraints = { - 'mandatory': { - 'OfferToReceiveAudio': true, - 'OfferToReceiveVideo': true, - }, - 'optional': [], - }; - - RTCSignalingState 
get signalingState => _signalingState; - - RTCIceGatheringState get iceGatheringState => _iceGatheringState; - - RTCIceConnectionState get iceConnectionState => _iceConnectionState; - - /* - * PeerConnection event listener. - */ - void eventListener(dynamic event) { - final Map map = event; - - switch (map['event']) { - case 'signalingState': - _signalingState = signalingStateForString(map['state']); - onSignalingState?.call(_signalingState); - break; - case 'iceGatheringState': - _iceGatheringState = iceGatheringStateforString(map['state']); - onIceGatheringState?.call(_iceGatheringState); - break; - case 'iceConnectionState': - _iceConnectionState = iceConnectionStateForString(map['state']); - onIceConnectionState?.call(_iceConnectionState); - break; - case 'onCandidate': - Map cand = map['candidate']; - var candidate = RTCIceCandidate( - cand['candidate'], cand['sdpMid'], cand['sdpMLineIndex']); - onIceCandidate?.call(candidate); - break; - case 'onAddStream': - String streamId = map['streamId']; - - var stream = - _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { - var newStream = MediaStream(streamId, _peerConnectionId); - newStream.setMediaTracks(map['audioTracks'], map['videoTracks']); - return newStream; - }); - - onAddStream?.call(stream); - _remoteStreams.add(stream); - break; - case 'onRemoveStream': - String streamId = map['streamId']; - var stream = - _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { - return null; - }); - onRemoveStream?.call(stream); - _remoteStreams.removeWhere((it) => it.id == streamId); - break; - case 'onAddTrack': - String streamId = map['streamId']; - Map track = map['track']; - - var newTrack = MediaStreamTrack( - map['trackId'], track['label'], track['kind'], track['enabled']); - String kind = track['kind']; - - var stream = - _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { - var newStream = MediaStream(streamId, _peerConnectionId); - _remoteStreams.add(newStream); - return 
newStream; - }); - - var oldTracks = (kind == 'audio') - ? stream.getAudioTracks() - : stream.getVideoTracks(); - var oldTrack = oldTracks.isNotEmpty ? oldTracks[0] : null; - if (oldTrack != null) { - stream.removeTrack(oldTrack, removeFromNative: false); - onRemoveTrack?.call(stream, oldTrack); - } - - stream.addTrack(newTrack, addToNative: false); - onAddTrack?.call(stream, newTrack); - break; - case 'onRemoveTrack': - String streamId = map['streamId']; - var stream = - _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { - return null; - }); - Map track = map['track']; - var oldTrack = MediaStreamTrack( - map['trackId'], track['label'], track['kind'], track['enabled']); - onRemoveTrack?.call(stream, oldTrack); - break; - case 'didOpenDataChannel': - int dataChannelId = map['id']; - String label = map['label']; - _dataChannel = RTCDataChannel(_peerConnectionId, label, dataChannelId); - onDataChannel?.call(_dataChannel); - break; - case 'onRenegotiationNeeded': - onRenegotiationNeeded?.call(); - break; - } - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } - - Future dispose() async { - await _eventSubscription?.cancel(); - await _channel.invokeMethod( - 'peerConnectionDispose', - {'peerConnectionId': _peerConnectionId}, - ); - } - - EventChannel _eventChannelFor(String peerConnectionId) { - return EventChannel('FlutterWebRTC/peerConnectoinEvent$peerConnectionId'); - } - - Map get getConfiguration => _configuration; - - Future setConfiguration(Map configuration) async { - _configuration = configuration; - try { - await _channel.invokeMethod('setConfiguration', { - 'peerConnectionId': _peerConnectionId, - 'configuration': configuration, - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::setConfiguration: ${e.message}'; - } - } - - Future createOffer( - [Map constraints = const {}]) async { - try { - final response = await _channel - .invokeMethod>('createOffer', { - 
'peerConnectionId': _peerConnectionId, - 'constraints': - constraints.isEmpty ? defaultSdpConstraints : constraints, - }); - - String sdp = response['sdp']; - String type = response['type']; - return RTCSessionDescription(sdp, type); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::createOffer: ${e.message}'; - } - } - - Future createAnswer( - Map constraints) async { - try { - final response = await _channel.invokeMethod>( - 'createAnswer', { - 'peerConnectionId': _peerConnectionId, - 'constraints': - constraints.isEmpty ? defaultSdpConstraints : constraints, - }); - String sdp = response['sdp']; - String type = response['type']; - return RTCSessionDescription(sdp, type); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::createAnswer: ${e.message}'; - } - } - - Future addStream(MediaStream stream) async { - _localStreams.add(stream); - await _channel.invokeMethod('addStream', { - 'peerConnectionId': _peerConnectionId, - 'streamId': stream.id, - }); - } - - Future removeStream(MediaStream stream) async { - _localStreams.removeWhere((it) => it.id == stream.id); - await _channel.invokeMethod('removeStream', { - 'peerConnectionId': _peerConnectionId, - 'streamId': stream.id, - }); - } - - Future setLocalDescription(RTCSessionDescription description) async { - try { - await _channel.invokeMethod('setLocalDescription', { - 'peerConnectionId': _peerConnectionId, - 'description': description.toMap(), - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::setLocalDescription: ${e.message}'; - } - } - - Future setRemoteDescription(RTCSessionDescription description) async { - try { - await _channel.invokeMethod('setRemoteDescription', { - 'peerConnectionId': _peerConnectionId, - 'description': description.toMap(), - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::setRemoteDescription: ${e.message}'; - } - } - - Future getLocalDescription() async { - try { - final 
response = await _channel.invokeMethod>( - 'getLocalDescription', { - 'peerConnectionId': _peerConnectionId, - }); - String sdp = response['sdp']; - String type = response['type']; - return RTCSessionDescription(sdp, type); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::getLocalDescription: ${e.message}'; - } - } - - Future getRemoteDescription() async { - try { - final response = await _channel.invokeMethod>( - 'getRemoteDescription', { - 'peerConnectionId': _peerConnectionId, - }); - String sdp = response['sdp']; - String type = response['type']; - return RTCSessionDescription(sdp, type); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::getRemoteDescription: ${e.message}'; - } - } - - Future addCandidate(RTCIceCandidate candidate) async { - await _channel.invokeMethod('addCandidate', { - 'peerConnectionId': _peerConnectionId, - 'candidate': candidate.toMap(), - }); - } - - Future> getStats([MediaStreamTrack track]) async { - try { - final response = await _channel.invokeMethod>( - 'getStats', { - 'peerConnectionId': _peerConnectionId, - 'track': track != null ? 
track.id : null - }); - var stats = []; - if (response != null) { - List reports = response['stats']; - reports.forEach((report) { - stats.add(StatsReport(report['id'], report['type'], - report['timestamp'], report['values'])); - }); - } - return stats; - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::getStats: ${e.message}'; - } - } - - List getLocalStreams() { - return _localStreams; - } - - List getRemoteStreams() { - return _remoteStreams; - } - - Future createDataChannel( - String label, RTCDataChannelInit dataChannelDict) async { - try { - await _channel.invokeMethod>( - 'createDataChannel', { - 'peerConnectionId': _peerConnectionId, - 'label': label, - 'dataChannelDict': dataChannelDict.toMap() - }); - _dataChannel = - RTCDataChannel(_peerConnectionId, label, dataChannelDict.id); - return _dataChannel; - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::createDataChannel: ${e.message}'; - } - } - - RTCDTMFSender createDtmfSender(MediaStreamTrack track) { - return RTCDTMFSender(_peerConnectionId); - } - - Future close() async { - try { - await _channel.invokeMethod('peerConnectionClose', { - 'peerConnectionId': _peerConnectionId, - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::close: ${e.message}'; - } - } -} diff --git a/lib/src/rtc_peerconnection_factory.dart b/lib/src/rtc_peerconnection_factory.dart deleted file mode 100644 index 30e716b581..0000000000 --- a/lib/src/rtc_peerconnection_factory.dart +++ /dev/null @@ -1,38 +0,0 @@ -import 'dart:async'; - -import 'media_stream.dart'; -import 'rtc_peerconnection.dart'; -import 'utils.dart'; - -Future createPeerConnection( - Map configuration, - [Map constraints = const {}]) async { - var channel = WebRTC.methodChannel(); - - var defaultConstraints = { - 'mandatory': {}, - 'optional': [ - {'DtlsSrtpKeyAgreement': true}, - ], - }; - - final response = await channel.invokeMethod>( - 'createPeerConnection', - { - 'configuration': 
configuration, - 'constraints': constraints.isEmpty ? defaultConstraints : constraints - }, - ); - - String peerConnectionId = response['peerConnectionId']; - return RTCPeerConnection(peerConnectionId, configuration); -} - -Future createLocalMediaStream(String label) async { - var _channel = WebRTC.methodChannel(); - - final response = await _channel - .invokeMethod>('createLocalMediaStream'); - - return MediaStream(response['streamId'], label); -} diff --git a/lib/src/rtc_session_description.dart b/lib/src/rtc_session_description.dart deleted file mode 100644 index 2a0f22e024..0000000000 --- a/lib/src/rtc_session_description.dart +++ /dev/null @@ -1,8 +0,0 @@ -class RTCSessionDescription { - RTCSessionDescription(this.sdp, this.type); - String sdp; - String type; - dynamic toMap() { - return {'sdp': sdp, 'type': type}; - } -} diff --git a/lib/src/rtc_stats_report.dart b/lib/src/rtc_stats_report.dart deleted file mode 100644 index 0774e9787c..0000000000 --- a/lib/src/rtc_stats_report.dart +++ /dev/null @@ -1,7 +0,0 @@ -class StatsReport { - StatsReport(this.id, this.type, this.timestamp, this.values); - String id; - String type; - double timestamp; - Map values; -} diff --git a/lib/src/rtc_video_view.dart b/lib/src/rtc_video_view.dart deleted file mode 100644 index 2f5efb1f04..0000000000 --- a/lib/src/rtc_video_view.dart +++ /dev/null @@ -1,178 +0,0 @@ -import 'dart:async'; -import 'dart:math'; - -import 'package:flutter/material.dart'; -import 'package:flutter/services.dart'; - -import 'enums.dart'; -import 'media_stream.dart'; -import 'utils.dart'; - -@immutable -class RTCVideoValue { - const RTCVideoValue({ - this.width = 0.0, - this.height = 0.0, - this.rotation = 0, - this.renderVideo = false, - }); - static const empty = RTCVideoValue(); - final double width; - final double height; - final int rotation; - final bool renderVideo; - double get aspectRatio { - if (width == 0.0 || height == 0.0) { - return 1.0; - } - return (rotation == 90 || rotation == 270) - ? 
height / width - : width / height; - } - - RTCVideoValue copyWith({ - double width, - double height, - int rotation, - bool renderVideo, - }) { - return RTCVideoValue( - width: width ?? this.width, - height: height ?? this.height, - rotation: rotation ?? this.rotation, - renderVideo: (this.width != 0 && this.height != 0 && renderVideo) ?? - this.renderVideo, - ); - } - - @override - String toString() => - '$runtimeType(width: $width, height: $height, rotation: $rotation)'; -} - -class RTCVideoRenderer extends ValueNotifier { - RTCVideoRenderer() : super(RTCVideoValue.empty); - final _channel = WebRTC.methodChannel(); - int _textureId; - MediaStream _srcObject; - StreamSubscription _eventSubscription; - - Future initialize() async { - final response = await _channel - .invokeMethod>('createVideoRenderer', {}); - _textureId = response['textureId']; - _eventSubscription = EventChannel('FlutterWebRTC/Texture$textureId') - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - - int get textureId => _textureId; - - MediaStream get srcObject => _srcObject; - - set srcObject(MediaStream stream) { - if (textureId == null) throw 'Call initialize before setting the stream'; - - _srcObject = stream; - _channel.invokeMethod('videoRendererSetSrcObject', { - 'textureId': textureId, - 'streamId': stream?.id ?? '', - 'ownerTag': stream?.ownerTag ?? '' - }).then((_) { - value = (stream == null) - ? 
RTCVideoValue.empty - : value.copyWith(renderVideo: renderVideo); - }); - } - - @override - Future dispose() async { - super.dispose(); - await _eventSubscription?.cancel(); - await _channel.invokeMethod( - 'videoRendererDispose', - {'textureId': _textureId}, - ); - } - - void eventListener(dynamic event) { - final Map map = event; - switch (map['event']) { - case 'didTextureChangeRotation': - value = - value.copyWith(rotation: map['rotation'], renderVideo: renderVideo); - break; - case 'didTextureChangeVideoSize': - value = value.copyWith( - width: 0.0 + map['width'], - height: 0.0 + map['height'], - renderVideo: renderVideo); - break; - case 'didFirstFrameRendered': - break; - } - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } - - bool get renderVideo => srcObject != null; -} - -class RTCVideoView extends StatelessWidget { - RTCVideoView( - this._renderer, { - Key key, - this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, - this.mirror = false, - }) : assert(objectFit != null), - assert(mirror != null), - super(key: key); - - final RTCVideoRenderer _renderer; - final RTCVideoViewObjectFit objectFit; - final bool mirror; - - @override - Widget build(BuildContext context) { - return LayoutBuilder( - builder: (BuildContext context, BoxConstraints constraints) => - _buildVideoView(constraints)); - } - - Widget _buildVideoView(BoxConstraints constraints) { - return Center( - child: Container( - width: constraints.maxWidth, - height: constraints.maxHeight, - child: FittedBox( - fit: objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain - ? BoxFit.contain - : BoxFit.cover, - child: Center( - child: ValueListenableBuilder( - valueListenable: _renderer, - builder: - (BuildContext context, RTCVideoValue value, Widget child) { - return SizedBox( - width: constraints.maxHeight * value.aspectRatio, - height: constraints.maxHeight, - child: value.renderVideo ? 
child : Container(), - ); - }, - child: Transform( - transform: Matrix4.identity()..rotateY(mirror ? -pi : 0.0), - alignment: FractionalOffset.center, - child: _renderer.textureId != null - ? Texture(textureId: _renderer.textureId) - : Container(), - ), - ), - ), - ), - ), - ); - } -} diff --git a/lib/src/utils.dart b/lib/src/utils.dart deleted file mode 100644 index f88f73ffc3..0000000000 --- a/lib/src/utils.dart +++ /dev/null @@ -1,14 +0,0 @@ -import 'dart:io'; -import 'package:flutter/services.dart'; - -class WebRTC { - static const MethodChannel _channel = MethodChannel('FlutterWebRTC.Method'); - static MethodChannel methodChannel() => _channel; - - static bool get platformIsDesktop => - Platform.isWindows || Platform.isLinux || Platform.isMacOS; - - static bool get platformIsMobile => Platform.isIOS || Platform.isAndroid; - - static bool get platformIsWeb => false; -} diff --git a/lib/src/video_renderer_extension.dart b/lib/src/video_renderer_extension.dart new file mode 100644 index 0000000000..fa8b7ac78b --- /dev/null +++ b/lib/src/video_renderer_extension.dart @@ -0,0 +1,5 @@ +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +extension VideoRendererExtension on RTCVideoRenderer { + RTCVideoValue get videoValue => value; +} diff --git a/lib/src/web/factory_impl.dart b/lib/src/web/factory_impl.dart new file mode 100644 index 0000000000..0df13f80cd --- /dev/null +++ b/lib/src/web/factory_impl.dart @@ -0,0 +1,6 @@ +import '../desktop_capturer.dart'; + +export 'package:dart_webrtc/dart_webrtc.dart' + hide videoRenderer, MediaDevices, MediaRecorder; + +DesktopCapturer get desktopCapturer => throw UnimplementedError(); diff --git a/lib/src/web/get_user_media.dart b/lib/src/web/get_user_media.dart deleted file mode 100644 index 2831da1493..0000000000 --- a/lib/src/web/get_user_media.dart +++ /dev/null @@ -1,65 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; -import 'dart:js'; -import 'dart:js_util' as jsutil; - -import 'media_stream.dart'; - -class 
MediaDevices { - static Future getUserMedia( - Map mediaConstraints) async { - mediaConstraints ??= {}; - - try { - if (mediaConstraints['video'] is Map) { - if (mediaConstraints['video']['facingMode'] != null) { - mediaConstraints['video'].remove('facingMode'); - } - } - - mediaConstraints.putIfAbsent('video', () => false); - mediaConstraints.putIfAbsent('audio', () => false); - - final mediaDevices = html.window.navigator.mediaDevices; - final jsStream = await mediaDevices.getUserMedia(mediaConstraints); - return MediaStream(jsStream, 'local'); - } catch (e) { - throw 'Unable to getUserMedia: ${e.toString()}'; - } - } - - static Future getDisplayMedia( - Map mediaConstraints) async { - try { - final mediaDevices = html.window.navigator.mediaDevices; - if (jsutil.hasProperty(mediaDevices, 'getDisplayMedia')) { - final arg = JsObject.jsify(mediaConstraints); - - final jsStream = await jsutil.promiseToFuture( - jsutil.callMethod(mediaDevices, 'getDisplayMedia', [arg])); - return MediaStream(jsStream, 'local'); - } else { - final jsStream = await html.window.navigator.getUserMedia( - video: {'mediaSource': 'screen'}, - audio: mediaConstraints['audio'] ?? 
false); - return MediaStream(jsStream, 'local'); - } - } catch (e) { - throw 'Unable to getDisplayMedia: ${e.toString()}'; - } - } - - static Future> getSources() async { - final devices = await html.window.navigator.mediaDevices.enumerateDevices(); - final result = []; - for (final device in devices) { - result.add({ - 'deviceId': device.deviceId, - 'groupId': device.groupId, - 'kind': device.kind, - 'label': device.label - }); - } - return result; - } -} diff --git a/lib/src/web/media_recorder.dart b/lib/src/web/media_recorder.dart deleted file mode 100644 index 8939eb0c29..0000000000 --- a/lib/src/web/media_recorder.dart +++ /dev/null @@ -1,65 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; -import 'dart:js' as js; - -import '../enums.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; - -class MediaRecorder { - html.MediaRecorder _recorder; - Completer _completer; - - /// For Android use audioChannel param - /// For iOS use audioTrack - Future start( - String path, { - MediaStreamTrack videoTrack, - MediaStreamTrack audioTrack, - RecorderAudioChannel audioChannel, - int rotation, - }) { - throw 'Use startWeb on Flutter Web!'; - } - - /// Only for Flutter Web - void startWeb( - MediaStream stream, { - Function(dynamic blob, bool isLastOne) onDataChunk, - String mimeType = 'video/webm', - }) { - _recorder = html.MediaRecorder(stream.jsStream, {'mimeType': mimeType}); - if (onDataChunk == null) { - var _chunks = []; - _completer = Completer(); - _recorder.addEventListener('dataavailable', (html.Event event) { - final html.Blob blob = js.JsObject.fromBrowserObject(event)['data']; - if (blob.size > 0) { - _chunks.add(blob); - } - if (_recorder.state == 'inactive') { - final blob = html.Blob(_chunks, mimeType); - _completer?.complete(html.Url.createObjectUrlFromBlob(blob)); - _completer = null; - } - }); - _recorder.onError.listen((error) { - _completer?.completeError(error); - _completer = null; - }); - } else { - 
_recorder.addEventListener('dataavailable', (html.Event event) { - onDataChunk( - js.JsObject.fromBrowserObject(event)['data'], - _recorder.state == 'inactive', - ); - }); - } - _recorder.start(); - } - - Future stop() { - _recorder?.stop(); - return _completer?.future ?? Future.value(); - } -} diff --git a/lib/src/web/media_stream.dart b/lib/src/web/media_stream.dart deleted file mode 100644 index a0816be421..0000000000 --- a/lib/src/web/media_stream.dart +++ /dev/null @@ -1,47 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; - -import 'media_stream_track.dart'; - -class MediaStream { - MediaStream(this.jsStream, this._ownerTag); - final html.MediaStream jsStream; - final String _ownerTag; - - String get id => jsStream.id; - - String get ownerTag => _ownerTag; - - Future getMediaTracks() { - return Future.value(); - } - - Future addTrack(MediaStreamTrack track, {bool addToNative = true}) { - if (addToNative) { - jsStream.addTrack(track.jsTrack); - } - return Future.value(); - } - - Future removeTrack(MediaStreamTrack track, - {bool removeFromNative = true}) async { - if (removeFromNative) { - jsStream.removeTrack(track.jsTrack); - } - } - - List getAudioTracks() => jsStream - .getAudioTracks() - .map((jsTrack) => MediaStreamTrack(jsTrack)) - .toList(); - - List getVideoTracks() => jsStream - .getVideoTracks() - .map((jsTrack) => MediaStreamTrack(jsTrack)) - .toList(); - - Future dispose() async { - jsStream.getAudioTracks().forEach((track) => track.stop()); - jsStream.getVideoTracks().forEach((track) => track.stop()); - } -} diff --git a/lib/src/web/media_stream_track.dart b/lib/src/web/media_stream_track.dart deleted file mode 100644 index a2d2ea8365..0000000000 --- a/lib/src/web/media_stream_track.dart +++ /dev/null @@ -1,64 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; -import 'dart:js' as js; - -class MediaStreamTrack { - const MediaStreamTrack(this.jsTrack); - - final html.MediaStreamTrack jsTrack; - - set enabled(bool enabled) => 
jsTrack.enabled = enabled; - - bool get enabled => jsTrack.enabled; - - String get label => jsTrack.label; - - String get kind => jsTrack.kind; - - String get id => jsTrack.id; - - ///Future contains isFrontCamera - ///Throws error if switching camera failed - Future switchCamera() async { - // TODO(cloudwebrtc): ??? - return false; - } - - Future adaptRes(int width, int height) async { - // TODO(cloudwebrtc): ??? - } - - void setVolume(double volume) { - final constraints = jsTrack.getConstraints(); - constraints['volume'] = volume; - js.JsObject.fromBrowserObject(jsTrack) - .callMethod('applyConstraints', [js.JsObject.jsify(constraints)]); - } - - void setMicrophoneMute(bool mute) { - jsTrack.enabled = !mute; - } - - void enableSpeakerphone(bool enable) { - // Should this throw error? - } - - Future captureFrame([String filePath]) async { - final imageCapture = html.ImageCapture(jsTrack); - final bitmap = await imageCapture.grabFrame(); - final html.CanvasElement canvas = html.Element.canvas(); - canvas.width = bitmap.width; - canvas.height = bitmap.height; - final html.ImageBitmapRenderingContext renderer = - canvas.getContext('bitmaprenderer'); - renderer.transferFromImageBitmap(bitmap); - final dataUrl = canvas.toDataUrl(); - bitmap.close(); - return dataUrl; - } - - Future dispose() { - jsTrack.stop(); - return Future.value(); - } -} diff --git a/lib/src/web/rtc_data_channel.dart b/lib/src/web/rtc_data_channel.dart deleted file mode 100644 index b8e8cedda2..0000000000 --- a/lib/src/web/rtc_data_channel.dart +++ /dev/null @@ -1,145 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; -import 'dart:js_util' as jsutil; -import 'dart:typed_data'; - -import '../enums.dart'; - -class RTCDataChannelInit { - bool ordered = true; - int maxRetransmitTime = -1; - int maxRetransmits = -1; - String protocol = 'sctp'; //sctp | quic - String binaryType = 'text'; // "binary" || text - bool negotiated = false; - int id = 0; - Map toMap() { - return { - 'ordered': 
ordered, - if (maxRetransmitTime > 0) - //https://www.chromestatus.com/features/5198350873788416 - 'maxPacketLifeTime': maxRetransmitTime, - if (maxRetransmits > 0) 'maxRetransmits': maxRetransmits, - 'protocol': protocol, - 'negotiated': negotiated, - if (id != 0) 'id': id - }; - } -} - -/// A class that represents a datachannel message. -/// Can either contain binary data as a [Uint8List] or -/// text data as a [String]. -class RTCDataChannelMessage { - /// Construct a text message with a [String]. - RTCDataChannelMessage(String text) { - _data = text; - _isBinary = false; - } - - /// Construct a binary message with a [Uint8List]. - RTCDataChannelMessage.fromBinary(Uint8List binary) { - _data = binary; - _isBinary = true; - } - dynamic _data; - bool _isBinary; - - /// Tells whether this message contains binary. - /// If this is false, it's a text message. - bool get isBinary => _isBinary; - - MessageType get type => isBinary ? MessageType.binary : MessageType.text; - - /// Text contents of this message as [String]. - /// Use only on text messages. - /// See: [isBinary]. - String get text => _data; - - /// Binary contents of this message as [Uint8List]. - /// Use only on binary messages. - /// See: [isBinary]. 
- Uint8List get binary => _data; -} - -typedef RTCDataChannelStateCallback = void Function(RTCDataChannelState state); -typedef RTCDataChannelOnMessageCallback = void Function( - RTCDataChannelMessage data); - -class RTCDataChannel { - RTCDataChannel(this._jsDc) { - stateChangeStream = _stateChangeController.stream; - messageStream = _messageController.stream; - _jsDc.onClose.listen((_) { - _state = RTCDataChannelState.RTCDataChannelClosed; - _stateChangeController.add(_state); - if (onDataChannelState != null) { - onDataChannelState(_state); - } - }); - _jsDc.onOpen.listen((_) { - _state = RTCDataChannelState.RTCDataChannelOpen; - _stateChangeController.add(_state); - if (onDataChannelState != null) { - onDataChannelState(_state); - } - }); - _jsDc.onMessage.listen((event) async { - var msg = await _parse(event.data); - _messageController.add(msg); - if (onMessage != null) { - onMessage(msg); - } - }); - } - - final html.RtcDataChannel _jsDc; - RTCDataChannelStateCallback onDataChannelState; - RTCDataChannelOnMessageCallback onMessage; - RTCDataChannelState _state = RTCDataChannelState.RTCDataChannelConnecting; - - /// Get current state. - RTCDataChannelState get state => _state; - - final _stateChangeController = - StreamController.broadcast(sync: true); - final _messageController = - StreamController.broadcast(sync: true); - - /// Stream of state change events. Emits the new state on change. - /// Closes when the [RTCDataChannel] is closed. - Stream stateChangeStream; - - /// Stream of incoming messages. Emits the message. - /// Closes when the [RTCDataChannel] is closed. 
- Stream messageStream; - - Future _parse(dynamic data) async { - if (data is String) return RTCDataChannelMessage(data); - dynamic arrayBuffer; - if (data is html.Blob) { - // This should never happen actually - arrayBuffer = await jsutil - .promiseToFuture(jsutil.callMethod(data, 'arrayBuffer', [])); - } else { - arrayBuffer = data; - } - return RTCDataChannelMessage.fromBinary(arrayBuffer.asUint8List()); - } - - Future send(RTCDataChannelMessage message) { - if (!message.isBinary) { - _jsDc.send(message.text); - } else { - // This may just work - _jsDc.sendByteBuffer(message.binary.buffer); - // If not, convert to ArrayBuffer/Blob - } - return Future.value(); - } - - Future close() { - _jsDc.close(); - return Future.value(); - } -} diff --git a/lib/src/web/rtc_dtmf_sender.dart b/lib/src/web/rtc_dtmf_sender.dart deleted file mode 100644 index c7a8dec682..0000000000 --- a/lib/src/web/rtc_dtmf_sender.dart +++ /dev/null @@ -1,21 +0,0 @@ -import 'dart:html' as html; - -class RTCDTMFSender { - RTCDTMFSender(this._jsDtmfSender); - final html.RtcDtmfSender _jsDtmfSender; - - /// tones:A String containing the DTMF codes to be transmitted to the recipient. - /// Specifying an empty string as the tones parameter clears the tone - /// buffer, aborting any currently queued tones. A "," character inserts - /// a two second delay. - /// duration: This value must be between 40 ms and 6000 ms (6 seconds). - /// The default is 100 ms. - /// interToneGap: The length of time, in milliseconds, to wait between tones. - /// The browser will enforce a minimum value of 30 ms (that is, - /// if you specify a lower value, 30 ms will be used instead); - /// the default is 70 ms. 
- Future sendDtmf(String tones, - {int duration = 100, int interToneGap = 70}) async { - return _jsDtmfSender.insertDtmf(tones, duration, interToneGap); - } -} diff --git a/lib/src/web/rtc_ice_candidate.dart b/lib/src/web/rtc_ice_candidate.dart deleted file mode 100644 index fa6e3e0ea5..0000000000 --- a/lib/src/web/rtc_ice_candidate.dart +++ /dev/null @@ -1,22 +0,0 @@ -import 'dart:html' as html; - -class RTCIceCandidate { - RTCIceCandidate(this.candidate, this.sdpMid, this.sdpMlineIndex); - RTCIceCandidate.fromJs(html.RtcIceCandidate jsIceCandidate) - : this(jsIceCandidate.candidate, jsIceCandidate.sdpMid, - jsIceCandidate.sdpMLineIndex); - - final String candidate; - final String sdpMid; - final int sdpMlineIndex; - - dynamic toMap() { - return { - 'candidate': candidate, - 'sdpMid': sdpMid, - 'sdpMLineIndex': sdpMlineIndex - }; - } - - html.RtcIceCandidate toJs() => html.RtcIceCandidate(toMap()); -} diff --git a/lib/src/web/rtc_peerconnection.dart b/lib/src/web/rtc_peerconnection.dart deleted file mode 100644 index c05491235e..0000000000 --- a/lib/src/web/rtc_peerconnection.dart +++ /dev/null @@ -1,242 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; -import 'dart:js' as js; -import 'dart:js_util' as jsutil; - -import '../enums.dart'; -import '../rtc_stats_report.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; -import 'rtc_data_channel.dart'; -import 'rtc_dtmf_sender.dart'; -import 'rtc_ice_candidate.dart'; -import 'rtc_session_description.dart'; - -/* - * Delegate for PeerConnection. 
- */ -typedef SignalingStateCallback = void Function(RTCSignalingState state); -typedef IceGatheringStateCallback = void Function(RTCIceGatheringState state); -typedef IceConnectionStateCallback = void Function(RTCIceConnectionState state); -typedef IceCandidateCallback = void Function(RTCIceCandidate candidate); -typedef AddStreamCallback = void Function(MediaStream stream); -typedef RemoveStreamCallback = void Function(MediaStream stream); -typedef AddTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RemoveTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); -typedef RenegotiationNeededCallback = void Function(); - -/* - * PeerConnection - */ -class RTCPeerConnection { - RTCPeerConnection(this._peerConnectionId, this._jsPc) { - _jsPc.onAddStream.listen((mediaStreamEvent) { - final jsStream = mediaStreamEvent.stream; - final _remoteStream = _remoteStreams.putIfAbsent( - jsStream.id, () => MediaStream(jsStream, _peerConnectionId)); - - onAddStream?.call(_remoteStream); - - jsStream.onAddTrack.listen((mediaStreamTrackEvent) { - final jsTrack = - (mediaStreamTrackEvent as html.MediaStreamTrackEvent).track; - final track = MediaStreamTrack(jsTrack); - _remoteStream.addTrack(track, addToNative: false).then((_) { - onAddTrack?.call(_remoteStream, track); - }); - }); - - jsStream.onRemoveTrack.listen((mediaStreamTrackEvent) { - final jsTrack = - (mediaStreamTrackEvent as html.MediaStreamTrackEvent).track; - final track = MediaStreamTrack(jsTrack); - _remoteStream.removeTrack(track, removeFromNative: false).then((_) { - onRemoveTrack?.call(_remoteStream, track); - }); - }); - }); - - _jsPc.onDataChannel.listen((dataChannelEvent) { - onDataChannel?.call(RTCDataChannel(dataChannelEvent.channel)); - }); - - _jsPc.onIceCandidate.listen((iceEvent) { - if (iceEvent.candidate != null) { - 
onIceCandidate?.call(RTCIceCandidate.fromJs(iceEvent.candidate)); - } - }); - - _jsPc.onIceConnectionStateChange.listen((_) { - _iceConnectionState = - iceConnectionStateForString(_jsPc.iceConnectionState); - onIceConnectionState?.call(_iceConnectionState); - }); - - js.JsObject.fromBrowserObject(_jsPc)['onicegatheringstatechange'] = - js.JsFunction.withThis((_) { - _iceGatheringState = iceGatheringStateforString(_jsPc.iceGatheringState); - onIceGatheringState?.call(_iceGatheringState); - }); - - _jsPc.onRemoveStream.listen((mediaStreamEvent) { - final _remoteStream = _remoteStreams.remove(mediaStreamEvent.stream.id); - onRemoveStream?.call(_remoteStream); - }); - - _jsPc.onSignalingStateChange.listen((_) { - _signalingState = signalingStateForString(_jsPc.signalingState); - onSignalingState?.call(_signalingState); - }); - - js.JsObject.fromBrowserObject(_jsPc)['negotiationneeded'] = - js.JsFunction.withThis(() { - onRenegotiationNeeded?.call(); - }); - - js.JsObject.fromBrowserObject(_jsPc)['ontrack'] = - js.JsFunction.withThis((_, trackEvent) { - // TODO(rostopira): trackEvent is JsObject conforming to RTCTrackEvent, - // https://developer.mozilla.org/en-US/docs/Web/API/RTCTrackEvent - print('ontrack arg: $trackEvent'); - }); - } - final String _peerConnectionId; - final html.RtcPeerConnection _jsPc; - final _localStreams = {}; - final _remoteStreams = {}; - final _configuration = {}; - - RTCSignalingState _signalingState; - RTCIceGatheringState _iceGatheringState; - RTCIceConnectionState _iceConnectionState; - - // public: delegate - SignalingStateCallback onSignalingState; - IceGatheringStateCallback onIceGatheringState; - IceConnectionStateCallback onIceConnectionState; - IceCandidateCallback onIceCandidate; - AddStreamCallback onAddStream; - RemoveStreamCallback onRemoveStream; - AddTrackCallback onAddTrack; - RemoveTrackCallback onRemoveTrack; - RTCDataChannelCallback onDataChannel; - RenegotiationNeededCallback onRenegotiationNeeded; - - RTCSignalingState 
get signalingState => _signalingState; - - RTCIceGatheringState get iceGatheringState => _iceGatheringState; - - RTCIceConnectionState get iceConnectionState => _iceConnectionState; - - Future dispose() { - _jsPc.close(); - return Future.value(); - } - - Map get getConfiguration => _configuration; - - Future setConfiguration(Map configuration) { - _configuration.addAll(configuration); - - _jsPc.setConfiguration(configuration); - return Future.value(); - } - - Future createOffer( - Map constraints) async { - final offer = await _jsPc.createOffer(constraints); - return RTCSessionDescription.fromJs(offer); - } - - Future createAnswer( - Map constraints) async { - final answer = await _jsPc.createAnswer(constraints); - return RTCSessionDescription.fromJs(answer); - } - - Future addStream(MediaStream stream) { - _localStreams.putIfAbsent(stream.jsStream.id, - () => MediaStream(stream.jsStream, _peerConnectionId)); - _jsPc.addStream(stream.jsStream); - return Future.value(); - } - - Future removeStream(MediaStream stream) async { - _localStreams.remove(stream.jsStream.id); - _jsPc.removeStream(stream.jsStream); - return Future.value(); - } - - Future setLocalDescription(RTCSessionDescription description) async { - await _jsPc.setLocalDescription(description.toMap()); - } - - Future setRemoteDescription(RTCSessionDescription description) async { - await _jsPc.setRemoteDescription(description.toMap()); - } - - Future getLocalDescription() async { - return RTCSessionDescription.fromJs(_jsPc.localDescription); - } - - Future getRemoteDescription() async { - return RTCSessionDescription.fromJs(_jsPc.remoteDescription); - } - - Future addCandidate(RTCIceCandidate candidate) async { - await jsutil.promiseToFuture( - jsutil.callMethod(_jsPc, 'addIceCandidate', [candidate.toJs()])); - } - - Future> getStats([MediaStreamTrack track]) async { - final stats = await _jsPc.getStats(); - var report = []; - stats.forEach((key, value) { - report.add( - StatsReport(value['id'], 
value['type'], value['timestamp'], value)); - }); - return report; - } - - List getLocalStreams() => _jsPc - .getLocalStreams() - .map((jsStream) => _localStreams[jsStream.id]) - .toList(); - - List getRemoteStreams() => _jsPc - .getRemoteStreams() - .map((jsStream) => _remoteStreams[jsStream.id]) - .toList(); - - Future createDataChannel( - String label, RTCDataChannelInit dataChannelDict) { - final map = dataChannelDict.toMap(); - if (dataChannelDict.binaryType == 'binary') { - map['binaryType'] = 'arraybuffer'; // Avoid Blob in data channel - } - - final jsDc = _jsPc.createDataChannel(label, map); - return Future.value(RTCDataChannel(jsDc)); - } - - Future close() async { - _jsPc.close(); - return Future.value(); - } - - //'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } - void addTransceiver(String type, Map options) { - if (jsutil.hasProperty(_jsPc, 'addTransceiver')) { - final jsOptions = js.JsObject.jsify(options); - jsutil.callMethod(_jsPc, 'addTransceiver', [type, jsOptions]); - } - } - - RTCDTMFSender createDtmfSender(MediaStreamTrack track) { - var jsDtmfSender = _jsPc.createDtmfSender(track.jsTrack); - return RTCDTMFSender(jsDtmfSender); - } -} diff --git a/lib/src/web/rtc_peerconnection_factory.dart b/lib/src/web/rtc_peerconnection_factory.dart deleted file mode 100644 index f4463999fe..0000000000 --- a/lib/src/web/rtc_peerconnection_factory.dart +++ /dev/null @@ -1,27 +0,0 @@ -import 'dart:async'; -import 'dart:convert'; -import 'dart:html' as html; - -import 'media_stream.dart'; -import 'rtc_peerconnection.dart'; - -Future createPeerConnection( - Map configuration, - Map constraints) async { - final constr = (constraints != null && constraints.isNotEmpty) - ? 
constraints - : { - 'mandatory': {}, - 'optional': [ - {'DtlsSrtpKeyAgreement': true}, - ], - }; - final jsRtcPc = html.RtcPeerConnection(configuration, constr); - final _peerConnectionId = base64Encode(jsRtcPc.toString().codeUnits); - return RTCPeerConnection(_peerConnectionId, jsRtcPc); -} - -Future createLocalMediaStream(String label) async { - final jsMs = html.MediaStream(); - return MediaStream(jsMs, 'local'); -} diff --git a/lib/src/web/rtc_session_description.dart b/lib/src/web/rtc_session_description.dart deleted file mode 100644 index 91c30d9f03..0000000000 --- a/lib/src/web/rtc_session_description.dart +++ /dev/null @@ -1,16 +0,0 @@ -import 'dart:html' as html; -import 'dart:js' as js; - -class RTCSessionDescription { - RTCSessionDescription(this.sdp, this.type); - RTCSessionDescription.fromJs(html.RtcSessionDescription rsd) - : this(rsd.sdp, rsd.type); - RTCSessionDescription.fromJsObj(js.JsObject js) : this(js['sdp'], js['type']); - - String sdp; - String type; - - Map toMap() { - return {'sdp': sdp, 'type': type}; - } -} diff --git a/lib/src/web/rtc_video_renderer_impl.dart b/lib/src/web/rtc_video_renderer_impl.dart new file mode 100644 index 0000000000..69df097e0c --- /dev/null +++ b/lib/src/web/rtc_video_renderer_impl.dart @@ -0,0 +1,329 @@ +import 'dart:async'; +import 'dart:js_interop'; +import 'dart:ui_web' as web_ui; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:dart_webrtc/dart_webrtc.dart'; +import 'package:web/web.dart' as web; + +// An error code value to error name Map. +// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code +const Map _kErrorValueToErrorName = { + 1: 'MEDIA_ERR_ABORTED', + 2: 'MEDIA_ERR_NETWORK', + 3: 'MEDIA_ERR_DECODE', + 4: 'MEDIA_ERR_SRC_NOT_SUPPORTED', +}; + +// An error code value to description Map. 
+// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code +const Map _kErrorValueToErrorDescription = { + 1: 'The user canceled the fetching of the video.', + 2: 'A network error occurred while fetching the video, despite having previously been available.', + 3: 'An error occurred while trying to decode the video, despite having previously been determined to be usable.', + 4: 'The video has been found to be unsuitable (missing or in a format not supported by your browser).', +}; + +// The default error message, when the error is an empty string +// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/message +const String _kDefaultErrorMessage = + 'No further diagnostic information can be determined or provided.'; + +class RTCVideoRenderer extends ValueNotifier + implements VideoRenderer { + RTCVideoRenderer() + : _textureId = _textureCounter++, + super(RTCVideoValue.empty); + + static const _elementIdForAudioManager = 'html_webrtc_audio_manager_list'; + + web.HTMLAudioElement? _audioElement; + + static int _textureCounter = 1; + + web.MediaStream? _videoStream; + + web.MediaStream? _audioStream; + + MediaStreamWeb? 
_srcObject; + + final int _textureId; + + bool mirror = false; + + final _subscriptions = []; + + String _objectFit = 'contain'; + + bool _muted = false; + + set objectFit(String fit) { + if (_objectFit == fit) return; + _objectFit = fit; + findHtmlView()?.style.objectFit = fit; + } + + @override + int get videoWidth => value.width.toInt(); + + @override + int get videoHeight => value.height.toInt(); + + @override + int get textureId => _textureId; + + @override + bool get muted => _muted; + + @override + set muted(bool mute) => _audioElement?.muted = _muted = mute; + + @override + bool get renderVideo => _srcObject != null; + + String get _elementIdForAudio => 'audio_$viewType'; + + String get _elementIdForVideo => 'video_$viewType'; + + String get viewType => 'RTCVideoRenderer-$textureId'; + + void _updateAllValues(web.HTMLVideoElement fallback) { + final element = findHtmlView() ?? fallback; + value = value.copyWith( + rotation: 0, + width: element.videoWidth.toDouble(), + height: element.videoHeight.toDouble(), + renderVideo: renderVideo, + ); + } + + @override + MediaStream? get srcObject => _srcObject; + + @override + set srcObject(MediaStream? 
stream) { + if (stream == null) { + findHtmlView()?.srcObject = null; + _audioElement?.srcObject = null; + _srcObject = null; + return; + } + + _srcObject = stream as MediaStreamWeb; + + if (null != _srcObject) { + if (stream.getVideoTracks().isNotEmpty) { + _videoStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getVideoTracks().toDart) { + _videoStream!.addTrack(track); + } + } + if (stream.getAudioTracks().isNotEmpty) { + _audioStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getAudioTracks().toDart) { + _audioStream!.addTrack(track); + } + } + } else { + _videoStream = null; + _audioStream = null; + } + + if (null != _audioStream) { + if (null == _audioElement) { + _audioElement = web.HTMLAudioElement() + ..id = _elementIdForAudio + ..muted = stream.ownerTag == 'local' + ..autoplay = true; + _ensureAudioManagerDiv().append(_audioElement!); + } + _audioElement?.srcObject = _audioStream; + } + + var videoElement = findHtmlView(); + if (null != videoElement) { + videoElement.srcObject = _videoStream; + _applyDefaultVideoStyles(findHtmlView()!); + } + + value = value.copyWith(renderVideo: renderVideo); + } + + Future setSrcObject({MediaStream? stream, String? 
trackId}) async { + if (stream == null) { + findHtmlView()?.srcObject = null; + _audioElement?.srcObject = null; + _srcObject = null; + return; + } + + _srcObject = stream as MediaStreamWeb; + + if (null != _srcObject) { + if (stream.getVideoTracks().isNotEmpty) { + _videoStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getVideoTracks().toDart) { + if (track.id == trackId) { + _videoStream!.addTrack(track); + } + } + } + if (stream.getAudioTracks().isNotEmpty) { + _audioStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getAudioTracks().toDart) { + _audioStream!.addTrack(track); + } + } + } else { + _videoStream = null; + _audioStream = null; + } + + if (null != _audioStream) { + if (null == _audioElement) { + _audioElement = web.HTMLAudioElement() + ..id = _elementIdForAudio + ..muted = stream.ownerTag == 'local' + ..autoplay = true; + _ensureAudioManagerDiv().append(_audioElement!); + } + _audioElement?.srcObject = _audioStream; + } + + var videoElement = findHtmlView(); + if (null != videoElement) { + videoElement.srcObject = _videoStream; + _applyDefaultVideoStyles(findHtmlView()!); + } + + value = value.copyWith(renderVideo: renderVideo); + } + + web.HTMLDivElement _ensureAudioManagerDiv() { + var div = web.document.getElementById(_elementIdForAudioManager); + if (null != div) return div as web.HTMLDivElement; + + div = web.HTMLDivElement() + ..id = _elementIdForAudioManager + ..style.display = 'none'; + web.document.body?.append(div); + return div as web.HTMLDivElement; + } + + web.HTMLVideoElement? 
findHtmlView() { + final element = web.document.getElementById(_elementIdForVideo); + if (null != element) return element as web.HTMLVideoElement; + return null; + } + + @override + Future dispose() async { + _srcObject = null; + for (var s in _subscriptions) { + s.cancel(); + } + final element = findHtmlView(); + element?.removeAttribute('src'); + element?.load(); + _audioElement?.remove(); + final audioManager = web.document.getElementById(_elementIdForAudioManager) + as web.HTMLDivElement?; + if (audioManager != null && !audioManager.hasChildNodes()) { + audioManager.remove(); + } + return super.dispose(); + } + + @override + Future audioOutput(String deviceId) async { + try { + final element = _audioElement; + if (null != element) { + await element.setSinkId(deviceId).toDart; + return true; + } + } catch (e) { + print('Unable to setSinkId: ${e.toString()}'); + } + return false; + } + + @override + Future initialize() async { + web_ui.platformViewRegistry.registerViewFactory(viewType, (int viewId) { + for (var s in _subscriptions) { + s.cancel(); + } + _subscriptions.clear(); + + final element = web.HTMLVideoElement() + ..autoplay = true + ..muted = true + ..controls = false + ..srcObject = _videoStream + ..id = _elementIdForVideo + ..setAttribute('playsinline', 'true'); + + _applyDefaultVideoStyles(element); + + _subscriptions.add( + element.onCanPlay.listen((dynamic _) { + _updateAllValues(element); + }), + ); + + _subscriptions.add( + element.onResize.listen((dynamic _) { + _updateAllValues(element); + onResize?.call(); + }), + ); + + // The error event fires when some form of error occurs while attempting to load or perform the media. + _subscriptions.add( + element.onError.listen((web.Event _) { + // The Event itself (_) doesn't contain info about the actual error. + // We need to look at the HTMLMediaElement.error. 
+ // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error + final error = element.error; + print('RTCVideoRenderer: videoElement.onError, ${error.toString()}'); + throw PlatformException( + code: _kErrorValueToErrorName[error!.code]!, + message: + error.message != '' ? error.message : _kDefaultErrorMessage, + details: _kErrorValueToErrorDescription[error.code], + ); + }), + ); + + _subscriptions.add( + element.onEnded.listen((dynamic _) { + // print('RTCVideoRenderer: videoElement.onEnded'); + }), + ); + + return element; + }); + } + + void _applyDefaultVideoStyles(web.HTMLVideoElement element) { + // Flip the video horizontally if is mirrored. + if (mirror) { + element.style.transform = 'scaleX(-1)'; + } + + element + ..style.objectFit = _objectFit + ..style.border = 'none' + ..style.width = '100%' + ..style.height = '100%'; + } + + @override + Function? onResize; + + @override + Function? onFirstFrameRendered; +} diff --git a/lib/src/web/rtc_video_view.dart b/lib/src/web/rtc_video_view.dart deleted file mode 100644 index 34e5b8b439..0000000000 --- a/lib/src/web/rtc_video_view.dart +++ /dev/null @@ -1,238 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; - -import 'package:flutter/material.dart'; -import 'package:flutter/services.dart'; - -import '../enums.dart'; -import './ui_fake.dart' if (dart.library.html) 'dart:ui' as ui; -import 'media_stream.dart'; - -// An error code value to error name Map. -// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code -const Map _kErrorValueToErrorName = { - 1: 'MEDIA_ERR_ABORTED', - 2: 'MEDIA_ERR_NETWORK', - 3: 'MEDIA_ERR_DECODE', - 4: 'MEDIA_ERR_SRC_NOT_SUPPORTED', -}; - -// An error code value to description Map. 
-// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code -const Map _kErrorValueToErrorDescription = { - 1: 'The user canceled the fetching of the video.', - 2: 'A network error occurred while fetching the video, despite having previously been available.', - 3: 'An error occurred while trying to decode the video, despite having previously been determined to be usable.', - 4: 'The video has been found to be unsuitable (missing or in a format not supported by your browser).', -}; - -// The default error message, when the error is an empty string -// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/message -const String _kDefaultErrorMessage = - 'No further diagnostic information can be determined or provided.'; - -@immutable -class RTCVideoValue { - const RTCVideoValue({ - this.width = 0.0, - this.height = 0.0, - this.rotation = 0, - this.renderVideo = false, - }); - static const RTCVideoValue empty = RTCVideoValue(); - final double width; - final double height; - final int rotation; - final bool renderVideo; - double get aspectRatio { - if (width == 0.0 || height == 0.0) { - return 1.0; - } - return (rotation == 90 || rotation == 270) - ? height / width - : width / height; - } - - RTCVideoValue copyWith({ - double width, - double height, - int rotation, - bool renderVideo, - }) { - return RTCVideoValue( - width: width ?? this.width, - height: height ?? this.height, - rotation: rotation ?? this.rotation, - renderVideo: (this.width != 0 && this.height != 0 && renderVideo) ?? - this.renderVideo, - ); - } - - @override - String toString() => - '$runtimeType(width: $width, height: $height, rotation: $rotation)'; -} - -class RTCVideoRenderer extends ValueNotifier { - RTCVideoRenderer() - : textureId = _textureCounter++, - super(RTCVideoValue.empty); - - static int _textureCounter = 1; - final int textureId; - html.VideoElement videoElement; - MediaStream _srcObject; - final _subscriptions = []; - - bool get muted => videoElement?.muted ?? 
true; - - set muted(bool mute) => videoElement?.muted = mute; - - bool get renderVideo => videoElement != null && srcObject != null; - - Future initialize() async { - videoElement = html.VideoElement() - //..src = 'https://flutter-webrtc-video-view-RTCVideoRenderer-$textureId' - ..autoplay = true - ..controls = false - ..style.objectFit = 'contain' // contain or cover - ..style.border = 'none'; - - // Allows Safari iOS to play the video inline - videoElement.setAttribute('playsinline', 'true'); - - // ignore: undefined_prefixed_name - ui.platformViewRegistry.registerViewFactory( - 'RTCVideoRenderer-$textureId', (int viewId) => videoElement); - - _subscriptions.add( - videoElement.onCanPlay.listen( - (dynamic _) { - _updateAllValues(); - print('RTCVideoRenderer: videoElement.onCanPlay ${value.toString()}'); - }, - ), - ); - - _subscriptions.add( - videoElement.onResize.listen( - (dynamic _) { - _updateAllValues(); - print('RTCVideoRenderer: videoElement.onResize ${value.toString()}'); - }, - ), - ); - - // The error event fires when some form of error occurs while attempting to load or perform the media. - _subscriptions.add( - videoElement.onError.listen( - (html.Event _) { - // The Event itself (_) doesn't contain info about the actual error. - // We need to look at the HTMLMediaElement.error. - // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error - var error = videoElement.error; - throw PlatformException( - code: _kErrorValueToErrorName[error.code], - message: - error.message != '' ? error.message : _kDefaultErrorMessage, - details: _kErrorValueToErrorDescription[error.code], - ); - }, - ), - ); - - _subscriptions.add( - videoElement.onEnded.listen( - (dynamic _) { - print('RTCVideoRenderer: videoElement.onEnded'); - }, - ), - ); - } - - void _updateAllValues() { - value = value.copyWith( - rotation: 0, - width: videoElement?.videoWidth?.toDouble() ?? 0.0, - height: videoElement?.videoHeight?.toDouble() ?? 
0.0, - renderVideo: renderVideo); - } - - MediaStream get srcObject => _srcObject; - - set srcObject(MediaStream stream) { - if (videoElement == null) throw 'Call initialize before setting the stream'; - - if (stream == null) { - videoElement.srcObject = null; - _srcObject = null; - return; - } - _srcObject = stream; - videoElement.srcObject = stream?.jsStream; - videoElement.muted = stream?.ownerTag == 'local' ?? false; - value = value.copyWith(renderVideo: renderVideo); - } - - @override - Future dispose() async { - super.dispose(); - await _srcObject?.dispose(); - _srcObject = null; - _subscriptions.forEach((s) => s.cancel()); - videoElement.removeAttribute('src'); - videoElement.load(); - } -} - -class RTCVideoView extends StatefulWidget { - RTCVideoView( - this._renderer, { - Key key, - this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, - this.mirror = false, - }) : assert(objectFit != null), - assert(mirror != null), - super(key: key); - - final RTCVideoRenderer _renderer; - final RTCVideoViewObjectFit objectFit; - final bool mirror; - @override - _RTCVideoViewState createState() => _RTCVideoViewState(); -} - -class _RTCVideoViewState extends State { - _RTCVideoViewState(); - - @override - void initState() { - super.initState(); - widget._renderer?.addListener(() => setState(() {})); - } - - Widget buildVideoElementView(RTCVideoViewObjectFit objFit, bool mirror) { - // TODO(cloudwebrtc): Add css style for mirror. - widget._renderer.videoElement.style.objectFit = - objFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain - ? 'contain' - : 'cover'; - return HtmlElementView( - viewType: 'RTCVideoRenderer-${widget._renderer.textureId}'); - } - - @override - Widget build(BuildContext context) { - return LayoutBuilder( - builder: (BuildContext context, BoxConstraints constraints) { - return Center( - child: Container( - width: constraints.maxWidth, - height: constraints.maxHeight, - child: widget._renderer.renderVideo - ? 
buildVideoElementView(widget.objectFit, widget.mirror) - : Container(), - )); - }); - } -} diff --git a/lib/src/web/rtc_video_view_impl.dart b/lib/src/web/rtc_video_view_impl.dart new file mode 100644 index 0000000000..9ef8ff1461 --- /dev/null +++ b/lib/src/web/rtc_video_view_impl.dart @@ -0,0 +1,88 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; + +import 'package:dart_webrtc/dart_webrtc.dart'; +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_video_renderer_impl.dart'; + +class RTCVideoView extends StatefulWidget { + RTCVideoView( + this._renderer, { + super.key, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + this.filterQuality = FilterQuality.low, + this.placeholderBuilder, + }); + + final RTCVideoRenderer _renderer; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + final FilterQuality filterQuality; + final WidgetBuilder? placeholderBuilder; + + @override + RTCVideoViewState createState() => RTCVideoViewState(); +} + +class RTCVideoViewState extends State { + RTCVideoViewState(); + + RTCVideoRenderer get videoRenderer => widget._renderer; + + @override + void initState() { + super.initState(); + videoRenderer.addListener(_onRendererListener); + videoRenderer.mirror = widget.mirror; + videoRenderer.objectFit = + widget.objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? 'contain' + : 'cover'; + } + + void _onRendererListener() { + if (mounted) setState(() {}); + } + + @override + void dispose() { + if (mounted) { + super.dispose(); + } + } + + @override + void didUpdateWidget(RTCVideoView oldWidget) { + super.didUpdateWidget(oldWidget); + Timer( + Duration(milliseconds: 10), () => videoRenderer.mirror = widget.mirror); + videoRenderer.objectFit = + widget.objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? 
'contain' + : 'cover'; + } + + Widget buildVideoElementView() { + return HtmlElementView(viewType: videoRenderer.viewType); + } + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) { + return Center( + child: Container( + width: constraints.maxWidth, + height: constraints.maxHeight, + child: widget._renderer.renderVideo + ? buildVideoElementView() + : widget.placeholderBuilder?.call(context) ?? Container(), + ), + ); + }, + ); + } +} diff --git a/lib/src/web/ui_fake.dart b/lib/src/web/ui_fake.dart deleted file mode 100644 index 16a677e014..0000000000 --- a/lib/src/web/ui_fake.dart +++ /dev/null @@ -1,4 +0,0 @@ -// ignore: camel_case_types -class platformViewRegistry { - static dynamic registerViewFactory(String viewId, dynamic cb) {} -} diff --git a/lib/src/web/utils.dart b/lib/src/web/utils.dart index fa2a78a496..9203763bf2 100644 --- a/lib/src/web/utils.dart +++ b/lib/src/web/utils.dart @@ -1,7 +1,24 @@ class WebRTC { static bool get platformIsDesktop => false; + static bool get platformIsWindows => false; + + static bool get platformIsMacOS => false; + + static bool get platformIsLinux => false; + static bool get platformIsMobile => false; + static bool get platformIsIOS => false; + + static bool get platformIsAndroid => false; + static bool get platformIsWeb => true; + + static Future invokeMethod(String methodName, + [dynamic param]) async => + throw UnimplementedError(); + + static Future initialize({Map? 
options}) async => + throw UnimplementedError('initialize is not supported on web'); } diff --git a/linux/CMakeLists.txt b/linux/CMakeLists.txt new file mode 100644 index 0000000000..5e968390f2 --- /dev/null +++ b/linux/CMakeLists.txt @@ -0,0 +1,64 @@ +cmake_minimum_required(VERSION 3.10) +set(PROJECT_NAME "flutter_webrtc") +project(${PROJECT_NAME} LANGUAGES CXX) + +set(PLUGIN_NAME "${PROJECT_NAME}_plugin") +set (CMAKE_EXPORT_COMPILE_COMMANDS ON ) +set(CMAKE_CXX_STANDARD 17) + +add_definitions(-DRTC_DESKTOP_DEVICE) + +add_library(${PLUGIN_NAME} SHARED + "../third_party/uuidxx/uuidxx.cc" + "../common/cpp/src/flutter_data_channel.cc" + "../common/cpp/src/flutter_frame_cryptor.cc" + "../common/cpp/src/flutter_media_stream.cc" + "../common/cpp/src/flutter_peerconnection.cc" + "../common/cpp/src/flutter_frame_capturer.cc" + "../common/cpp/src/flutter_video_renderer.cc" + "../common/cpp/src/flutter_screen_capture.cc" + "../common/cpp/src/flutter_webrtc.cc" + "../common/cpp/src/flutter_webrtc_base.cc" + "../common/cpp/src/flutter_common.cc" + "flutter_webrtc_plugin.cc" + "flutter/core_implementations.cc" + "flutter/standard_codec.cc" + "flutter/plugin_registrar.cc" + "task_runner_linux.cc" +) + +include_directories( + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/flutter/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/uuidxx" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/svpng" +) + +apply_standard_settings(${PLUGIN_NAME}) +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) +target_include_directories(${PLUGIN_NAME} INTERFACE +"${CMAKE_CURRENT_SOURCE_DIR}") +target_link_libraries(${PLUGIN_NAME} PRIVATE flutter) +target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::GTK) + + +target_link_libraries(${PLUGIN_NAME} PRIVATE + 
"${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" +) + +# List of absolute paths to libraries that should be bundled with the plugin +set(flutter_webrtc_bundled_libraries + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" + PARENT_SCOPE +) + +# Add $ORIGIN to RPATH so that lib/libflutter_webrtc_plugin.so can find lib/libwebrtc.so at runtime +set_property( + TARGET ${PLUGIN_NAME} + PROPERTY BUILD_RPATH + "\$ORIGIN" +) diff --git a/linux/flutter/binary_messenger_impl.h b/linux/flutter/binary_messenger_impl.h new file mode 100644 index 0000000000..be410be023 --- /dev/null +++ b/linux/flutter/binary_messenger_impl.h @@ -0,0 +1,50 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BINARY_MESSENGER_IMPL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BINARY_MESSENGER_IMPL_H_ + +#include + +#include +#include + +#include "include/flutter/binary_messenger.h" + +namespace flutter { + +// Wrapper around a FlutterDesktopMessengerRef that implements the +// BinaryMessenger API. +class BinaryMessengerImpl : public BinaryMessenger { + public: + explicit BinaryMessengerImpl(FlBinaryMessenger* core_messenger); + + virtual ~BinaryMessengerImpl(); + + // Prevent copying. + BinaryMessengerImpl(BinaryMessengerImpl const&) = delete; + BinaryMessengerImpl& operator=(BinaryMessengerImpl const&) = delete; + + // |flutter::BinaryMessenger| + void Send(const std::string& channel, + const uint8_t* message, + size_t message_size, + BinaryReply reply) const override; + + // |flutter::BinaryMessenger| + void SetMessageHandler(const std::string& channel, + BinaryMessageHandler handler) override; + + private: + // Handle for interacting with the C API. 
+ FlBinaryMessenger* messenger_; + + // A map from channel names to the BinaryMessageHandler that should be called + // for incoming messages on that channel. + std::map handlers_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BINARY_MESSENGER_IMPL_H_ diff --git a/linux/flutter/byte_buffer_streams.h b/linux/flutter/byte_buffer_streams.h new file mode 100644 index 0000000000..55b01c8831 --- /dev/null +++ b/linux/flutter/byte_buffer_streams.h @@ -0,0 +1,102 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BYTE_BUFFER_STREAMS_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BYTE_BUFFER_STREAMS_H_ + +#include +#include +#include +#include +#include + +#include "include/flutter/byte_streams.h" + +namespace flutter { + +// Implementation of ByteStreamReader based on a byte array. +class ByteBufferStreamReader : public ByteStreamReader { + public: + // Creates a reader reading from |bytes|, which must have a length of |size|. + // |bytes| must remain valid for the lifetime of this object. 
+ explicit ByteBufferStreamReader(const uint8_t* bytes, size_t size) + : bytes_(bytes), size_(size) {} + + virtual ~ByteBufferStreamReader() = default; + + // |ByteStreamReader| + uint8_t ReadByte() override { + if (location_ >= size_) { + std::cerr << "Invalid read in StandardCodecByteStreamReader" << std::endl; + return 0; + } + return bytes_[location_++]; + } + + // |ByteStreamReader| + void ReadBytes(uint8_t* buffer, size_t length) override { + if (location_ + length > size_) { + std::cerr << "Invalid read in StandardCodecByteStreamReader" << std::endl; + return; + } + std::memcpy(buffer, &bytes_[location_], length); + location_ += length; + } + + // |ByteStreamReader| + void ReadAlignment(uint8_t alignment) override { + uint8_t mod = location_ % alignment; + if (mod) { + location_ += alignment - mod; + } + } + + private: + // The buffer to read from. + const uint8_t* bytes_; + // The total size of the buffer. + size_t size_; + // The current read location. + size_t location_ = 0; +}; + +// Implementation of ByteStreamWriter based on a byte array. +class ByteBufferStreamWriter : public ByteStreamWriter { + public: + // Creates a writer that writes into |buffer|. + // |buffer| must remain valid for the lifetime of this object. + explicit ByteBufferStreamWriter(std::vector* buffer) + : bytes_(buffer) { + assert(buffer); + } + + virtual ~ByteBufferStreamWriter() = default; + + // |ByteStreamWriter| + void WriteByte(uint8_t byte) { bytes_->push_back(byte); } + + // |ByteStreamWriter| + void WriteBytes(const uint8_t* bytes, size_t length) { + assert(length > 0); + bytes_->insert(bytes_->end(), bytes, bytes + length); + } + + // |ByteStreamWriter| + void WriteAlignment(uint8_t alignment) { + uint8_t mod = bytes_->size() % alignment; + if (mod) { + for (int i = 0; i < alignment - mod; ++i) { + WriteByte(0); + } + } + } + + private: + // The buffer to write to. 
+ std::vector* bytes_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BYTE_BUFFER_STREAMS_H_ diff --git a/linux/flutter/core_implementations.cc b/linux/flutter/core_implementations.cc new file mode 100644 index 0000000000..500ccca2ab --- /dev/null +++ b/linux/flutter/core_implementations.cc @@ -0,0 +1,257 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file contains the implementations of any class in the wrapper that +// - is not fully inline, and +// - is necessary for all clients of the wrapper (either app or plugin). +// It exists instead of the usual structure of having some_class_name.cc files +// so that changes to the set of things that need non-header implementations +// are not breaking changes for the template. +// +// If https://github.com/flutter/flutter/issues/57146 is fixed, this can be +// removed in favor of the normal structure since templates will no longer +// manually include files. 
+ +#include +#include +#include + +#include "binary_messenger_impl.h" +#include "include/flutter/engine_method_result.h" +#include "include/flutter/texture_registrar.h" +#include "texture_registrar_impl.h" + +struct FlTextureProxy { + FlPixelBufferTexture parent_instance; + flutter::TextureVariant* texture = nullptr; +}; + +struct FlTextureProxyClass { + FlPixelBufferTextureClass parent_class; +}; + +G_DEFINE_TYPE(FlTextureProxy, + fl_texture_proxy, + fl_pixel_buffer_texture_get_type()) + +#define FL_TEXTURE_PROXY(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), fl_texture_proxy_get_type(), \ + FlTextureProxy)) + +static gboolean fl_texture_proxy_copy_pixels(FlPixelBufferTexture* texture, + const uint8_t** out_buffer, + uint32_t* width, + uint32_t* height, + GError** error) { + FlTextureProxy* proxy = FL_TEXTURE_PROXY(texture); + flutter::PixelBufferTexture& pixel_buffer = + std::get(*proxy->texture); + const FlutterDesktopPixelBuffer* copy = + pixel_buffer.CopyPixelBuffer(*width, *height); + if (copy == nullptr) { + return TRUE; + } + *out_buffer = copy->buffer; + *width = copy->width; + *height = copy->height; + return TRUE; +} + +static FlTextureProxy* fl_texture_proxy_new(flutter::TextureVariant* texture) { + FlTextureProxy* proxy = + FL_TEXTURE_PROXY(g_object_new(fl_texture_proxy_get_type(), nullptr)); + proxy->texture = texture; + return proxy; +} + +static void fl_texture_proxy_class_init(FlTextureProxyClass* klass) { + FL_PIXEL_BUFFER_TEXTURE_CLASS(klass)->copy_pixels = + fl_texture_proxy_copy_pixels; +} + +static void fl_texture_proxy_init(FlTextureProxy* self) {} + +namespace flutter { + +// ========== binary_messenger_impl.h ========== + +namespace { +// Passes |message| to |user_data|, which must be a BinaryMessageHandler, along +// with a BinaryReply that will send a response on |message|'s response handle. 
+// +// This serves as an adaptor between the function-pointer-based message callback +// interface provided by the C API and the std::function-based message handler +// interface of BinaryMessenger. +static void ForwardToHandler(FlBinaryMessenger* messenger, + const gchar* channel, + GBytes* message, + FlBinaryMessengerResponseHandle* response_handle, + gpointer user_data) { + auto handler = g_object_ref(response_handle); + BinaryReply reply_handler = [messenger, handler](const uint8_t* reply, + size_t reply_size) mutable { + if (!handler) { + std::cerr << "Error: Response can be set only once. Ignoring " + "duplicate response." + << std::endl; + return; + } + + g_autoptr(GBytes) response = g_bytes_new(reply, reply_size); + GError* error = nullptr; + if (!fl_binary_messenger_send_response( + messenger, (FlBinaryMessengerResponseHandle*)handler, response, + &error)) { + g_warning("Failed to send binary response: %s", error->message); + } + }; + + const BinaryMessageHandler& message_handler = + *static_cast(user_data); + + if (user_data == nullptr) { + std::cerr << "Error: user_data is null" << std::endl; + return; + } + + message_handler( + static_cast(g_bytes_get_data(message, nullptr)), + g_bytes_get_size(message), std::move(reply_handler)); +} +} // namespace + +BinaryMessengerImpl::BinaryMessengerImpl(FlBinaryMessenger* core_messenger) + : messenger_(core_messenger) {} + +BinaryMessengerImpl::~BinaryMessengerImpl() = default; + +struct Captures { + BinaryReply reply; +}; + +static void message_reply_cb(GObject* object, + GAsyncResult* result, + gpointer user_data) { + g_autoptr(GError) error = nullptr; + auto captures = reinterpret_cast(user_data); + g_autoptr(GBytes) message = fl_binary_messenger_send_on_channel_finish( + FL_BINARY_MESSENGER(object), result, &error); + captures->reply( + static_cast(g_bytes_get_data(message, nullptr)), + g_bytes_get_size(message)); + delete captures; +}; + +void BinaryMessengerImpl::Send(const std::string& channel, + const 
uint8_t* message, + size_t message_size, + BinaryReply reply) const { + if (reply == nullptr) { + g_autoptr(GBytes) data = g_bytes_new(message, message_size); + fl_binary_messenger_send_on_channel(messenger_, channel.c_str(), data, + nullptr, nullptr, nullptr); + return; + } + + auto captures = new Captures(); + captures->reply = reply; + + g_autoptr(GBytes) data = g_bytes_new(message, message_size); + fl_binary_messenger_send_on_channel(messenger_, channel.c_str(), data, + nullptr, message_reply_cb, captures); +} + +void BinaryMessengerImpl::SetMessageHandler(const std::string& channel, + BinaryMessageHandler handler) { + if (!handler) { + handlers_.erase(channel); + fl_binary_messenger_set_message_handler_on_channel( + messenger_, channel.c_str(), nullptr, nullptr, nullptr); + return; + } + // Save the handler, to keep it alive. + handlers_[channel] = std::move(handler); + BinaryMessageHandler* message_handler = &handlers_[channel]; + // Set an adaptor callback that will invoke the handler. + fl_binary_messenger_set_message_handler_on_channel( + messenger_, channel.c_str(), ForwardToHandler, message_handler, nullptr); +} + +// ========== engine_method_result.h ========== + +namespace internal { + +ReplyManager::ReplyManager(BinaryReply reply_handler) + : reply_handler_(std::move(reply_handler)) { + assert(reply_handler_); +} + +ReplyManager::~ReplyManager() { + if (reply_handler_) { + // Warn, rather than send a not-implemented response, since the engine may + // no longer be valid at this point. + std::cerr + << "Warning: Failed to respond to a message. This is a memory leak." + << std::endl; + } +} + +void ReplyManager::SendResponseData(const std::vector* data) { + if (!reply_handler_) { + std::cerr + << "Error: Only one of Success, Error, or NotImplemented can be " + "called," + << " and it can be called exactly once. Ignoring duplicate result." + << std::endl; + return; + } + + const uint8_t* message = data && !data->empty() ? 
data->data() : nullptr; + size_t message_size = data ? data->size() : 0; + reply_handler_(message, message_size); + reply_handler_ = nullptr; +} + +} // namespace internal + +// ========== texture_registrar_impl.h ========== + +TextureRegistrarImpl::TextureRegistrarImpl( + FlTextureRegistrar* texture_registrar_ref) + : texture_registrar_ref_(texture_registrar_ref) {} + +TextureRegistrarImpl::~TextureRegistrarImpl() = default; + +int64_t TextureRegistrarImpl::RegisterTexture(TextureVariant* texture) { + auto texture_proxy = fl_texture_proxy_new(texture); + fl_texture_registrar_register_texture(texture_registrar_ref_, + FL_TEXTURE(texture_proxy)); + int64_t texture_id = reinterpret_cast(texture_proxy); + textures_[texture_id] = texture_proxy; + return texture_id; +} + +bool TextureRegistrarImpl::MarkTextureFrameAvailable(int64_t texture_id) { + auto it = textures_.find(texture_id); + if (it != textures_.end()) { + return fl_texture_registrar_mark_texture_frame_available( + texture_registrar_ref_, FL_TEXTURE(it->second)); + } + return false; +} + +bool TextureRegistrarImpl::UnregisterTexture(int64_t texture_id) { + auto it = textures_.find(texture_id); + if (it != textures_.end()) { + auto texture = it->second; + textures_.erase(it); + bool success = fl_texture_registrar_unregister_texture( + texture_registrar_ref_, FL_TEXTURE(texture)); + g_object_unref(texture); + return success; + } + return false; +} + +} // namespace flutter diff --git a/linux/flutter/include/flutter/basic_message_channel.h b/linux/flutter/include/flutter/basic_message_channel.h new file mode 100644 index 0000000000..c0819465c5 --- /dev/null +++ b/linux/flutter/include/flutter/basic_message_channel.h @@ -0,0 +1,110 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BASIC_MESSAGE_CHANNEL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BASIC_MESSAGE_CHANNEL_H_ + +#include +#include + +#include "binary_messenger.h" +#include "message_codec.h" + +namespace flutter { + +class EncodableValue; + +// A message reply callback. +// +// Used for submitting a reply back to a Flutter message sender. +template +using MessageReply = std::function; + +// A handler for receiving a message from the Flutter engine. +// +// Implementations must asynchronously call reply exactly once with the reply +// to the message. +template +using MessageHandler = + std::function& reply)>; + +// A channel for communicating with the Flutter engine by sending asynchronous +// messages. +template +class BasicMessageChannel { + public: + // Creates an instance that sends and receives method calls on the channel + // named |name|, encoded with |codec| and dispatched via |messenger|. + BasicMessageChannel(BinaryMessenger* messenger, + const std::string& name, + const MessageCodec* codec) + : messenger_(messenger), name_(name), codec_(codec) {} + + ~BasicMessageChannel() = default; + + // Prevent copying. + BasicMessageChannel(BasicMessageChannel const&) = delete; + BasicMessageChannel& operator=(BasicMessageChannel const&) = delete; + + // Sends a message to the Flutter engine on this channel. + void Send(const T& message) { + std::unique_ptr> raw_message = + codec_->EncodeMessage(message); + messenger_->Send(name_, raw_message->data(), raw_message->size()); + } + + // Sends a message to the Flutter engine on this channel expecting a reply. + void Send(const T& message, BinaryReply reply) { + std::unique_ptr> raw_message = + codec_->EncodeMessage(message); + messenger_->Send(name_, raw_message->data(), raw_message->size(), reply); + } + + // Registers a handler that should be called any time a message is + // received on this channel. 
A null handler will remove any previous handler. + // + // Note that the BasicMessageChannel does not own the handler, and will not + // unregister it on destruction, so the caller is responsible for + // unregistering explicitly if it should no longer be called. + void SetMessageHandler(const MessageHandler& handler) const { + if (!handler) { + messenger_->SetMessageHandler(name_, nullptr); + return; + } + const auto* codec = codec_; + std::string channel_name = name_; + BinaryMessageHandler binary_handler = [handler, codec, channel_name]( + const uint8_t* binary_message, + const size_t binary_message_size, + BinaryReply binary_reply) { + // Use this channel's codec to decode the message and build a reply + // handler. + std::unique_ptr message = + codec->DecodeMessage(binary_message, binary_message_size); + if (!message) { + std::cerr << "Unable to decode message on channel " << channel_name + << std::endl; + binary_reply(nullptr, 0); + return; + } + + MessageReply unencoded_reply = [binary_reply, + codec](const T& unencoded_response) { + auto binary_response = codec->EncodeMessage(unencoded_response); + binary_reply(binary_response->data(), binary_response->size()); + }; + handler(*message, std::move(unencoded_reply)); + }; + messenger_->SetMessageHandler(name_, std::move(binary_handler)); + } + + private: + BinaryMessenger* messenger_; + std::string name_; + const MessageCodec* codec_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BASIC_MESSAGE_CHANNEL_H_ diff --git a/linux/flutter/include/flutter/binary_messenger.h b/linux/flutter/include/flutter/binary_messenger.h new file mode 100644 index 0000000000..d552b499ee --- /dev/null +++ b/linux/flutter/include/flutter/binary_messenger.h @@ -0,0 +1,52 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BINARY_MESSENGER_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BINARY_MESSENGER_H_ + +#include +#include + +namespace flutter { + +// A binary message reply callback. +// +// Used for submitting a binary reply back to a Flutter message sender. +typedef std::function + BinaryReply; + +// A message handler callback. +// +// Used for receiving messages from Flutter and providing an asynchronous reply. +typedef std::function< + void(const uint8_t* message, size_t message_size, BinaryReply reply)> + BinaryMessageHandler; + +// A protocol for a class that handles communication of binary data on named +// channels to and from the Flutter engine. +class BinaryMessenger { + public: + virtual ~BinaryMessenger() = default; + + // Sends a binary message to the Flutter engine on the specified channel. + // + // If |reply| is provided, it will be called back with the response from the + // engine. + virtual void Send(const std::string& channel, + const uint8_t* message, + size_t message_size, + BinaryReply reply = nullptr) const = 0; + + // Registers a message handler for incoming binary messages from the Flutter + // side on the specified channel. + // + // Replaces any existing handler. Provide a null handler to unregister the + // existing handler. + virtual void SetMessageHandler(const std::string& channel, + BinaryMessageHandler handler) = 0; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BINARY_MESSENGER_H_ diff --git a/linux/flutter/include/flutter/byte_streams.h b/linux/flutter/include/flutter/byte_streams.h new file mode 100644 index 0000000000..3360bab945 --- /dev/null +++ b/linux/flutter/include/flutter/byte_streams.h @@ -0,0 +1,85 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BYTE_STREAMS_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BYTE_STREAMS_H_ + +// Interfaces for interacting with a stream of bytes, for use in codecs. + +namespace flutter { + +// An interface for a class that reads from a byte stream. +class ByteStreamReader { + public: + explicit ByteStreamReader() = default; + virtual ~ByteStreamReader() = default; + + // Reads and returns the next byte from the stream. + virtual uint8_t ReadByte() = 0; + + // Reads the next |length| bytes from the stream into |buffer|. The caller + // is responsible for ensuring that |buffer| is large enough. + virtual void ReadBytes(uint8_t* buffer, size_t length) = 0; + + // Advances the read cursor to the next multiple of |alignment| relative to + // the start of the stream, unless it is already aligned. + virtual void ReadAlignment(uint8_t alignment) = 0; + + // Reads and returns the next 32-bit integer from the stream. + int32_t ReadInt32() { + int32_t value = 0; + ReadBytes(reinterpret_cast(&value), 4); + return value; + } + + // Reads and returns the next 64-bit integer from the stream. + int64_t ReadInt64() { + int64_t value = 0; + ReadBytes(reinterpret_cast(&value), 8); + return value; + } + + // Reads and returns the next 64-bit floating point number from the stream. + double ReadDouble() { + double value = 0; + ReadBytes(reinterpret_cast(&value), 8); + return value; + } +}; + +// An interface for a class that writes to a byte stream. +class ByteStreamWriter { + public: + explicit ByteStreamWriter() = default; + virtual ~ByteStreamWriter() = default; + + // Writes |byte| to the stream. 
+  virtual void WriteByte(uint8_t byte) = 0; + +  // Writes the next |length| bytes from |bytes| to the stream +  virtual void WriteBytes(const uint8_t* bytes, size_t length) = 0; + +  // Writes 0s until the next multiple of |alignment| relative to the start +  // of the stream, unless the write position is already aligned. +  virtual void WriteAlignment(uint8_t alignment) = 0; + +  // Writes the given 32-bit int to the stream. +  void WriteInt32(int32_t value) { +    WriteBytes(reinterpret_cast<const uint8_t*>(&value), 4); +  } + +  // Writes the given 64-bit int to the stream. +  void WriteInt64(int64_t value) { +    WriteBytes(reinterpret_cast<const uint8_t*>(&value), 8); +  } + +  // Writes the given 64-bit double to the stream. +  void WriteDouble(double value) { +    WriteBytes(reinterpret_cast<const uint8_t*>(&value), 8); +  } +}; + +}  // namespace flutter + +#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BYTE_STREAMS_H_ diff --git a/linux/flutter/include/flutter/encodable_value.h b/linux/flutter/include/flutter/encodable_value.h new file mode 100644 index 0000000000..96490a2e12 --- /dev/null +++ b/linux/flutter/include/flutter/encodable_value.h @@ -0,0 +1,222 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENCODABLE_VALUE_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENCODABLE_VALUE_H_ + +#include <any> +#include <cassert> +#include <cstdint> +#include <map> +#include <string> +#include <utility> +#include <variant> +#include <vector> + +// Unless overridden, attempt to detect the RTTI state from the compiler.
+#ifndef FLUTTER_ENABLE_RTTI +#if defined(_MSC_VER) +#ifdef _CPPRTTI +#define FLUTTER_ENABLE_RTTI 1 +#endif +#elif defined(__clang__) +#if __has_feature(cxx_rtti) +#define FLUTTER_ENABLE_RTTI 1 +#endif +#elif defined(__GNUC__) +#ifdef __GXX_RTTI +#define FLUTTER_ENABLE_RTTI 1 +#endif +#endif +#endif // #ifndef FLUTTER_ENABLE_RTTI + +namespace flutter { + +static_assert(sizeof(double) == 8, "EncodableValue requires a 64-bit double"); + +// A container for arbitrary types in EncodableValue. +// +// This is used in conjunction with StandardCodecExtension to allow using other +// types with a StandardMethodCodec/StandardMessageCodec. It is implicitly +// convertible to EncodableValue, so constructing an EncodableValue from a +// custom type can generally be written as: +// CustomEncodableValue(MyType(...)) +// rather than: +// EncodableValue(CustomEncodableValue(MyType(...))) +// +// For extracting received custom types, it is implicitly convertible to +// std::any. For example: +// const MyType& my_type_value = +// std::any_cast(std::get(value)); +// +// If RTTI is enabled, different extension types can be checked with type(): +// if (custom_value->type() == typeid(SomeData)) { ... } +// Clients that wish to disable RTTI would need to decide on another approach +// for distinguishing types (e.g., in StandardCodecExtension::WriteValueOfType) +// if multiple custom types are needed. For instance, wrapping all of the +// extension types in an EncodableValue-style variant, and only ever storing +// that variant in CustomEncodableValue. +class CustomEncodableValue { + public: + explicit CustomEncodableValue(const std::any& value) : value_(value) {} + ~CustomEncodableValue() = default; + + // Allow implicit conversion to std::any to allow direct use of any_cast. 
+ // NOLINTNEXTLINE(google-explicit-constructor) + operator std::any &() { return value_; } + // NOLINTNEXTLINE(google-explicit-constructor) + operator const std::any &() const { return value_; } + +#if defined(FLUTTER_ENABLE_RTTI) && FLUTTER_ENABLE_RTTI + // Passthrough to std::any's type(). + const std::type_info& type() const noexcept { return value_.type(); } +#endif + + // This operator exists only to provide a stable ordering for use as a + // std::map key, to satisfy the compiler requirements for EncodableValue. + // It does not attempt to provide useful ordering semantics, and using a + // custom value as a map key is not recommended. + bool operator<(const CustomEncodableValue& other) const { + return this < &other; + } + bool operator==(const CustomEncodableValue& other) const { + return this == &other; + } + + private: + std::any value_; +}; + +class EncodableValue; + +// Convenience type aliases. +using EncodableList = std::vector; +using EncodableMap = std::map; + +namespace internal { +// The base class for EncodableValue. Do not use this directly; it exists only +// for EncodableValue to inherit from. +// +// Do not change the order or indexes of the items here; see the comment on +// EncodableValue +using EncodableValueVariant = std::variant, + std::vector, + std::vector, + std::vector, + EncodableList, + EncodableMap, + CustomEncodableValue, + std::vector>; +} // namespace internal + +// An object that can contain any value or collection type supported by +// Flutter's standard method codec. 
+// +// For details, see: +// https://api.flutter.dev/flutter/services/StandardMessageCodec-class.html +// +// As an example, the following Dart structure: +// { +// 'flag': true, +// 'name': 'Thing', +// 'values': [1, 2.0, 4], +// } +// would correspond to: +// EncodableValue(EncodableMap{ +// {EncodableValue("flag"), EncodableValue(true)}, +// {EncodableValue("name"), EncodableValue("Thing")}, +// {EncodableValue("values"), EncodableValue(EncodableList{ +// EncodableValue(1), +// EncodableValue(2.0), +// EncodableValue(4), +// })}, +// }) +// +// The primary API surface for this object is std::variant. For instance, +// getting a string value from an EncodableValue, with type checking: +// if (std::holds_alternative(value)) { +// std::string some_string = std::get(value); +// } +// +// The order/indexes of the variant types is part of the API surface, and is +// guaranteed not to change. +// +// The variant types are mapped with Dart types in following ways: +// std::monostate -> null +// bool -> bool +// int32_t -> int +// int64_t -> int +// double -> double +// std::string -> String +// std::vector -> Uint8List +// std::vector -> Int32List +// std::vector -> Int64List +// std::vector -> Float32List +// std::vector -> Float64List +// EncodableList -> List +// EncodableMap -> Map +class EncodableValue : public internal::EncodableValueVariant { + public: + // Rely on std::variant for most of the constructors/operators. + using super = internal::EncodableValueVariant; + using super::super; + using super::operator=; + + explicit EncodableValue() = default; + + // Avoid the C++17 pitfall of conversion from char* to bool. Should not be + // needed for C++20. 
+ explicit EncodableValue(const char* string) : super(std::string(string)) {} + EncodableValue& operator=(const char* other) { + *this = std::string(other); + return *this; + } + + // Allow implicit conversion from CustomEncodableValue; the only reason to + // make a CustomEncodableValue (which can only be constructed explicitly) is + // to use it with EncodableValue, so the risk of unintended conversions is + // minimal, and it avoids the need for the verbose: + // EncodableValue(CustomEncodableValue(...)). + // NOLINTNEXTLINE(google-explicit-constructor) + EncodableValue(const CustomEncodableValue& v) : super(v) {} + + // Override the conversion constructors from std::variant to make them + // explicit, to avoid implicit conversion. + // + // While implicit conversion can be convenient in some cases, it can have very + // surprising effects. E.g., calling a function that takes an EncodableValue + // but accidentally passing an EncodableValue* would, instead of failing to + // compile, go through a pointer->bool->EncodableValue(bool) chain and + // silently call the function with a temp-constructed EncodableValue(true). + template + constexpr explicit EncodableValue(T&& t) noexcept : super(t) {} + + // Returns true if the value is null. Convenience wrapper since unlike the + // other types, std::monostate uses aren't self-documenting. + bool IsNull() const { return std::holds_alternative(*this); } + + // Convenience method to simplify handling objects received from Flutter + // where the values may be larger than 32-bit, since they have the same type + // on the Dart side, but will be either 32-bit or 64-bit here depending on + // the value. + // + // Calling this method if the value doesn't contain either an int32_t or an + // int64_t will throw an exception. 
+ int64_t LongValue() const { + if (std::holds_alternative(*this)) { + return std::get(*this); + } + return std::get(*this); + } +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENCODABLE_VALUE_H_ diff --git a/linux/flutter/include/flutter/engine_method_result.cc b/linux/flutter/include/flutter/engine_method_result.cc new file mode 100644 index 0000000000..65eaf5d435 --- /dev/null +++ b/linux/flutter/include/flutter/engine_method_result.cc @@ -0,0 +1,11 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is deprecated in favor of core_implementations.cc. This is a +// temporary forwarding implementation so that the switch to +// core_implementations.cc isn't an immediate breaking change, allowing for the +// template to be updated to include it and update the template version before +// removing this file. + +#include "core_implementations.cc" diff --git a/linux/flutter/include/flutter/engine_method_result.h b/linux/flutter/include/flutter/engine_method_result.h new file mode 100644 index 0000000000..3cc8b6a22c --- /dev/null +++ b/linux/flutter/include/flutter/engine_method_result.h @@ -0,0 +1,86 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_ + +#include +#include +#include + +#include "binary_messenger.h" +#include "method_codec.h" +#include "method_result.h" + +namespace flutter { + +namespace internal { +// Manages the one-time sending of response data. 
This is an internal helper +// class for EngineMethodResult, separated out since the implementation doesn't +// vary based on the template type. +class ReplyManager { + public: +  explicit ReplyManager(BinaryReply reply_handler); +  ~ReplyManager(); + +  // Prevent copying. +  ReplyManager(ReplyManager const&) = delete; +  ReplyManager& operator=(ReplyManager const&) = delete; + +  // Sends the given response data (which must either be nullptr, which +  // indicates an unhandled method, or a response serialized with |codec_|) to +  // the engine. +  void SendResponseData(const std::vector<uint8_t>* data); + + private: +  BinaryReply reply_handler_; +}; +}  // namespace internal + +// Implementation of MethodResult that sends a response to the Flutter engine +// exactly once, encoded using a given codec. +template <typename T> +class EngineMethodResult : public MethodResult<T> { + public: +  // Creates a result object that will send results to |reply_handler|, encoded +  // using |codec|. The |codec| pointer must remain valid for as long as this +  // object exists.
+ EngineMethodResult(BinaryReply reply_handler, const MethodCodec* codec) + : reply_manager_( + std::make_unique(std::move(reply_handler))), + codec_(codec) {} + + ~EngineMethodResult() = default; + + protected: + // |flutter::MethodResult| + void SuccessInternal(const T* result) override { + std::unique_ptr> data = + codec_->EncodeSuccessEnvelope(result); + reply_manager_->SendResponseData(data.get()); + } + + // |flutter::MethodResult| + void ErrorInternal(const std::string& error_code, + const std::string& error_message, + const T* error_details) override { + std::unique_ptr> data = + codec_->EncodeErrorEnvelope(error_code, error_message, error_details); + reply_manager_->SendResponseData(data.get()); + } + + // |flutter::MethodResult| + void NotImplementedInternal() override { + reply_manager_->SendResponseData(nullptr); + } + + private: + std::unique_ptr reply_manager_; + + const MethodCodec* codec_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_ diff --git a/linux/flutter/include/flutter/event_channel.h b/linux/flutter/include/flutter/event_channel.h new file mode 100644 index 0000000000..fe0e1414d6 --- /dev/null +++ b/linux/flutter/include/flutter/event_channel.h @@ -0,0 +1,173 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_ + +#include +#include +#include + +#include "binary_messenger.h" +#include "engine_method_result.h" +#include "event_sink.h" +#include "event_stream_handler.h" + +namespace flutter { + +class EncodableValue; + +// A named channel for communicating with the Flutter application using +// asynchronous event streams. 
Incoming requests for event stream setup are
+// decoded from binary on receipt, and C++ responses and events are encoded into
+// binary before being transmitted back to Flutter. The MethodCodec used must be
+// compatible with the one used by the Flutter application. This can be achieved
+// by creating an EventChannel
+// ("https://api.flutter.dev/flutter/services/EventChannel-class.html")
+// counterpart of this channel on the Dart side.
+// The C++ type of stream configuration arguments, events, and error details are
+// templated, but only values supported by the specified MethodCodec can be
+// used.
+template <typename T = EncodableValue>
+class EventChannel {
+ public:
+  // Creates an instance that sends and receives event handler on the channel
+  // named |name|, encoded with |codec| and dispatched via |messenger|.
+  EventChannel(BinaryMessenger* messenger,
+               const std::string& name,
+               const MethodCodec<T>* codec)
+      : messenger_(messenger), name_(name), codec_(codec) {}
+  ~EventChannel() = default;
+
+  // Prevent copying.
+  EventChannel(EventChannel const&) = delete;
+  EventChannel& operator=(EventChannel const&) = delete;
+
+  // Registers a stream handler on this channel.
+  // If no handler has been registered, any incoming stream setup requests will
+  // be handled silently by providing an empty stream.
+  void SetStreamHandler(std::unique_ptr<StreamHandler<T>> handler) {
+    if (!handler) {
+      messenger_->SetMessageHandler(name_, nullptr);
+      is_listening_ = false;
+      return;
+    }
+
+    // std::function requires a copyable lambda, so convert to a shared pointer.
+    // This is safe since only one copy of the shared_pointer will ever be
+    // accessed.
+    std::shared_ptr<StreamHandler<T>> shared_handler(handler.release());
+    const MethodCodec<T>* codec = codec_;
+    const std::string channel_name = name_;
+    const BinaryMessenger* messenger = messenger_;
+    BinaryMessageHandler binary_handler = [shared_handler, codec, channel_name,
+                                           messenger,
+                                           this](const uint8_t* message,
+                                                 const size_t message_size,
+                                                 BinaryReply reply) {
+      constexpr char kOnListenMethod[] = "listen";
+      constexpr char kOnCancelMethod[] = "cancel";
+
+      std::unique_ptr<MethodCall<T>> method_call =
+          codec->DecodeMethodCall(message, message_size);
+      if (!method_call) {
+        std::cerr << "Unable to construct method call from message on channel: "
+                  << channel_name << std::endl;
+        reply(nullptr, 0);
+        return;
+      }
+
+      const std::string& method = method_call->method_name();
+      if (method.compare(kOnListenMethod) == 0) {
+        if (is_listening_) {
+          std::unique_ptr<StreamHandlerError<T>> error =
+              shared_handler->OnCancel(nullptr);
+          if (error) {
+            std::cerr << "Failed to cancel existing stream: "
+                      << (error->error_code) << ", " << (error->error_message)
+                      << ", " << (error->error_details);
+          }
+        }
+        is_listening_ = true;
+
+        std::unique_ptr<std::vector<uint8_t>> result;
+        auto sink = std::make_unique<EventSinkImplementation>(
+            messenger, channel_name, codec);
+        std::unique_ptr<StreamHandlerError<T>> error =
+            shared_handler->OnListen(method_call->arguments(), std::move(sink));
+        if (error) {
+          result = codec->EncodeErrorEnvelope(
+              error->error_code, error->error_message, error->error_details);
+        } else {
+          result = codec->EncodeSuccessEnvelope();
+        }
+        reply(result->data(), result->size());
+      } else if (method.compare(kOnCancelMethod) == 0) {
+        std::unique_ptr<std::vector<uint8_t>> result;
+        if (is_listening_) {
+          std::unique_ptr<StreamHandlerError<T>> error =
+              shared_handler->OnCancel(method_call->arguments());
+          if (error) {
+            result = codec->EncodeErrorEnvelope(
+                error->error_code, error->error_message, error->error_details);
+          } else {
+            result = codec->EncodeSuccessEnvelope();
+          }
+          is_listening_ = false;
+        } else {
+          result = codec->EncodeErrorEnvelope(
+              "error", "No active stream to cancel", nullptr);
+        }
+        reply(result->data(), result->size());
+      } else {
+        reply(nullptr, 0);
+      }
+    };
+    messenger_->SetMessageHandler(name_, std::move(binary_handler));
+  }
+
+ private:
+  class EventSinkImplementation : public EventSink<T> {
+   public:
+    EventSinkImplementation(const BinaryMessenger* messenger,
+                            const std::string& name,
+                            const MethodCodec<T>* codec)
+        : messenger_(messenger), name_(name), codec_(codec) {}
+    ~EventSinkImplementation() = default;
+
+    // Prevent copying.
+    EventSinkImplementation(EventSinkImplementation const&) = delete;
+    EventSinkImplementation& operator=(EventSinkImplementation const&) = delete;
+
+   private:
+    const BinaryMessenger* messenger_;
+    const std::string name_;
+    const MethodCodec<T>* codec_;
+
+   protected:
+    void SuccessInternal(const T* event = nullptr) override {
+      auto result = codec_->EncodeSuccessEnvelope(event);
+      messenger_->Send(name_, result->data(), result->size());
+    }
+
+    void ErrorInternal(const std::string& error_code,
+                       const std::string& error_message,
+                       const T* error_details) override {
+      auto result =
+          codec_->EncodeErrorEnvelope(error_code, error_message, error_details);
+      messenger_->Send(name_, result->data(), result->size());
+    }
+
+    void EndOfStreamInternal() override { messenger_->Send(name_, nullptr, 0); }
+  };
+
+  BinaryMessenger* messenger_;
+  const std::string name_;
+  const MethodCodec<T>* codec_;
+  bool is_listening_ = false;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_
diff --git a/linux/flutter/include/flutter/event_sink.h b/linux/flutter/include/flutter/event_sink.h
new file mode 100644
index 0000000000..789be1eb96
--- /dev/null
+++ b/linux/flutter/include/flutter/event_sink.h
@@ -0,0 +1,62 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_
+
+namespace flutter {
+
+class EncodableValue;
+
+// Event callback. Events to be sent to Flutter application
+// act as clients of this interface for sending events.
+template <typename T = EncodableValue>
+class EventSink {
+ public:
+  EventSink() = default;
+  virtual ~EventSink() = default;
+
+  // Prevent copying.
+  EventSink(EventSink const&) = delete;
+  EventSink& operator=(EventSink const&) = delete;
+
+  // Consumes a successful event
+  void Success(const T& event) { SuccessInternal(&event); }
+
+  // Consumes a successful event.
+  void Success() { SuccessInternal(nullptr); }
+
+  // Consumes an error event.
+  void Error(const std::string& error_code,
+             const std::string& error_message,
+             const T& error_details) {
+    ErrorInternal(error_code, error_message, &error_details);
+  }
+
+  // Consumes an error event.
+  void Error(const std::string& error_code,
+             const std::string& error_message = "") {
+    ErrorInternal(error_code, error_message, nullptr);
+  }
+
+  // Consumes end of stream. Ensuing calls to Success() or
+  // Error(), if any, are ignored.
+  void EndOfStream() { EndOfStreamInternal(); }
+
+ protected:
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void SuccessInternal(const T* event = nullptr) = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void ErrorInternal(const std::string& error_code,
+                             const std::string& error_message,
+                             const T* error_details) = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void EndOfStreamInternal() = 0;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_
diff --git a/linux/flutter/include/flutter/event_stream_handler.h b/linux/flutter/include/flutter/event_stream_handler.h
new file mode 100644
index 0000000000..9eced6cf70
--- /dev/null
+++ b/linux/flutter/include/flutter/event_stream_handler.h
@@ -0,0 +1,74 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_H_
+
+#include "event_sink.h"
+
+namespace flutter {
+
+class EncodableValue;
+
+template <typename T = EncodableValue>
+struct StreamHandlerError {
+  const std::string& error_code;
+  const std::string& error_message;
+  const T* error_details;
+
+  StreamHandlerError(const std::string& error_code,
+                     const std::string& error_message,
+                     const T* error_details)
+      : error_code(error_code),
+        error_message(error_message),
+        error_details(error_details) {}
+};
+
+// Handler for stream setup and teardown requests.
+// Implementations must be prepared to accept sequences of alternating calls to
+// OnListen() and OnCancel(). Implementations should ideally consume no
+// resources when the last such call is not OnListen(). In typical situations,
+// this means that the implementation should register itself with
+// platform-specific event sources OnListen() and deregister again OnCancel().
+template <typename T = EncodableValue>
+class StreamHandler {
+ public:
+  StreamHandler() = default;
+  virtual ~StreamHandler() = default;
+
+  // Prevent copying.
+  StreamHandler(StreamHandler const&) = delete;
+  StreamHandler& operator=(StreamHandler const&) = delete;
+
+  // Handles a request to set up an event stream. Returns nullptr on success,
+  // or an error on failure.
+  // |arguments| is stream configuration arguments and
+  // |events| is an EventSink for emitting events to the Flutter receiver.
+  std::unique_ptr<StreamHandlerError<T>> OnListen(
+      const T* arguments,
+      std::unique_ptr<EventSink<T>>&& events) {
+    return OnListenInternal(arguments, std::move(events));
+  }
+
+  // Handles a request to tear down the most recently created event stream.
+  // Returns nullptr on success, or an error on failure.
+  // |arguments| is stream configuration arguments.
+  std::unique_ptr<StreamHandlerError<T>> OnCancel(const T* arguments) {
+    return OnCancelInternal(arguments);
+  }
+
+ protected:
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<StreamHandlerError<T>> OnListenInternal(
+      const T* arguments,
+      std::unique_ptr<EventSink<T>>&& events) = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<StreamHandlerError<T>> OnCancelInternal(
+      const T* arguments) = 0;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_H_
diff --git a/linux/flutter/include/flutter/event_stream_handler_functions.h b/linux/flutter/include/flutter/event_stream_handler_functions.h
new file mode 100644
index 0000000000..fde4ce4410
--- /dev/null
+++ b/linux/flutter/include/flutter/event_stream_handler_functions.h
@@ -0,0 +1,78 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_FUNCTIONS_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_FUNCTIONS_H_
+
+#include <memory>
+
+#include "event_sink.h"
+#include "event_stream_handler.h"
+
+namespace flutter {
+
+class EncodableValue;
+
+// Handler types for each of the StreamHandler setup and teardown
+// requests.
+template <typename T>
+using StreamHandlerListen =
+    std::function<std::unique_ptr<StreamHandlerError<T>>(
+        const T* arguments,
+        std::unique_ptr<EventSink<T>>&& events)>;
+
+template <typename T>
+using StreamHandlerCancel =
+    std::function<std::unique_ptr<StreamHandlerError<T>>(const T* arguments)>;
+
+// An implementation of StreamHandler that pass calls through to
+// provided function objects.
+template <typename T = EncodableValue>
+class StreamHandlerFunctions : public StreamHandler<T> {
+ public:
+  // Creates a handler object that calls the provided functions
+  // for the corresponding StreamHandler outcomes.
+  StreamHandlerFunctions(StreamHandlerListen<T> on_listen,
+                         StreamHandlerCancel<T> on_cancel)
+      : on_listen_(on_listen), on_cancel_(on_cancel) {}
+
+  virtual ~StreamHandlerFunctions() = default;
+
+  // Prevent copying.
+  StreamHandlerFunctions(StreamHandlerFunctions const&) = delete;
+  StreamHandlerFunctions& operator=(StreamHandlerFunctions const&) = delete;
+
+ protected:
+  // |flutter::StreamHandler|
+  std::unique_ptr<StreamHandlerError<T>> OnListenInternal(
+      const T* arguments,
+      std::unique_ptr<EventSink<T>>&& events) override {
+    if (on_listen_) {
+      return on_listen_(arguments, std::move(events));
+    }
+
+    auto error = std::make_unique<StreamHandlerError<T>>(
+        "error", "No OnListen handler set", nullptr);
+    return std::move(error);
+  }
+
+  // |flutter::StreamHandler|
+  std::unique_ptr<StreamHandlerError<T>> OnCancelInternal(
+      const T* arguments) override {
+    if (on_cancel_) {
+      return on_cancel_(arguments);
+    }
+
+    auto error = std::make_unique<StreamHandlerError<T>>(
+        "error", "No OnCancel handler set", nullptr);
+    return std::move(error);
+  }
+
+  StreamHandlerListen<T> on_listen_;
+  StreamHandlerCancel<T> on_cancel_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_FUNCTIONS_H_
diff --git a/linux/flutter/include/flutter/message_codec.h b/linux/flutter/include/flutter/message_codec.h
new file mode 100644
index 0000000000..c84d25f241
--- /dev/null
+++ b/linux/flutter/include/flutter/message_codec.h
@@ -0,0 +1,62 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_MESSAGE_CODEC_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_MESSAGE_CODEC_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+namespace flutter {
+
+// Translates between a binary message and higher-level method call and
+// response/error objects.
+template <typename T>
+class MessageCodec {
+ public:
+  MessageCodec() = default;
+
+  virtual ~MessageCodec() = default;
+
+  // Prevent copying.
+  MessageCodec(MessageCodec<T> const&) = delete;
+  MessageCodec& operator=(MessageCodec<T> const&) = delete;
+
+  // Returns the message encoded in |binary_message|, or nullptr if it cannot be
+  // decoded by this codec.
+  std::unique_ptr<T> DecodeMessage(const uint8_t* binary_message,
+                                   const size_t message_size) const {
+    return std::move(DecodeMessageInternal(binary_message, message_size));
+  }
+
+  // Returns the message encoded in |binary_message|, or nullptr if it cannot be
+  // decoded by this codec.
+  std::unique_ptr<T> DecodeMessage(
+      const std::vector<uint8_t>& binary_message) const {
+    size_t size = binary_message.size();
+    const uint8_t* data = size > 0 ? &binary_message[0] : nullptr;
+    return std::move(DecodeMessageInternal(data, size));
+  }
+
+  // Returns a binary encoding of the given |message|, or nullptr if the
+  // message cannot be serialized by this codec.
+  std::unique_ptr<std::vector<uint8_t>> EncodeMessage(const T& message) const {
+    return std::move(EncodeMessageInternal(message));
+  }
+
+ protected:
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<T> DecodeMessageInternal(
+      const uint8_t* binary_message,
+      const size_t message_size) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<std::vector<uint8_t>> EncodeMessageInternal(
+      const T& message) const = 0;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_MESSAGE_CODEC_H_
diff --git a/linux/flutter/include/flutter/method_call.h b/linux/flutter/include/flutter/method_call.h
new file mode 100644
index 0000000000..f9a9c287ed
--- /dev/null
+++ b/linux/flutter/include/flutter/method_call.h
@@ -0,0 +1,43 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TYPED_METHOD_CALL_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TYPED_METHOD_CALL_H_
+
+#include <memory>
+#include <string>
+
+namespace flutter {
+
+class EncodableValue;
+
+// An object encapsulating a method call from Flutter whose arguments are of
+// type T.
+template <typename T = EncodableValue>
+class MethodCall {
+ public:
+  // Creates a MethodCall with the given name and arguments.
+  MethodCall(const std::string& method_name, std::unique_ptr<T> arguments)
+      : method_name_(method_name), arguments_(std::move(arguments)) {}
+
+  virtual ~MethodCall() = default;
+
+  // Prevent copying.
+  MethodCall(MethodCall<T> const&) = delete;
+  MethodCall& operator=(MethodCall<T> const&) = delete;
+
+  // The name of the method being called.
+  const std::string& method_name() const { return method_name_; }
+
+  // The arguments to the method call, or NULL if there are none.
+  const T* arguments() const { return arguments_.get(); }
+
+ private:
+  std::string method_name_;
+  std::unique_ptr<T> arguments_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TYPED_METHOD_CALL_H_
diff --git a/linux/flutter/include/flutter/method_channel.h b/linux/flutter/include/flutter/method_channel.h
new file mode 100644
index 0000000000..e9ed6161c4
--- /dev/null
+++ b/linux/flutter/include/flutter/method_channel.h
@@ -0,0 +1,132 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CHANNEL_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CHANNEL_H_
+
+#include <iostream>
+#include <string>
+
+#include "binary_messenger.h"
+#include "engine_method_result.h"
+#include "method_call.h"
+#include "method_codec.h"
+#include "method_result.h"
+
+namespace flutter {
+
+class EncodableValue;
+
+// A handler for receiving a method call from the Flutter engine.
+//
+// Implementations must asynchronously call exactly one of the methods on
+// |result| to indicate the result of the method call.
+template <typename T>
+using MethodCallHandler =
+    std::function<void(const MethodCall<T>& call,
+                       std::unique_ptr<MethodResult<T>> result)>;
+
+// A channel for communicating with the Flutter engine using invocation of
+// asynchronous methods.
+template <typename T = EncodableValue>
+class MethodChannel {
+ public:
+  // Creates an instance that sends and receives method calls on the channel
+  // named |name|, encoded with |codec| and dispatched via |messenger|.
+  MethodChannel(BinaryMessenger* messenger,
+                const std::string& name,
+                const MethodCodec<T>* codec)
+      : messenger_(messenger), name_(name), codec_(codec) {}
+
+  ~MethodChannel() = default;
+
+  // Prevent copying.
+  MethodChannel(MethodChannel const&) = delete;
+  MethodChannel& operator=(MethodChannel const&) = delete;
+
+  // Sends a message to the Flutter engine on this channel.
+  //
+  // If |result| is provided, one of its methods will be invoked with the
+  // response from the engine.
+  void InvokeMethod(const std::string& method,
+                    std::unique_ptr<T> arguments,
+                    std::unique_ptr<MethodResult<T>> result = nullptr) {
+    MethodCall<T> method_call(method, std::move(arguments));
+    std::unique_ptr<std::vector<uint8_t>> message =
+        codec_->EncodeMethodCall(method_call);
+    if (!result) {
+      messenger_->Send(name_, message->data(), message->size(), nullptr);
+      return;
+    }
+
+    // std::function requires a copyable lambda, so convert to a shared pointer.
+    // This is safe since only one copy of the shared_pointer will ever be
+    // accessed.
+    std::shared_ptr<MethodResult<T>> shared_result(result.release());
+    const auto* codec = codec_;
+    std::string channel_name = name_;
+    BinaryReply reply_handler = [shared_result, codec, channel_name](
+                                    const uint8_t* reply, size_t reply_size) {
+      if (reply_size == 0) {
+        shared_result->NotImplemented();
+        return;
+      }
+      // Use this channel's codec to decode and handle the
+      // reply.
+      bool decoded = codec->DecodeAndProcessResponseEnvelope(
+          reply, reply_size, shared_result.get());
+      if (!decoded) {
+        std::cerr << "Unable to decode reply to method "
+                     "invocation on channel "
+                  << channel_name << std::endl;
+        shared_result->NotImplemented();
+      }
+    };
+
+    messenger_->Send(name_, message->data(), message->size(),
+                     std::move(reply_handler));
+  }
+
+  // Registers a handler that should be called any time a method call is
+  // received on this channel. A null handler will remove any previous handler.
+  //
+  // Note that the MethodChannel does not own the handler, and will not
+  // unregister it on destruction, so the caller is responsible for
+  // unregistering explicitly if it should no longer be called.
+  void SetMethodCallHandler(MethodCallHandler<T> handler) const {
+    if (!handler) {
+      messenger_->SetMessageHandler(name_, nullptr);
+      return;
+    }
+    const auto* codec = codec_;
+    std::string channel_name = name_;
+    BinaryMessageHandler binary_handler = [handler, codec, channel_name](
+                                              const uint8_t* message,
+                                              size_t message_size,
+                                              BinaryReply reply) {
+      // Use this channel's codec to decode the call and build a result handler.
+      auto result =
+          std::make_unique<EngineMethodResult<T>>(std::move(reply), codec);
+      std::unique_ptr<MethodCall<T>> method_call =
+          codec->DecodeMethodCall(message, message_size);
+      if (!method_call) {
+        std::cerr << "Unable to construct method call from message on channel "
+                  << channel_name << std::endl;
+        result->NotImplemented();
+        return;
+      }
+      handler(*method_call, std::move(result));
+    };
+    messenger_->SetMessageHandler(name_, std::move(binary_handler));
+  }
+
+ private:
+  BinaryMessenger* messenger_;
+  std::string name_;
+  const MethodCodec<T>* codec_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CHANNEL_H_
diff --git a/linux/flutter/include/flutter/method_codec.h b/linux/flutter/include/flutter/method_codec.h
new file mode 100644
index 0000000000..b40fa640c9
--- /dev/null
+++ b/linux/flutter/include/flutter/method_codec.h
@@ -0,0 +1,111 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CODEC_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CODEC_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "method_call.h"
+#include "method_result.h"
+
+namespace flutter {
+
+// Translates between a binary message and higher-level method call and
+// response/error objects.
+template <typename T>
+class MethodCodec {
+ public:
+  MethodCodec() = default;
+
+  virtual ~MethodCodec() = default;
+
+  // Prevent copying.
+  MethodCodec(MethodCodec<T> const&) = delete;
+  MethodCodec& operator=(MethodCodec<T> const&) = delete;
+
+  // Returns the MethodCall encoded in |message|, or nullptr if it cannot be
+  // decoded.
+  std::unique_ptr<MethodCall<T>> DecodeMethodCall(const uint8_t* message,
+                                                  size_t message_size) const {
+    return std::move(DecodeMethodCallInternal(message, message_size));
+  }
+
+  // Returns the MethodCall encoded in |message|, or nullptr if it cannot be
+  // decoded.
+  std::unique_ptr<MethodCall<T>> DecodeMethodCall(
+      const std::vector<uint8_t>& message) const {
+    size_t size = message.size();
+    const uint8_t* data = size > 0 ? &message[0] : nullptr;
+    return std::move(DecodeMethodCallInternal(data, size));
+  }
+
+  // Returns a binary encoding of the given |method_call|, or nullptr if the
+  // method call cannot be serialized by this codec.
+  std::unique_ptr<std::vector<uint8_t>> EncodeMethodCall(
+      const MethodCall<T>& method_call) const {
+    return std::move(EncodeMethodCallInternal(method_call));
+  }
+
+  // Returns a binary encoding of |result|. |result| must be a type supported
+  // by the codec.
+  std::unique_ptr<std::vector<uint8_t>> EncodeSuccessEnvelope(
+      const T* result = nullptr) const {
+    return std::move(EncodeSuccessEnvelopeInternal(result));
+  }
+
+  // Returns a binary encoding of |error|. The |error_details| must be a type
+  // supported by the codec.
+  std::unique_ptr<std::vector<uint8_t>> EncodeErrorEnvelope(
+      const std::string& error_code,
+      const std::string& error_message = "",
+      const T* error_details = nullptr) const {
+    return std::move(
+        EncodeErrorEnvelopeInternal(error_code, error_message, error_details));
+  }
+
+  // Decodes the response envelope encoded in |response|, calling the
+  // appropriate method on |result|.
+  //
+  // Returns false if |response| cannot be decoded. In that case the caller is
+  // responsible for calling a |result| method.
+  bool DecodeAndProcessResponseEnvelope(const uint8_t* response,
+                                        size_t response_size,
+                                        MethodResult<T>* result) const {
+    return DecodeAndProcessResponseEnvelopeInternal(response, response_size,
+                                                    result);
+  }
+
+ protected:
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<MethodCall<T>> DecodeMethodCallInternal(
+      const uint8_t* message,
+      size_t message_size) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<std::vector<uint8_t>> EncodeMethodCallInternal(
+      const MethodCall<T>& method_call) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<std::vector<uint8_t>> EncodeSuccessEnvelopeInternal(
+      const T* result) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<std::vector<uint8_t>> EncodeErrorEnvelopeInternal(
+      const std::string& error_code,
+      const std::string& error_message,
+      const T* error_details) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual bool DecodeAndProcessResponseEnvelopeInternal(
+      const uint8_t* response,
+      size_t response_size,
+      MethodResult<T>* result) const = 0;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CODEC_H_
diff --git a/linux/flutter/include/flutter/method_result.h b/linux/flutter/include/flutter/method_result.h
new file mode 100644
index 0000000000..05c7fe9687
--- /dev/null
+++ b/linux/flutter/include/flutter/method_result.h
@@ -0,0 +1,76 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_H_
+
+#include <string>
+
+namespace flutter {
+
+class EncodableValue;
+
+// Encapsulates a result returned from a MethodCall. Only one method should be
+// called on any given instance.
+template <typename T = EncodableValue>
+class MethodResult {
+ public:
+  MethodResult() = default;
+
+  virtual ~MethodResult() = default;
+
+  // Prevent copying.
+  MethodResult(MethodResult const&) = delete;
+  MethodResult& operator=(MethodResult const&) = delete;
+
+  // Sends a success response, indicating that the call completed successfully
+  // with the given result.
+  void Success(const T& result) { SuccessInternal(&result); }
+
+  // Sends a success response, indicating that the call completed successfully
+  // with no result.
+  void Success() { SuccessInternal(nullptr); }
+
+  // Sends an error response, indicating that the call was understood but
+  // handling failed in some way.
+  //
+  // error_code: A string error code describing the error.
+  // error_message: A user-readable error message.
+  // error_details: Arbitrary extra details about the error.
+  void Error(const std::string& error_code,
+             const std::string& error_message,
+             const T& error_details) {
+    ErrorInternal(error_code, error_message, &error_details);
+  }
+
+  // Sends an error response, indicating that the call was understood but
+  // handling failed in some way.
+  //
+  // error_code: A string error code describing the error.
+  // error_message: A user-readable error message (optional).
+  void Error(const std::string& error_code,
+             const std::string& error_message = "") {
+    ErrorInternal(error_code, error_message, nullptr);
+  }
+
+  // Sends a not-implemented response, indicating that the method either was not
+  // recognized, or has not been implemented.
+  void NotImplemented() { NotImplementedInternal(); }
+
+ protected:
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void SuccessInternal(const T* result) = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void ErrorInternal(const std::string& error_code,
+                             const std::string& error_message,
+                             const T* error_details) = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void NotImplementedInternal() = 0;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_H_
diff --git a/linux/flutter/include/flutter/method_result_functions.h b/linux/flutter/include/flutter/method_result_functions.h
new file mode 100644
index 0000000000..a19cc349d1
--- /dev/null
+++ b/linux/flutter/include/flutter/method_result_functions.h
@@ -0,0 +1,79 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
+
+#include <functional>
+#include <string>
+
+#include "method_result.h"
+
+namespace flutter {
+
+class EncodableValue;
+
+// Handler types for each of the MethodResult outcomes.
+template <typename T>
+using ResultHandlerSuccess = std::function<void(const T* result)>;
+template <typename T>
+using ResultHandlerError =
+    std::function<void(const std::string& error_code,
+                       const std::string& error_message,
+                       const T* error_details)>;
+template <typename T>
+using ResultHandlerNotImplemented = std::function<void()>;
+
+// An implementation of MethodResult that pass calls through to provided
+// function objects, for ease of constructing one-off result handlers.
+template <typename T = EncodableValue>
+class MethodResultFunctions : public MethodResult<T> {
+ public:
+  // Creates a result object that calls the provided functions for the
+  // corresponding MethodResult outcomes.
+  MethodResultFunctions(ResultHandlerSuccess<T> on_success,
+                        ResultHandlerError<T> on_error,
+                        ResultHandlerNotImplemented<T> on_not_implemented)
+      : on_success_(on_success),
+        on_error_(on_error),
+        on_not_implemented_(on_not_implemented) {}
+
+  virtual ~MethodResultFunctions() = default;
+
+  // Prevent copying.
+  MethodResultFunctions(MethodResultFunctions const&) = delete;
+  MethodResultFunctions& operator=(MethodResultFunctions const&) = delete;
+
+ protected:
+  // |flutter::MethodResult|
+  void SuccessInternal(const T* result) override {
+    if (on_success_) {
+      on_success_(result);
+    }
+  }
+
+  // |flutter::MethodResult|
+  void ErrorInternal(const std::string& error_code,
+                     const std::string& error_message,
+                     const T* error_details) override {
+    if (on_error_) {
+      on_error_(error_code, error_message, error_details);
+    }
+  }
+
+  // |flutter::MethodResult|
+  void NotImplementedInternal() override {
+    if (on_not_implemented_) {
+      on_not_implemented_();
+    }
+  }
+
+ private:
+  ResultHandlerSuccess<T> on_success_;
+  ResultHandlerError<T> on_error_;
+  ResultHandlerNotImplemented<T> on_not_implemented_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
diff --git a/linux/flutter/include/flutter/plugin_registrar.h b/linux/flutter/include/flutter/plugin_registrar.h
new file mode 100644
index 0000000000..26a6c3b7ed
--- /dev/null
+++ b/linux/flutter/include/flutter/plugin_registrar.h
@@ -0,0 +1,131 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_PLUGIN_REGISTRAR_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_PLUGIN_REGISTRAR_H_
+
+#include <flutter_linux/flutter_linux.h>
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+
+#include "binary_messenger.h"
+#include "texture_registrar.h"
+
+namespace flutter {
+
+class Plugin;
+
+// A object managing the registration of a plugin for various events.
+//
+// Currently this class has very limited functionality, but is expected to
+// expand over time to more closely match the functionality of
+// the Flutter mobile plugin APIs' plugin registrars.
+class PluginRegistrar {
+ public:
+  // Creates a new PluginRegistrar. |core_registrar| and the messenger it
+  // provides must remain valid as long as this object exists.
+  explicit PluginRegistrar(FlPluginRegistrar* core_registrar);
+
+  virtual ~PluginRegistrar();
+
+  // Prevent copying.
+  PluginRegistrar(PluginRegistrar const&) = delete;
+  PluginRegistrar& operator=(PluginRegistrar const&) = delete;
+
+  // Returns the messenger to use for creating channels to communicate with the
+  // Flutter engine.
+  //
+  // This pointer will remain valid for the lifetime of this instance.
+  BinaryMessenger* messenger() { return messenger_.get(); }
+
+  // Returns the texture registrar to use for the plugin to render a pixel
+  // buffer.
+  TextureRegistrar* texture_registrar() { return texture_registrar_.get(); }
+
+  // Takes ownership of |plugin|.
+  //
+  // Plugins are not required to call this method if they have other lifetime
+  // management, but this is a convient place for plugins to be owned to ensure
+  // that they stay valid for any registered callbacks.
+  void AddPlugin(std::unique_ptr<Plugin> plugin);
+
+ protected:
+  FlPluginRegistrar* registrar() { return registrar_; }
+
+  // Destroys all owned plugins.  Subclasses should call this at the beginning of
+  // their destructors to prevent the possibility of an owned plugin trying to
+  // access destroyed state during its own destruction.
+  void ClearPlugins();
+
+ private:
+  // Handle for interacting with the C API's registrar.
+  FlPluginRegistrar* registrar_;
+
+  std::unique_ptr<BinaryMessenger> messenger_;
+
+  std::unique_ptr<TextureRegistrar> texture_registrar_;
+
+  // Plugins registered for ownership.
+  std::set<std::unique_ptr<Plugin>> plugins_;
+};
+
+// A plugin that can be registered for ownership by a PluginRegistrar.
+class Plugin {
+ public:
+  virtual ~Plugin() = default;
+};
+
+// A singleton to own PluginRegistrars. This is intended for use in plugins,
+// where there is no higher-level object to own a PluginRegistrar that can
+// own plugin instances and ensure that they live as long as the engine they
+// are registered with.
+class PluginRegistrarManager {
+ public:
+  static PluginRegistrarManager* GetInstance();
+
+  // Prevent copying.
+  PluginRegistrarManager(PluginRegistrarManager const&) = delete;
+  PluginRegistrarManager& operator=(PluginRegistrarManager const&) = delete;
+
+  // Returns a plugin registrar wrapper of type T, which must be a kind of
+  // PluginRegistrar, creating it if necessary. The returned registrar will
+  // live as long as the underlying FlutterDesktopPluginRegistrarRef, so
+  // can be used to own plugin instances.
+  //
+  // Calling this multiple times for the same registrar_ref with different
+  // template types results in undefined behavior.
+  template <class T>
+  T* GetRegistrar(FlPluginRegistrar* registrar_ref) {
+    auto insert_result =
+        registrars_.emplace(registrar_ref, std::make_unique<T>(registrar_ref));
+    auto& registrar_pair = *(insert_result.first);
+    FlutterDesktopPluginRegistrarSetDestructionHandler(registrar_pair.first,
+                                                       OnRegistrarDestroyed);
+    return static_cast<T*>(registrar_pair.second.get());
+  }
+
+  // Destroys all registrar wrappers created by the manager.
+  //
+  // This is intended primarily for use in tests.
+  void Reset() { registrars_.clear(); }
+
+ private:
+  PluginRegistrarManager();
+
+  using WrapperMap =
+      std::map<FlPluginRegistrar*, std::unique_ptr<PluginRegistrar>>;
+
+  static void OnRegistrarDestroyed(FlPluginRegistrar* registrar);
+
+  WrapperMap* registrars() { return &registrars_; }
+
+  WrapperMap registrars_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_PLUGIN_REGISTRAR_H_
diff --git a/linux/flutter/include/flutter/standard_codec_serializer.h b/linux/flutter/include/flutter/standard_codec_serializer.h
new file mode 100644
index 0000000000..a6001037f9
--- /dev/null
+++ b/linux/flutter/include/flutter/standard_codec_serializer.h
@@ -0,0 +1,76 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_CODEC_SERIALIZER_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_CODEC_SERIALIZER_H_
+
+#include "byte_streams.h"
+#include "encodable_value.h"
+
+namespace flutter {
+
+// Encapsulates the logic for encoding/decoding EncodableValues to/from the
+// standard codec binary representation.
+//
+// This can be subclassed to extend the standard codec with support for new
+// types.
+class StandardCodecSerializer {
+ public:
+  virtual ~StandardCodecSerializer();
+
+  // Returns the shared serializer instance.
+  static const StandardCodecSerializer& GetInstance();
+
+  // Prevent copying.
+  StandardCodecSerializer(StandardCodecSerializer const&) = delete;
+  StandardCodecSerializer& operator=(StandardCodecSerializer const&) = delete;
+
+  // Reads and returns the next value from |stream|.
+  EncodableValue ReadValue(ByteStreamReader* stream) const;
+
+  // Writes the encoding of |value| to |stream|, including the initial type
+  // discrimination byte.
+  //
+  // Can be overridden by a subclass to extend the codec.
+ virtual void WriteValue(const EncodableValue& value, + ByteStreamWriter* stream) const; + + protected: + // Codecs require long-lived serializers, so clients should always use + // GetInstance(). + StandardCodecSerializer(); + + // Reads and returns the next value from |stream|, whose discrimination byte + // was |type|. + // + // The discrimination byte will already have been read from the stream when + // this is called. + // + // Can be overridden by a subclass to extend the codec. + virtual EncodableValue ReadValueOfType(uint8_t type, + ByteStreamReader* stream) const; + + // Reads the variable-length size from the current position in |stream|. + size_t ReadSize(ByteStreamReader* stream) const; + + // Writes the variable-length size encoding to |stream|. + void WriteSize(size_t size, ByteStreamWriter* stream) const; + + private: + // Reads a fixed-type list whose values are of type T from the current + // position in |stream|, and returns it as the corresponding EncodableValue. + // |T| must correspond to one of the supported list value types of + // EncodableValue. + template + EncodableValue ReadVector(ByteStreamReader* stream) const; + + // Writes |vector| to |stream| as a fixed-type list. |T| must correspond to + // one of the supported list value types of EncodableValue. + template + void WriteVector(const std::vector vector, ByteStreamWriter* stream) const; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_CODEC_SERIALIZER_H_ diff --git a/linux/flutter/include/flutter/standard_message_codec.h b/linux/flutter/include/flutter/standard_message_codec.h new file mode 100644 index 0000000000..568b3917b3 --- /dev/null +++ b/linux/flutter/include/flutter/standard_message_codec.h @@ -0,0 +1,57 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_MESSAGE_CODEC_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_MESSAGE_CODEC_H_ + +#include + +#include "encodable_value.h" +#include "message_codec.h" +#include "standard_codec_serializer.h" + +namespace flutter { + +// A binary message encoding/decoding mechanism for communications to/from the +// Flutter engine via message channels. +class StandardMessageCodec : public MessageCodec { + public: + // Returns an instance of the codec, optionally using a custom serializer to + // add support for more types. + // + // If provided, |serializer| must be long-lived. If no serializer is provided, + // the default will be used. + // + // The instance returned for a given |serializer| will be shared, and + // any instance returned from this will be long-lived, and can be safely + // passed to, e.g., channel constructors. + static const StandardMessageCodec& GetInstance( + const StandardCodecSerializer* serializer = nullptr); + + ~StandardMessageCodec(); + + // Prevent copying. + StandardMessageCodec(StandardMessageCodec const&) = delete; + StandardMessageCodec& operator=(StandardMessageCodec const&) = delete; + + protected: + // |flutter::MessageCodec| + std::unique_ptr DecodeMessageInternal( + const uint8_t* binary_message, + const size_t message_size) const override; + + // |flutter::MessageCodec| + std::unique_ptr> EncodeMessageInternal( + const EncodableValue& message) const override; + + private: + // Instances should be obtained via GetInstance. 
+ explicit StandardMessageCodec(const StandardCodecSerializer* serializer); + + const StandardCodecSerializer* serializer_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_MESSAGE_CODEC_H_ diff --git a/linux/flutter/include/flutter/standard_method_codec.h b/linux/flutter/include/flutter/standard_method_codec.h new file mode 100644 index 0000000000..b6db35ec97 --- /dev/null +++ b/linux/flutter/include/flutter/standard_method_codec.h @@ -0,0 +1,73 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_METHOD_CODEC_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_METHOD_CODEC_H_ + +#include + +#include "encodable_value.h" +#include "method_call.h" +#include "method_codec.h" +#include "standard_codec_serializer.h" + +namespace flutter { + +// An implementation of MethodCodec that uses a binary serialization. +class StandardMethodCodec : public MethodCodec { + public: + // Returns an instance of the codec, optionally using a custom serializer to + // add support for more types. + // + // If provided, |serializer| must be long-lived. If no serializer is provided, + // the default will be used. + // + // The instance returned for a given |extension| will be shared, and + // any instance returned from this will be long-lived, and can be safely + // passed to, e.g., channel constructors. + static const StandardMethodCodec& GetInstance( + const StandardCodecSerializer* serializer = nullptr); + + ~StandardMethodCodec(); + + // Prevent copying. 
+ StandardMethodCodec(StandardMethodCodec const&) = delete; + StandardMethodCodec& operator=(StandardMethodCodec const&) = delete; + + protected: + // |flutter::MethodCodec| + std::unique_ptr> DecodeMethodCallInternal( + const uint8_t* message, + size_t message_size) const override; + + // |flutter::MethodCodec| + std::unique_ptr> EncodeMethodCallInternal( + const MethodCall& method_call) const override; + + // |flutter::MethodCodec| + std::unique_ptr> EncodeSuccessEnvelopeInternal( + const EncodableValue* result) const override; + + // |flutter::MethodCodec| + std::unique_ptr> EncodeErrorEnvelopeInternal( + const std::string& error_code, + const std::string& error_message, + const EncodableValue* error_details) const override; + + // |flutter::MethodCodec| + bool DecodeAndProcessResponseEnvelopeInternal( + const uint8_t* response, + size_t response_size, + MethodResult* result) const override; + + private: + // Instances should be obtained via GetInstance. + explicit StandardMethodCodec(const StandardCodecSerializer* serializer); + + const StandardCodecSerializer* serializer_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_METHOD_CODEC_H_ diff --git a/linux/flutter/include/flutter/texture_registrar.h b/linux/flutter/include/flutter/texture_registrar.h new file mode 100644 index 0000000000..10d0111769 --- /dev/null +++ b/linux/flutter/include/flutter/texture_registrar.h @@ -0,0 +1,89 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_ + +//#include + +#include +#include +#include +#include + +// An image buffer object. +typedef struct { + // The pixel data buffer. 
+ const uint8_t* buffer; + // Width of the pixel buffer. + size_t width; + // Height of the pixel buffer. + size_t height; + // An optional callback that gets invoked when the |buffer| can be released. + void (*release_callback)(void* release_context); + // Opaque data passed to |release_callback|. + void* release_context; +} FlutterDesktopPixelBuffer; + +namespace flutter { + +// A pixel buffer texture. +class PixelBufferTexture { + public: + // A callback used for retrieving pixel buffers. + typedef std::function + CopyBufferCallback; + + // Creates a pixel buffer texture that uses the provided |copy_buffer_cb| to + // retrieve the buffer. + // As the callback is usually invoked from the render thread, the callee must + // take care of proper synchronization. It also needs to be ensured that the + // returned buffer isn't released prior to unregistering this texture. + explicit PixelBufferTexture(CopyBufferCallback copy_buffer_callback) + : copy_buffer_callback_(copy_buffer_callback) {} + + // Returns the callback-provided FlutterDesktopPixelBuffer that contains the + // actual pixel data. The intended surface size is specified by |width| and + // |height|. + const FlutterDesktopPixelBuffer* CopyPixelBuffer(size_t width, + size_t height) const { + return copy_buffer_callback_(width, height); + } + + private: + const CopyBufferCallback copy_buffer_callback_; +}; + +// The available texture variants. +// Only PixelBufferTexture is currently implemented. +// Other variants are expected to be added in the future. +typedef std::variant TextureVariant; + +// An object keeping track of external textures. +// +// Thread safety: +// It's safe to call the member methods from any thread. +class TextureRegistrar { + public: + virtual ~TextureRegistrar() = default; + + // Registers a |texture| object and returns the ID for that texture. 
+ virtual int64_t RegisterTexture(TextureVariant* texture) = 0; + + // Notifies the flutter engine that the texture object corresponding + // to |texure_id| needs to render a new frame. + // + // For PixelBufferTextures, this will effectively make the engine invoke + // the callback that was provided upon creating the texture. + virtual bool MarkTextureFrameAvailable(int64_t texture_id) = 0; + + // Unregisters an existing Texture object. + // Textures must not be unregistered while they're in use. + virtual bool UnregisterTexture(int64_t texture_id) = 0; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_ diff --git a/linux/flutter/plugin_registrar.cc b/linux/flutter/plugin_registrar.cc new file mode 100644 index 0000000000..5e61d9042c --- /dev/null +++ b/linux/flutter/plugin_registrar.cc @@ -0,0 +1,60 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "include/flutter/plugin_registrar.h" + +#include +#include + +#include "binary_messenger_impl.h" +#include "include/flutter/engine_method_result.h" +#include "include/flutter/method_channel.h" +#include "texture_registrar_impl.h" + +namespace flutter { + +// ===== PluginRegistrar ===== + +PluginRegistrar::PluginRegistrar(FlPluginRegistrar* registrar) + : registrar_(registrar) { + auto core_messenger = fl_plugin_registrar_get_messenger(registrar); + messenger_ = std::make_unique(core_messenger); + auto texture_registrar = fl_plugin_registrar_get_texture_registrar(registrar); + texture_registrar_ = + std::make_unique(texture_registrar); +} + +PluginRegistrar::~PluginRegistrar() { + // This must always be the first call. + ClearPlugins(); + + // Explicitly cleared to facilitate testing of destruction order. 
+ messenger_.reset(); +} + +void PluginRegistrar::AddPlugin(std::unique_ptr plugin) { + plugins_.insert(std::move(plugin)); +} + +void PluginRegistrar::ClearPlugins() { + plugins_.clear(); +} + +// ===== PluginRegistrarManager ===== + +// static +PluginRegistrarManager* PluginRegistrarManager::GetInstance() { + static PluginRegistrarManager* instance = new PluginRegistrarManager(); + return instance; +} + +PluginRegistrarManager::PluginRegistrarManager() = default; + +// static +void PluginRegistrarManager::OnRegistrarDestroyed( + FlPluginRegistrar* registrar) { + GetInstance()->registrars()->erase(registrar); +} + +} // namespace flutter diff --git a/linux/flutter/standard_codec.cc b/linux/flutter/standard_codec.cc new file mode 100644 index 0000000000..807e06816b --- /dev/null +++ b/linux/flutter/standard_codec.cc @@ -0,0 +1,466 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file contains what would normally be standard_codec_serializer.cc, +// standard_message_codec.cc, and standard_method_codec.cc. They are grouped +// together to simplify use of the client wrapper, since the common case is +// that any client that needs one of these files needs all three. + +#include +#include +#include +#include +#include +#include + +#include "byte_buffer_streams.h" +#include "include/flutter/standard_codec_serializer.h" +#include "include/flutter/standard_message_codec.h" +#include "include/flutter/standard_method_codec.h" + +namespace flutter { + +// ===== standard_codec_serializer.h ===== + +namespace { + +// The order/values here must match the constants in message_codecs.dart. +enum class EncodedType { + kNull = 0, + kTrue, + kFalse, + kInt32, + kInt64, + kLargeInt, // No longer used. If encountered, treat as kString. 
+ kFloat64, + kString, + kUInt8List, + kInt32List, + kInt64List, + kFloat64List, + kList, + kMap, + kFloat32List, +}; + +// Returns the encoded type that should be written when serializing |value|. +EncodedType EncodedTypeForValue(const EncodableValue& value) { + switch (value.index()) { + case 0: + return EncodedType::kNull; + case 1: + return std::get(value) ? EncodedType::kTrue : EncodedType::kFalse; + case 2: + return EncodedType::kInt32; + case 3: + return EncodedType::kInt64; + case 4: + return EncodedType::kFloat64; + case 5: + return EncodedType::kString; + case 6: + return EncodedType::kUInt8List; + case 7: + return EncodedType::kInt32List; + case 8: + return EncodedType::kInt64List; + case 9: + return EncodedType::kFloat64List; + case 10: + return EncodedType::kList; + case 11: + return EncodedType::kMap; + case 13: + return EncodedType::kFloat32List; + } + assert(false); + return EncodedType::kNull; +} + +} // namespace + +StandardCodecSerializer::StandardCodecSerializer() = default; + +StandardCodecSerializer::~StandardCodecSerializer() = default; + +const StandardCodecSerializer& StandardCodecSerializer::GetInstance() { + static StandardCodecSerializer sInstance; + return sInstance; +}; + +EncodableValue StandardCodecSerializer::ReadValue( + ByteStreamReader* stream) const { + uint8_t type = stream->ReadByte(); + return ReadValueOfType(type, stream); +} + +void StandardCodecSerializer::WriteValue(const EncodableValue& value, + ByteStreamWriter* stream) const { + stream->WriteByte(static_cast(EncodedTypeForValue(value))); + // TODO: Consider replacing this this with a std::visitor. + switch (value.index()) { + case 0: + case 1: + // Null and bool are encoded directly in the type. 
+ break; + case 2: + stream->WriteInt32(std::get(value)); + break; + case 3: + stream->WriteInt64(std::get(value)); + break; + case 4: + stream->WriteAlignment(8); + stream->WriteDouble(std::get(value)); + break; + case 5: { + const auto& string_value = std::get(value); + size_t size = string_value.size(); + WriteSize(size, stream); + if (size > 0) { + stream->WriteBytes( + reinterpret_cast(string_value.data()), size); + } + break; + } + case 6: + WriteVector(std::get>(value), stream); + break; + case 7: + WriteVector(std::get>(value), stream); + break; + case 8: + WriteVector(std::get>(value), stream); + break; + case 9: + WriteVector(std::get>(value), stream); + break; + case 10: { + const auto& list = std::get(value); + WriteSize(list.size(), stream); + for (const auto& item : list) { + WriteValue(item, stream); + } + break; + } + case 11: { + const auto& map = std::get(value); + WriteSize(map.size(), stream); + for (const auto& pair : map) { + WriteValue(pair.first, stream); + WriteValue(pair.second, stream); + } + break; + } + case 12: + std::cerr + << "Unhandled custom type in StandardCodecSerializer::WriteValue. " + << "Custom types require codec extensions." 
<< std::endl; + break; + case 13: { + WriteVector(std::get>(value), stream); + break; + } + } +} + +EncodableValue StandardCodecSerializer::ReadValueOfType( + uint8_t type, + ByteStreamReader* stream) const { + switch (static_cast(type)) { + case EncodedType::kNull: + return EncodableValue(); + case EncodedType::kTrue: + return EncodableValue(true); + case EncodedType::kFalse: + return EncodableValue(false); + case EncodedType::kInt32: + return EncodableValue(stream->ReadInt32()); + case EncodedType::kInt64: + return EncodableValue(stream->ReadInt64()); + case EncodedType::kFloat64: + stream->ReadAlignment(8); + return EncodableValue(stream->ReadDouble()); + case EncodedType::kLargeInt: + case EncodedType::kString: { + size_t size = ReadSize(stream); + std::string string_value; + string_value.resize(size); + stream->ReadBytes(reinterpret_cast(&string_value[0]), size); + return EncodableValue(string_value); + } + case EncodedType::kUInt8List: + return ReadVector(stream); + case EncodedType::kInt32List: + return ReadVector(stream); + case EncodedType::kInt64List: + return ReadVector(stream); + case EncodedType::kFloat64List: + return ReadVector(stream); + case EncodedType::kList: { + size_t length = ReadSize(stream); + EncodableList list_value; + list_value.reserve(length); + for (size_t i = 0; i < length; ++i) { + list_value.push_back(ReadValue(stream)); + } + return EncodableValue(list_value); + } + case EncodedType::kMap: { + size_t length = ReadSize(stream); + EncodableMap map_value; + for (size_t i = 0; i < length; ++i) { + EncodableValue key = ReadValue(stream); + EncodableValue value = ReadValue(stream); + map_value.emplace(std::move(key), std::move(value)); + } + return EncodableValue(map_value); + } + case EncodedType::kFloat32List: { + return ReadVector(stream); + } + } + std::cerr << "Unknown type in StandardCodecSerializer::ReadValueOfType: " + << static_cast(type) << std::endl; + return EncodableValue(); +} + +size_t 
StandardCodecSerializer::ReadSize(ByteStreamReader* stream) const { + uint8_t byte = stream->ReadByte(); + if (byte < 254) { + return byte; + } else if (byte == 254) { + uint16_t value = 0; + stream->ReadBytes(reinterpret_cast(&value), 2); + return value; + } else { + uint32_t value = 0; + stream->ReadBytes(reinterpret_cast(&value), 4); + return value; + } +} + +void StandardCodecSerializer::WriteSize(size_t size, + ByteStreamWriter* stream) const { + if (size < 254) { + stream->WriteByte(static_cast(size)); + } else if (size <= 0xffff) { + stream->WriteByte(254); + uint16_t value = static_cast(size); + stream->WriteBytes(reinterpret_cast(&value), 2); + } else { + stream->WriteByte(255); + uint32_t value = static_cast(size); + stream->WriteBytes(reinterpret_cast(&value), 4); + } +} + +template +EncodableValue StandardCodecSerializer::ReadVector( + ByteStreamReader* stream) const { + size_t count = ReadSize(stream); + std::vector vector; + vector.resize(count); + uint8_t type_size = static_cast(sizeof(T)); + if (type_size > 1) { + stream->ReadAlignment(type_size); + } + stream->ReadBytes(reinterpret_cast(vector.data()), + count * type_size); + return EncodableValue(vector); +} + +template +void StandardCodecSerializer::WriteVector(const std::vector vector, + ByteStreamWriter* stream) const { + size_t count = vector.size(); + WriteSize(count, stream); + if (count == 0) { + return; + } + uint8_t type_size = static_cast(sizeof(T)); + if (type_size > 1) { + stream->WriteAlignment(type_size); + } + stream->WriteBytes(reinterpret_cast(vector.data()), + count * type_size); +} + +// ===== standard_message_codec.h ===== + +// static +const StandardMessageCodec& StandardMessageCodec::GetInstance( + const StandardCodecSerializer* serializer) { + if (!serializer) { + serializer = &StandardCodecSerializer::GetInstance(); + } + static auto* sInstances = new std::map>; + auto it = sInstances->find(serializer); + if (it == sInstances->end()) { + // Uses new due to private 
constructor (to prevent API clients from + // accidentally passing temporary codec instances to channels). + auto emplace_result = sInstances->emplace( + serializer, std::unique_ptr( + new StandardMessageCodec(serializer))); + it = emplace_result.first; + } + return *(it->second); +} + +StandardMessageCodec::StandardMessageCodec( + const StandardCodecSerializer* serializer) + : serializer_(serializer) {} + +StandardMessageCodec::~StandardMessageCodec() = default; + +std::unique_ptr StandardMessageCodec::DecodeMessageInternal( + const uint8_t* binary_message, + size_t message_size) const { + if (!binary_message) { + return std::make_unique(); + } + ByteBufferStreamReader stream(binary_message, message_size); + return std::make_unique(serializer_->ReadValue(&stream)); +} + +std::unique_ptr> +StandardMessageCodec::EncodeMessageInternal( + const EncodableValue& message) const { + auto encoded = std::make_unique>(); + ByteBufferStreamWriter stream(encoded.get()); + serializer_->WriteValue(message, &stream); + return encoded; +} + +// ===== standard_method_codec.h ===== + +// static +const StandardMethodCodec& StandardMethodCodec::GetInstance( + const StandardCodecSerializer* serializer) { + if (!serializer) { + serializer = &StandardCodecSerializer::GetInstance(); + } + static auto* sInstances = new std::map>; + auto it = sInstances->find(serializer); + if (it == sInstances->end()) { + // Uses new due to private constructor (to prevent API clients from + // accidentally passing temporary codec instances to channels). 
+ auto emplace_result = sInstances->emplace( + serializer, std::unique_ptr( + new StandardMethodCodec(serializer))); + it = emplace_result.first; + } + return *(it->second); +} + +StandardMethodCodec::StandardMethodCodec( + const StandardCodecSerializer* serializer) + : serializer_(serializer) {} + +StandardMethodCodec::~StandardMethodCodec() = default; + +std::unique_ptr> +StandardMethodCodec::DecodeMethodCallInternal(const uint8_t* message, + size_t message_size) const { + ByteBufferStreamReader stream(message, message_size); + EncodableValue method_name_value = serializer_->ReadValue(&stream); + const auto* method_name = std::get_if(&method_name_value); + if (!method_name) { + std::cerr << "Invalid method call; method name is not a string." + << std::endl; + return nullptr; + } + auto arguments = + std::make_unique(serializer_->ReadValue(&stream)); + return std::make_unique>(*method_name, + std::move(arguments)); +} + +std::unique_ptr> +StandardMethodCodec::EncodeMethodCallInternal( + const MethodCall& method_call) const { + auto encoded = std::make_unique>(); + ByteBufferStreamWriter stream(encoded.get()); + serializer_->WriteValue(EncodableValue(method_call.method_name()), &stream); + if (method_call.arguments()) { + serializer_->WriteValue(*method_call.arguments(), &stream); + } else { + serializer_->WriteValue(EncodableValue(), &stream); + } + return encoded; +} + +std::unique_ptr> +StandardMethodCodec::EncodeSuccessEnvelopeInternal( + const EncodableValue* result) const { + auto encoded = std::make_unique>(); + ByteBufferStreamWriter stream(encoded.get()); + stream.WriteByte(0); + if (result) { + serializer_->WriteValue(*result, &stream); + } else { + serializer_->WriteValue(EncodableValue(), &stream); + } + return encoded; +} + +std::unique_ptr> +StandardMethodCodec::EncodeErrorEnvelopeInternal( + const std::string& error_code, + const std::string& error_message, + const EncodableValue* error_details) const { + auto encoded = std::make_unique>(); + 
ByteBufferStreamWriter stream(encoded.get()); + stream.WriteByte(1); + serializer_->WriteValue(EncodableValue(error_code), &stream); + if (error_message.empty()) { + serializer_->WriteValue(EncodableValue(), &stream); + } else { + serializer_->WriteValue(EncodableValue(error_message), &stream); + } + if (error_details) { + serializer_->WriteValue(*error_details, &stream); + } else { + serializer_->WriteValue(EncodableValue(), &stream); + } + return encoded; +} + +bool StandardMethodCodec::DecodeAndProcessResponseEnvelopeInternal( + const uint8_t* response, + size_t response_size, + MethodResult* result) const { + ByteBufferStreamReader stream(response, response_size); + uint8_t flag = stream.ReadByte(); + switch (flag) { + case 0: { + EncodableValue value = serializer_->ReadValue(&stream); + if (value.IsNull()) { + result->Success(); + } else { + result->Success(value); + } + return true; + } + case 1: { + EncodableValue code = serializer_->ReadValue(&stream); + EncodableValue message = serializer_->ReadValue(&stream); + EncodableValue details = serializer_->ReadValue(&stream); + const std::string& message_string = + message.IsNull() ? "" : std::get(message); + if (details.IsNull()) { + result->Error(std::get(code), message_string); + } else { + result->Error(std::get(code), message_string, details); + } + return true; + } + default: + return false; + } +} + +} // namespace flutter diff --git a/linux/flutter/texture_registrar_impl.h b/linux/flutter/texture_registrar_impl.h new file mode 100644 index 0000000000..8659b73e65 --- /dev/null +++ b/linux/flutter/texture_registrar_impl.h @@ -0,0 +1,42 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_TEXTURE_REGISTRAR_IMPL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_TEXTURE_REGISTRAR_IMPL_H_ + +#include "include/flutter/texture_registrar.h" + +struct FlTextureProxy; + +namespace flutter { + +// Wrapper around a FlTextureRegistrar that implements the +// TextureRegistrar API. +class TextureRegistrarImpl : public TextureRegistrar { + public: + explicit TextureRegistrarImpl(FlTextureRegistrar* texture_registrar_ref); + virtual ~TextureRegistrarImpl(); + + // Prevent copying. + TextureRegistrarImpl(TextureRegistrarImpl const&) = delete; + TextureRegistrarImpl& operator=(TextureRegistrarImpl const&) = delete; + + // |flutter::TextureRegistrar| + int64_t RegisterTexture(TextureVariant* texture) override; + + // |flutter::TextureRegistrar| + bool MarkTextureFrameAvailable(int64_t texture_id) override; + + // |flutter::TextureRegistrar| + bool UnregisterTexture(int64_t texture_id) override; + + private: + // Handle for interacting with the C API. 
+ FlTextureRegistrar* texture_registrar_ref_; + std::map textures_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_TEXTURE_REGISTRAR_IMPL_H_ diff --git a/linux/flutter_webrtc/flutter_web_r_t_c_plugin.h b/linux/flutter_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..5d88c7b76d --- /dev/null +++ b/linux/flutter_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,25 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include +G_BEGIN_DECLS + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default"))) +#else +#define FLUTTER_PLUGIN_EXPORT +#endif + +typedef struct _FlutterWebrtcPlugin FlutterWebrtcPlugin; +typedef struct { + GObjectClass parent_class; +} FlutterWebrtcPluginClass; + +FLUTTER_PLUGIN_EXPORT GType flutter_webrtc_plugin_get_type(); + +FLUTTER_PLUGIN_EXPORT void flutter_web_r_t_c_plugin_register_with_registrar( + FlPluginRegistrar* registrar); + +G_END_DECLS + +#endif // PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ diff --git a/linux/flutter_webrtc_plugin.cc b/linux/flutter_webrtc_plugin.cc new file mode 100644 index 0000000000..336204d55a --- /dev/null +++ b/linux/flutter_webrtc_plugin.cc @@ -0,0 +1,78 @@ +#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" + +#include "flutter_common.h" +#include "flutter_webrtc.h" +#include "task_runner_linux.h" + +const char* kChannelName = "FlutterWebRTC.Method"; + +//#if defined(_WINDOWS) + +namespace flutter_webrtc_plugin { + +// A webrtc plugin for windows/linux. +class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. 
+ std::unique_ptr plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + TaskRunner* task_runner() { return task_runner_.get(); } + + private: + // Creates a plugin that communicates on the given channel. + FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()), + task_runner_(std::make_unique()) { + webrtc_ = std::make_unique(this); + } + + // Called when a method is called on |channel_|; + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr result) { + // handle method call and forward to webrtc native sdk. 
+ auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr channel_; + std::unique_ptr webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; + std::unique_ptr task_runner_; +}; + +} // namespace flutter_webrtc_plugin + +void flutter_web_r_t_c_plugin_register_with_registrar( + FlPluginRegistrar* registrar) { + static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + plugin_registrar); +} \ No newline at end of file diff --git a/linux/task_runner_linux.cc b/linux/task_runner_linux.cc new file mode 100644 index 0000000000..1c8a3f3240 --- /dev/null +++ b/linux/task_runner_linux.cc @@ -0,0 +1,31 @@ +#include "task_runner_linux.h" + +#include + +namespace flutter_webrtc_plugin { + +void TaskRunnerLinux::EnqueueTask(TaskClosure task) { + { + std::lock_guard lock(tasks_mutex_); + tasks_.push(std::move(task)); + } + + GMainContext* context = g_main_context_default(); + if (context) { + g_main_context_invoke( + context, + [](gpointer user_data) -> gboolean { + TaskRunnerLinux* runner = static_cast(user_data); + std::lock_guard lock(runner->tasks_mutex_); + while (!runner->tasks_.empty()) { + TaskClosure task = std::move(runner->tasks_.front()); + runner->tasks_.pop(); + task(); + } + return G_SOURCE_REMOVE; + }, + this); + } +} + +} // namespace flutter_webrtc_plugin diff --git a/linux/task_runner_linux.h b/linux/task_runner_linux.h new file mode 100644 index 0000000000..cff94639f7 --- /dev/null +++ b/linux/task_runner_linux.h @@ -0,0 +1,26 @@ +#ifndef PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ +#define PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ + +#include +#include +#include +#include "task_runner.h" + +namespace flutter_webrtc_plugin { + +class TaskRunnerLinux : public TaskRunner { + public: + 
TaskRunnerLinux() = default; + ~TaskRunnerLinux() override = default; + + // TaskRunner implementation. + void EnqueueTask(TaskClosure task) override; + + private: + std::mutex tasks_mutex_; + std::queue tasks_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ \ No newline at end of file diff --git a/macos/Classes/AudioManager.h b/macos/Classes/AudioManager.h new file mode 120000 index 0000000000..1c21c3e586 --- /dev/null +++ b/macos/Classes/AudioManager.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.h \ No newline at end of file diff --git a/macos/Classes/AudioManager.m b/macos/Classes/AudioManager.m new file mode 120000 index 0000000000..bbcfb519ed --- /dev/null +++ b/macos/Classes/AudioManager.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.m \ No newline at end of file diff --git a/macos/Classes/AudioProcessingAdapter.h b/macos/Classes/AudioProcessingAdapter.h new file mode 120000 index 0000000000..f3048db2f1 --- /dev/null +++ b/macos/Classes/AudioProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.h \ No newline at end of file diff --git a/macos/Classes/AudioProcessingAdapter.m b/macos/Classes/AudioProcessingAdapter.m new file mode 120000 index 0000000000..803efdda50 --- /dev/null +++ b/macos/Classes/AudioProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.m \ No newline at end of file diff --git a/macos/Classes/AudioUtils.h b/macos/Classes/AudioUtils.h new file mode 120000 index 0000000000..efc6c758c8 --- /dev/null +++ b/macos/Classes/AudioUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioUtils.h \ No newline at end of file diff --git a/macos/Classes/AudioUtils.m b/macos/Classes/AudioUtils.m new file mode 120000 index 0000000000..5023efe9fd --- /dev/null +++ b/macos/Classes/AudioUtils.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioUtils.m \ No newline at end of file diff --git 
a/macos/Classes/CameraUtils.h b/macos/Classes/CameraUtils.h new file mode 120000 index 0000000000..a31c2baab2 --- /dev/null +++ b/macos/Classes/CameraUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.h \ No newline at end of file diff --git a/macos/Classes/CameraUtils.m b/macos/Classes/CameraUtils.m new file mode 120000 index 0000000000..336e1ea963 --- /dev/null +++ b/macos/Classes/CameraUtils.m @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.m \ No newline at end of file diff --git a/macos/Classes/FlutterRPScreenRecorder.h b/macos/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 0000000000..a34a3193c9 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of file diff --git a/macos/Classes/FlutterRPScreenRecorder.m b/macos/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.h b/macos/Classes/FlutterRTCDataChannel.h deleted file mode 100755 index c2e039f072..0000000000 --- a/macos/Classes/FlutterRTCDataChannel.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import - -@interface RTCDataChannel (Flutter) -@property (nonatomic, strong) NSString *peerConnectionId; -@property (nonatomic, strong) NSNumber *flutterChannelId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCDataChannel) - - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(nonnull NSString *)label - config:(nonnull RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger; - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString 
*)dataChannelId; - - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(nonnull NSString *)data - type:(nonnull NSString *)type; - -@end diff --git a/macos/Classes/FlutterRTCDataChannel.h b/macos/Classes/FlutterRTCDataChannel.h new file mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.m b/macos/Classes/FlutterRTCDataChannel.m deleted file mode 100755 index 0f1295817b..0000000000 --- a/macos/Classes/FlutterRTCDataChannel.m +++ /dev/null @@ -1,165 +0,0 @@ -#import -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCPeerConnection.h" -#import - -@implementation RTCDataChannel (Flutter) - -- (NSString *)peerConnectionId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setPeerConnectionId:(NSString *)peerConnectionId -{ - objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink )eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSNumber *)flutterChannelId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterChannelId:(NSNumber *)flutterChannelId -{ - objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* 
_Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (RTCDataChannel) - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(NSString *)label - config:(RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; - - if (nil != dataChannel) { - dataChannel.peerConnectionId = peerConnectionId; - NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; - peerConnection.dataChannels[dataChannelId] = dataChannel; - dataChannel.flutterChannelId = dataChannelId; - dataChannel.delegate = self; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] - binaryMessenger:messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - } -} - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - RTCDataChannel *dataChannel = dataChannels[dataChannelId]; - FlutterEventChannel *eventChannel = dataChannel.eventChannel; - [eventChannel setStreamHandler:nil]; - dataChannel.eventChannel = nil; - [dataChannel close]; - [dataChannels removeObjectForKey:dataChannelId]; -} - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(id)data - type:(NSString *)type -{ - 
RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; - - NSData *bytes = [type isEqualToString:@"binary"] ? - ((FlutterStandardTypedData*)data).data : - [data dataUsingEncoding:NSUTF8StringEncoding]; - - RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; - [dataChannel sendData:buffer]; -} - -- (NSString *)stringForDataChannelState:(RTCDataChannelState)state -{ - switch (state) { - case RTCDataChannelStateConnecting: return @"connecting"; - case RTCDataChannelStateOpen: return @"open"; - case RTCDataChannelStateClosing: return @"closing"; - case RTCDataChannelStateClosed: return @"closed"; - } - return nil; -} - -#pragma mark - RTCDataChannelDelegate methods - -// Called when the data channel state has changed. -- (void)dataChannelDidChangeState:(RTCDataChannel*)channel -{ - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelStateChanged", - @"id": channel.flutterChannelId, - @"state": [self stringForDataChannelState:channel.readyState]}); - } -} - -// Called when a data buffer was successfully received. -- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer -{ - NSString *type; - id data; - if (buffer.isBinary) { - type = @"binary"; - data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; - } else { - type = @"text"; - data = [[NSString alloc] initWithData:buffer.data - encoding:NSUTF8StringEncoding]; - } - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelReceiveMessage", - @"id": channel.flutterChannelId, - @"type": type, - @"data": (data ? 
data : [NSNull null])}); - } -} - -@end diff --git a/macos/Classes/FlutterRTCDataChannel.m b/macos/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDesktopCapturer.h b/macos/Classes/FlutterRTCDesktopCapturer.h new file mode 120000 index 0000000000..eff4773160 --- /dev/null +++ b/macos/Classes/FlutterRTCDesktopCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDesktopCapturer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDesktopCapturer.m b/macos/Classes/FlutterRTCDesktopCapturer.m new file mode 120000 index 0000000000..5388e628f4 --- /dev/null +++ b/macos/Classes/FlutterRTCDesktopCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDesktopCapturer.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.h b/macos/Classes/FlutterRTCFrameCapturer.h deleted file mode 100644 index 4b1e4d7a9f..0000000000 --- a/macos/Classes/FlutterRTCFrameCapturer.h +++ /dev/null @@ -1,8 +0,0 @@ -#import -#import - -@interface FlutterRTCFrameCapturer : NSObject - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; - -@end diff --git a/macos/Classes/FlutterRTCFrameCapturer.h b/macos/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.m b/macos/Classes/FlutterRTCFrameCapturer.m deleted file mode 100644 index 4c89d41e78..0000000000 --- a/macos/Classes/FlutterRTCFrameCapturer.m +++ /dev/null @@ -1,81 +0,0 @@ -#import -#import "FlutterRTCFrameCapturer.h" - -#include "libyuv.h" - -@import CoreImage; -@import 
CoreVideo; - -@implementation FlutterRTCFrameCapturer { - RTCVideoTrack* _track; - NSString* _path; - FlutterResult _result; - bool _gotFrame; -} - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result -{ - self = [super init]; - if (self) { - _gotFrame = false; - _track = track; - _path = path; - _result = result; - [track addRenderer:self]; - } - return self; -} - -- (void)setSize:(CGSize)size -{ -} - -- (void)renderFrame:(nullable RTCVideoFrame *)frame -{ - if (_gotFrame || frame == nil) return; - _gotFrame = true; - - id buffer = frame.buffer; - CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; - - CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; - CIContext *context = [CIContext contextWithOptions:nil]; - CGImageRef cgImage = [context createCGImage:ciImage - fromRect:CGRectMake(0, 0, frame.width, frame.height)]; -#if 0 //TODO: frame capture - UIImageOrientation orientation; - switch (frame.rotation) { - case RTCVideoRotation_90: - orientation = UIImageOrientationRight; - break; - case RTCVideoRotation_180: - orientation = UIImageOrientationDown; - break; - case RTCVideoRotation_270: - orientation = UIImageOrientationLeft; - default: - orientation = UIImageOrientationUp; - break; - } - - UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; - CGImageRelease(cgImage); - NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); - - if ([jpgData writeToFile:_path atomically:NO]) { - NSLog(@"File writed successfully to %@", _path); - _result(nil); - } else { - NSLog(@"Failed to write to file"); - _result([FlutterError errorWithCode:@"CaptureFrameFailed" - message:@"Failed to write JPEG data to file" - details:nil]); - } -#endif - dispatch_async(dispatch_get_main_queue(), ^{ - [self->_track removeRenderer:self]; - self->_track = nil; - }); -} - -@end diff --git a/macos/Classes/FlutterRTCFrameCapturer.m 
b/macos/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCryptor.h b/macos/Classes/FlutterRTCFrameCryptor.h new file mode 120000 index 0000000000..ad3e0de33e --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCryptor.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCryptor.m b/macos/Classes/FlutterRTCFrameCryptor.m new file mode 120000 index 0000000000..bd62d3db56 --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCryptor.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.h b/macos/Classes/FlutterRTCMediaStream.h deleted file mode 100644 index 255a472380..0000000000 --- a/macos/Classes/FlutterRTCMediaStream.h +++ /dev/null @@ -1,20 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" - -@interface FlutterWebRTCPlugin (RTCMediaStream) - --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getSources:(FlutterResult)result; - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track - toPath:(NSString *) path - result:(FlutterResult) result; -@end diff --git a/macos/Classes/FlutterRTCMediaStream.h b/macos/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.m b/macos/Classes/FlutterRTCMediaStream.m deleted 
file mode 100755 index 5b52c9b65b..0000000000 --- a/macos/Classes/FlutterRTCMediaStream.m +++ /dev/null @@ -1,557 +0,0 @@ -#import - -#import - -#import "FlutterRTCFrameCapturer.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCPeerConnection.h" - -@implementation AVCaptureDevice (Flutter) - -- (NSString*)positionString { - switch (self.position) { - case AVCaptureDevicePositionUnspecified: return @"unspecified"; - case AVCaptureDevicePositionBack: return @"back"; - case AVCaptureDevicePositionFront: return @"front"; - } - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCMediaStream) - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} - */ -typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} - */ -typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); - -- (RTCMediaConstraints *)defaultMediaStreamConstraints { - NSDictionary *mandatoryConstraints - = @{ @"minWidth" : @"1280", - @"minHeight" : @"720", - @"minFrameRate" : @"30" }; - RTCMediaConstraints* constraints = - [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; -} - -/** - * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the audio-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCAudioTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. 
- * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCAudioTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserAudio:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - NSString *trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack *audioTrack - = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; - - [mediaStream addAudioTrack:audioTrack]; - - successCallback(mediaStream); -} - -// TODO: Use RCTConvert for constraints ... --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult) result { - // Initialize RTCMediaStream with a unique label in order to allow multiple - // RTCMediaStream instances initialized by multiple getUserMedia calls to be - // added to 1 RTCPeerConnection instance. As suggested by - // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good - // practice, use a UUID (conforming to RFC4122). 
- NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream - = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - [self - getUserMedia:constraints - successCallback:^ (RTCMediaStream *mediaStream) { - NSString *mediaStreamId = mediaStream.streamId; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in mediaStream.audioTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); - } - errorCallback:^ (NSString *errorType, NSString *errorMessage) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] - message:errorMessage - details:nil]); - } - mediaStream:mediaStream]; -} - -/** - * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which - * satisfies specific constraints and adds it to a specific - * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track - * of the respective media type and the specified {@code constraints} specify - * that a track of the respective media type is required; otherwise, reports - * success for the specified {@code mediaStream} to a specific - * {@link NavigatorUserMediaSuccessCallback}. 
In other words, implements a media - * type-specific iteration of or successfully concludes the - * {@code getUserMedia()} algorithm. The method will be recursively invoked to - * conclude the whole {@code getUserMedia()} algorithm either with (successful) - * satisfaction of the specified {@code constraints} or with failure. - * - * @param constraints The {@code MediaStreamConstraints} which specifies the - * requested media types and which the new {@code RTCAudioTrack} or - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm. - */ -- (void)getUserMedia:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // If mediaStream contains no audioTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local audio content. - if (mediaStream.audioTracks.count == 0) { - // constraints.audio - id audioConstraints = constraints[@"audio"]; - BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; - if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { - [self requestAccessForMediaType:AVMediaTypeAudio - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } - } - - // If mediaStream contains no videoTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local video content. 
- if (mediaStream.videoTracks.count == 0) { - // constraints.video - id videoConstraints = constraints[@"video"]; - if (videoConstraints) { - BOOL requestAccessForVideo - = [videoConstraints isKindOfClass:[NSNumber class]] - ? [videoConstraints boolValue] - : [videoConstraints isKindOfClass:[NSDictionary class]]; -#if !TARGET_IPHONE_SIMULATOR - if (requestAccessForVideo) { - [self requestAccessForMediaType:AVMediaTypeVideo - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } -#endif - } - } - - // There are audioTracks and/or videoTracks in mediaStream as requested by - // constraints so the getUserMedia() is to conclude with success. - successCallback(mediaStream); -} - -/** - * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the video-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCVideoTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. 
- */ -- (void)getUserVideo:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - id videoConstraints = constraints[@"video"]; - AVCaptureDevice *videoDevice; - if ([videoConstraints isKindOfClass:[NSDictionary class]]) { - // constraints.video.optional - id optionalVideoConstraints = videoConstraints[@"optional"]; - if (optionalVideoConstraints - && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { - NSArray *options = optionalVideoConstraints; - for (id item in options) { - if ([item isKindOfClass:[NSDictionary class]]) { - NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; - if (sourceId) { - videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; - if (videoDevice) { - break; - } - } - } - } - } - if (!videoDevice) { - // constraints.video.facingMode - // - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - id facingMode = videoConstraints[@"facingMode"]; - if (facingMode && [facingMode isKindOfClass:[NSString class]]) { - AVCaptureDevicePosition position; - if ([facingMode isEqualToString:@"environment"]) { - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionBack; - } else if ([facingMode isEqualToString:@"user"]) { - self._usingFrontCamera = YES; - position = AVCaptureDevicePositionFront; - } else { - // If the specified facingMode value is not supported, fall back to - // the default video device. 
- self._usingFrontCamera = NO; - position = AVCaptureDevicePositionUnspecified; - } - videoDevice = [self findDeviceForPosition:position]; - } - } - if (!videoDevice) { - videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - } - - //TODO(rostopira): refactor to separate function and add support for max - - self._targetWidth = 1280; - self._targetHeight = 720; - self._targetFps = 30; - - id mandatory = videoConstraints[@"mandatory"]; - // constraints.video.mandatory - if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) - { - id widthConstraint = mandatory[@"minWidth"]; - if ([widthConstraint isKindOfClass:[NSString class]]) { - int possibleWidth = [widthConstraint intValue]; - if (possibleWidth != 0) { - self._targetWidth = possibleWidth; - } - } - id heightConstraint = mandatory[@"minHeight"]; - if ([heightConstraint isKindOfClass:[NSString class]]) { - int possibleHeight = [heightConstraint intValue]; - if (possibleHeight != 0) { - self._targetHeight = possibleHeight; - } - } - id fpsConstraint = mandatory[@"minFrameRate"]; - if ([fpsConstraint isKindOfClass:[NSString class]]) { - int possibleFps = [fpsConstraint intValue]; - if (possibleFps != 0) { - self._targetFps = possibleFps; - } - } - } - - if (videoDevice) { - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - if (self.videoCapturer) { - [self.videoCapturer stopCapture]; - } - self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { - if (error) { - NSLog(@"Start capture error: %@", [error localizedDescription]); - } - }]; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - successCallback(mediaStream); - } else { - // According to step 6.2.3 of the getUserMedia() algorithm, if there is no - // source, fail with a new OverconstrainedError. - errorCallback(@"OverconstrainedError", /* errorMessage */ nil); - } -} - --(void)mediaStreamRelease:(RTCMediaStream *)stream -{ - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:stream.streamId]; - } -} - - -/** - * Obtains local media content of a specific type. Requests access for the - * specified {@code mediaType} if necessary. In other words, implements a media - * type-specific iteration of the {@code getUserMedia()} algorithm. - * - * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} - * which specifies the type of the local media content to obtain. - * @param constraints The {@code MediaStreamConstraints} which are to be - * satisfied by the obtained local media content. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is to collect the - * obtained local media content of the specified {@code mediaType}. 
- */ -- (void)requestAccessForMediaType:(NSString *)mediaType - constraints:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // According to step 6.2.1 of the getUserMedia() algorithm, if there is no - // source, fail "with a new DOMException object whose name attribute has the - // value NotFoundError." - // XXX The following approach does not work for audio in Simulator. That is - // because audio capture is done using AVAudioSession which does not use - // AVCaptureDevice there. Anyway, Simulator will not (visually) request access - // for audio. - if (mediaType == AVMediaTypeVideo - && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { - // Since successCallback and errorCallback are asynchronously invoked - // elsewhere, make sure that the invocation here is consistent. - dispatch_async(dispatch_get_main_queue(), ^ { - errorCallback(@"DOMException", @"NotFoundError"); - }); - return; - } - - [AVCaptureDevice - requestAccessForMediaType:mediaType - completionHandler:^ (BOOL granted) { - dispatch_async(dispatch_get_main_queue(), ^ { - if (granted) { - NavigatorUserMediaSuccessCallback scb - = ^ (RTCMediaStream *mediaStream) { - [self getUserMedia:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - }; - - if (mediaType == AVMediaTypeAudio) { - [self getUserAudio:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } else if (mediaType == AVMediaTypeVideo) { - [self getUserVideo:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } - } else { - // According to step 10 Permission Failure of the getUserMedia() - // algorithm, if the user has denied permission, fail "with a new - // DOMException object whose name attribute has the value - // NotAllowedError." 
- errorCallback(@"DOMException", @"NotAllowedError"); - } - }); - }]; -} - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result { - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - /* TODO: scree capture - FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; - - [screenCapturer startCapture]; - - //TODO: - self.videoCapturer = screenCapturer; - */ - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); -} - --(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": 
device.localizedName, - @"kind": @"audioinput", - }]; - } - result(@{@"sources": sources}); -} - --(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track -{ - // what's different to mediaStreamTrackStop? only call mediaStream explicitly? - if (mediaStream && track) { - track.isEnabled = NO; - // FIXME this is called when track is removed from the MediaStream, - // but it doesn't mean it can not be added back using MediaStream.addTrack - //TODO: [self.localTracks removeObjectForKey:trackID]; - if ([track.kind isEqualToString:@"audio"]) { - [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; - } else if([track.kind isEqualToString:@"video"]) { - [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; - } - } -} - --(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled -{ - if (track && track.isEnabled != enabled) { - track.isEnabled = enabled; - } -} - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't switch camera"); - return; - } - self._usingFrontCamera = !self._usingFrontCamera; - AVCaptureDevicePosition position = self._usingFrontCamera ? 
AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; - AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ - if (error != nil) { - result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); - } else { - result([NSNumber numberWithBool:self._usingFrontCamera]); - } - }]; -} - --(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't capture frame."); - return; - } - - FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; -} - --(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track -{ - if (track) { - track.isEnabled = NO; - [self.localTracks removeObjectForKey:track.trackId]; - } -} - -- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { - if (position == AVCaptureDevicePositionUnspecified) { - return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; - for (AVCaptureDevice *device in captureDevices) { - if (device.position == position) { - return device; - } - } - return captureDevices[0]; -} - -- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { - NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; - AVCaptureDeviceFormat *selectedFormat = nil; - int currentDiff = INT_MAX; - for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = 
CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); - if (diff < currentDiff) { - selectedFormat = format; - currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { - selectedFormat = format; - } - } - return selectedFormat; -} - -- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { - Float64 maxSupportedFramerate = 0; - for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { - maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); - } - return fmin(maxSupportedFramerate, self._targetFps); -} - -@end diff --git a/macos/Classes/FlutterRTCMediaStream.m b/macos/Classes/FlutterRTCMediaStream.m new file mode 120000 index 0000000000..2e988ad614 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.h b/macos/Classes/FlutterRTCPeerConnection.h deleted file mode 100755 index b99f885b0a..0000000000 --- a/macos/Classes/FlutterRTCPeerConnection.h +++ /dev/null @@ -1,43 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface RTCPeerConnection (Flutter) -@property (nonatomic, strong) NSMutableDictionary *dataChannels; -@property (nonatomic, strong) NSMutableDictionary *remoteStreams; -@property (nonatomic, strong) NSMutableDictionary *remoteTracks; -@property (nonatomic, strong) NSString *flutterId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - 
peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result; - --(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection; - -@end diff --git a/macos/Classes/FlutterRTCPeerConnection.h b/macos/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.m b/macos/Classes/FlutterRTCPeerConnection.m deleted file mode 100755 index 6517f85ffe..0000000000 --- a/macos/Classes/FlutterRTCPeerConnection.m +++ /dev/null @@ -1,504 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCDataChannel.h" - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import - -@implementation RTCPeerConnection (Flutter) - -@dynamic eventSink; - -- (NSString *)flutterId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterId:(NSString *)flutterId -{ - objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - 
-- (FlutterEventSink)eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)dataChannels -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setDataChannels:(NSMutableDictionary *)dataChannels -{ - objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteStreams -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams -{ - objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteTracks -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks -{ - objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection -{ - [peerConnection setConfiguration:configuration]; -} - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - 
peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result -{ - [peerConnection - offerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateOfferFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection - answerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateAnswerFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", 
error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection addIceCandidate:candidate]; - result(nil); - //NSLog(@"addICECandidateresult: %@", candidate); -} - --(void) peerConnectionClose:(RTCPeerConnection *)peerConnection -{ - [peerConnection close]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSString *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; -} - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result -{ - RTCMediaStreamTrack *track = nil; - if (!trackID - || !trackID.length - || (track = self.localTracks[trackID]) - || (track = peerConnection.remoteTracks[trackID])) { - [peerConnection statsForTrack:track - statsOutputLevel:RTCStatsOutputLevelStandard - completionHandler:^(NSArray *reports) { - - NSMutableArray *stats = [NSMutableArray array]; - - for (RTCLegacyStatsReport *report in reports) { - [stats addObject:@{@"id": report.reportId, - @"type": report.type, - @"timestamp": @(report.timestamp), - @"values": report.values - }]; - } - - result(@{@"stats": stats}); - }]; - }else{ - result([FlutterError errorWithCode:@"GetStatsFailed" - message:[NSString stringWithFormat:@"Error %@", @""] - details:nil]); - } -} - -- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { - switch (state) { - case 
RTCIceConnectionStateNew: return @"new"; - case RTCIceConnectionStateChecking: return @"checking"; - case RTCIceConnectionStateConnected: return @"connected"; - case RTCIceConnectionStateCompleted: return @"completed"; - case RTCIceConnectionStateFailed: return @"failed"; - case RTCIceConnectionStateDisconnected: return @"disconnected"; - case RTCIceConnectionStateClosed: return @"closed"; - case RTCIceConnectionStateCount: return @"count"; - } - return nil; -} - -- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { - switch (state) { - case RTCIceGatheringStateNew: return @"new"; - case RTCIceGatheringStateGathering: return @"gathering"; - case RTCIceGatheringStateComplete: return @"complete"; - } - return nil; -} - -- (NSString *)stringForSignalingState:(RTCSignalingState)state { - switch (state) { - case RTCSignalingStateStable: return @"stable"; - case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; - case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; - case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; - case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; - case RTCSignalingStateClosed: return @"closed"; - } - return nil; -} - - -/** - * Parses the constraint keys and values of a specific JavaScript object into - * a specific NSMutableDictionary in a format suitable for the - * initialization of a RTCMediaConstraints instance. - * - * @param src The JavaScript object which defines constraint keys and values and - * which is to be parsed into the specified dst. - * @param dst The NSMutableDictionary into which the constraint keys - * and values defined by src are to be written in a format suitable for - * the initialization of a RTCMediaConstraints instance. 
- */ -- (void)parseJavaScriptConstraints:(NSDictionary *)src - intoWebRTCConstraints:(NSMutableDictionary *)dst { - for (id srcKey in src) { - id srcValue = src[srcKey]; - NSString *dstValue; - - if ([srcValue isKindOfClass:[NSNumber class]]) { - dstValue = [srcValue boolValue] ? @"true" : @"false"; - } else { - dstValue = [srcValue description]; - } - dst[[srcKey description]] = dstValue; - } -} - -/** - * Parses a JavaScript object into a new RTCMediaConstraints instance. - * - * @param constraints The JavaScript object to parse into a new - * RTCMediaConstraints instance. - * @returns A new RTCMediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. - */ -- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { - id mandatory = constraints[@"mandatory"]; - NSMutableDictionary *mandatory_ - = [NSMutableDictionary new]; - - if ([mandatory isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)mandatory - intoWebRTCConstraints:mandatory_]; - } - - id optional = constraints[@"optional"]; - NSMutableDictionary *optional_ - = [NSMutableDictionary new]; - - if ([optional isKindOfClass:[NSArray class]]) { - for (id o in (NSArray *)optional) { - if ([o isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)o - intoWebRTCConstraints:optional_]; - } - } - } - - return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ - optionalConstraints:optional_]; -} - -#pragma mark - RTCPeerConnectionDelegate methods - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"signalingState", - @"state" : [self stringForSignalingState:newState]}); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream 
*)stream didAddTrack:(RTCVideoTrack*)track{ - - peerConnection.remoteTracks[track.trackId] = track; - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - NSString *streamId = stream.streamId; - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in stream.audioTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in stream.videoTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - NSString *streamId = stream.streamId; - 
peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddStream", - @"streamId": streamId, - @"audioTracks": audioTracks, - @"videoTracks": videoTracks, - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream { - NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; - // We assume there can be only one object for 1 key - if (keysArray.count > 1) { - NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); - } - NSString *streamId = stream.streamId; - - for (RTCVideoTrack *track in stream.videoTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - [peerConnection.remoteStreams removeObjectForKey:streamId]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveStream", - @"streamId": streamId, - }); - } -} - -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{@"event" : @"onRenegotiationNeeded",}); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceConnectionState", - @"state" : [self stringForICEConnectionState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceGatheringState", - @"state" : [self stringForICEGatheringState:newState] 
- }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onCandidate", - @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { - if (-1 == dataChannel.channelId) { - return; - } - - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - dataChannel.peerConnectionId = peerConnection.flutterId; - dataChannel.delegate = self; - peerConnection.dataChannels[dataChannelId] = dataChannel; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] - binaryMessenger:self.messenger]; - - dataChannel.eventChannel = eventChannel; - dataChannel.flutterChannelId = dataChannelId; - [eventChannel setStreamHandler:dataChannel]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"didOpenDataChannel", - @"id": dataChannelId, - @"label": dataChannel.label - }); - } -} - -@end - diff --git a/macos/Classes/FlutterRTCPeerConnection.m b/macos/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.h b/macos/Classes/FlutterRTCVideoRenderer.h deleted file mode 100755 index 7fdc81b253..0000000000 --- a/macos/Classes/FlutterRTCVideoRenderer.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface FlutterRTCVideoRenderer : NSObject - -/** - * The {@link 
RTCVideoTrack}, if any, which this instance renders. - */ -@property (nonatomic, strong) RTCVideoTrack *videoTrack; -@property (nonatomic) int64_t textureId; -@property (nonatomic, weak) id registry; -@property (nonatomic, strong) FlutterEventSink eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - -- (void)dispose; - -@end - - -@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/macos/Classes/FlutterRTCVideoRenderer.h b/macos/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.m b/macos/Classes/FlutterRTCVideoRenderer.m deleted file mode 100755 index 928051960d..0000000000 --- a/macos/Classes/FlutterRTCVideoRenderer.m +++ /dev/null @@ -1,294 +0,0 @@ -#import "FlutterRTCVideoRenderer.h" -#import "FlutterWebRTCPlugin.h" - -#import -#import -#import -#import -#include "libyuv.h" - -@implementation FlutterRTCVideoRenderer { - CGSize _frameSize; - CGSize _renderSize; - CVPixelBufferRef _pixelBufferRef; - RTCVideoRotation _rotation; - FlutterEventChannel* _eventChannel; - bool _isFirstFrameRendered; -} - -@synthesize textureId = _textureId; -@synthesize registry = _registry; -@synthesize eventSink = _eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - self = [super init]; - if (self){ - _isFirstFrameRendered = false; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - _registry = registry; - _pixelBufferRef = nil; - _eventSink = nil; - _rotation = -1; - 
_textureId = [registry registerTexture:self]; - /*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - return self; -} - --(void)dealloc { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } -} - -- (CVPixelBufferRef)copyPixelBuffer:(size_t)width height:(size_t)height { - if(_pixelBufferRef != nil) { - RTCCVPixelBuffer *rtcPixelbuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:_pixelBufferRef]; - CVPixelBufferRef outbuffer; - CVPixelBufferCreate(kCFAllocatorDefault, - width, height, - kCVPixelFormatType_32BGRA, - nil, &outbuffer); - - [rtcPixelbuffer cropAndScaleTo:outbuffer withTempBuffer:CVPixelBufferGetBaseAddress(outbuffer)]; - return outbuffer; - } - return nil; -} - -- (CVPixelBufferRef)copyPixelBuffer { - if(_pixelBufferRef != nil){ - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; - } - return nil; -} - --(void)dispose{ - [_registry unregisterTexture:_textureId]; -} - -- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { - RTCVideoTrack *oldValue = self.videoTrack; - - if (oldValue != videoTrack) { - _isFirstFrameRendered = false; - if (oldValue) { - [oldValue removeRenderer:self]; - } - _videoTrack = videoTrack; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - if (videoTrack) { - [videoTrack addRenderer:self]; - } - } -} - - --(id) correctRotation:(const id) src - withRotation:(RTCVideoRotation) rotation -{ - - int rotated_width = src.width; - int rotated_height = src.height; - - if (rotation == RTCVideoRotation_90 || - rotation == RTCVideoRotation_270) { - int temp = rotated_width; - rotated_width = rotated_height; - rotated_height = temp; - } - - id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width height:rotated_height]; - - I420Rotate(src.dataY, src.strideY, - src.dataU, src.strideU, - src.dataV, src.strideV, - 
(uint8_t*)buffer.dataY, buffer.strideY, - (uint8_t*)buffer.dataU,buffer.strideU, - (uint8_t*)buffer.dataV, buffer.strideV, - src.width, src.height, - (RotationModeEnum)rotation); - - return buffer; -} - --(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame -{ - id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; - CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); - - const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); - if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || - pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { - // NV12 - uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); - const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); - uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); - const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); - - I420ToNV12(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dstY, - (int)dstYStride, - dstUV, - (int)dstUVStride, - i420Buffer.width, - i420Buffer.height); - } else { - uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); - const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); - - if (pixelFormat == kCVPixelFormatType_32BGRA) { - // Corresponds to libyuv::FOURCC_ARGB - I420ToARGB(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } else if (pixelFormat == kCVPixelFormatType_32ARGB) { - // Corresponds to libyuv::FOURCC_BGRA - I420ToBGRA(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - 
i420Buffer.height); - } - } - - CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); -} - -#pragma mark - RTCVideoRenderer methods -- (void)renderFrame:(RTCVideoFrame *)frame { - - [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; - - __weak FlutterRTCVideoRenderer *weakSelf = self; - if(_renderSize.width != frame.width || _renderSize.height != frame.height){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeVideoSize", - @"id": @(strongSelf.textureId), - @"width": @(frame.width), - @"height": @(frame.height), - }); - } - }); - _renderSize = CGSizeMake(frame.width, frame.height); - } - - if(frame.rotation != _rotation){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeRotation", - @"id": @(strongSelf.textureId), - @"rotation": @(frame.rotation), - }); - } - }); - - _rotation = frame.rotation; - } - - //Notify the Flutter new pixelBufferRef to be ready. - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; - if (!strongSelf->_isFirstFrameRendered) { - if (strongSelf.eventSink) { - strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); - strongSelf->_isFirstFrameRendered = true; - } - } - }); -} - -/** - * Sets the size of the video frame to render. - * - * @param size The size of the video frame to render. 
- */ -- (void)setSize:(CGSize)size { - if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) - { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } - NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; - CVPixelBufferCreate(kCFAllocatorDefault, - size.width, size.height, - kCVPixelFormatType_32BGRA, - (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); - - _frameSize = size; - } -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; -} - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId{ - - RTCVideoTrack *videoTrack; - RTCMediaStream *stream = [self streamForId:streamId peerConnectionId:peerConnectionId]; - if(stream){ - NSArray *videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - if (!videoTrack) { - NSLog(@"No video track for RTCMediaStream: %@", streamId); - } - } else { - videoTrack = nil; - } - - view.videoTrack = videoTrack; -} - -@end - diff --git a/macos/Classes/FlutterRTCVideoRenderer.m b/macos/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.h b/macos/Classes/FlutterWebRTCPlugin.h deleted file mode 100644 index 993169ea64..0000000000 --- a/macos/Classes/FlutterWebRTCPlugin.h +++ /dev/null @@ -1,23 +0,0 @@ -#import -#import -#import - -@class FlutterRTCVideoRenderer; - -@interface FlutterWebRTCPlugin : NSObject - -@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property (nonatomic, strong) NSMutableDictionary *peerConnections; -@property (nonatomic, strong) NSMutableDictionary *localStreams; -@property (nonatomic, strong) NSMutableDictionary *localTracks; -@property (nonatomic, strong) NSMutableDictionary *renders; -@property (nonatomic, strong) NSObject* messenger; -@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; -@property (nonatomic) BOOL _usingFrontCamera; -@property (nonatomic) int _targetWidth; -@property (nonatomic) int _targetHeight; -@property (nonatomic) int _targetFps; - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/macos/Classes/FlutterWebRTCPlugin.h b/macos/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.m b/macos/Classes/FlutterWebRTCPlugin.m deleted file mode 100644 index bfbe4535aa..0000000000 --- 
a/macos/Classes/FlutterWebRTCPlugin.m +++ /dev/null @@ -1,721 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCVideoRenderer.h" - -#import - -@implementation FlutterWebRTCPlugin { - FlutterMethodChannel *_methodChannel; - id _registry; - id _messenger; - id _textures; -} - -@synthesize messenger = _messenger; - -+ (void)registerWithRegistrar:(NSObject*)registrar { - - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"FlutterWebRTC.Method" - binaryMessenger:[registrar messenger]]; - FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel - registrar:registrar - messenger:[registrar messenger] - withTextures:[registrar textures]]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel - registrar:(NSObject*)registrar - messenger:(NSObject*)messenger - withTextures:(NSObject *)textures{ - - self = [super init]; - - if (self) { - _methodChannel = channel; - _registry = registrar; - _textures = textures; - _messenger = messenger; - } - - RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; - RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] - initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; - - - self.peerConnections = [NSMutableDictionary new]; - self.localStreams = [NSMutableDictionary new]; - self.localTracks = [NSMutableDictionary new]; - self.renders = [[NSMutableDictionary alloc] init]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { - - if ([@"createPeerConnection" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* configuration = 
argsMap[@"configuration"]; - NSDictionary* constraints = argsMap[@"constraints"]; - - RTCPeerConnection *peerConnection = [self.peerConnectionFactory - peerConnectionWithConfiguration:[self RTCConfiguration:configuration] - constraints:[self parseMediaConstraints:constraints] - delegate:self]; - - peerConnection.remoteStreams = [NSMutableDictionary new]; - peerConnection.remoteTracks = [NSMutableDictionary new]; - peerConnection.dataChannels = [NSMutableDictionary new]; - - NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; - peerConnection.flutterId = peerConnectionId; - - /*Create Event Channel.*/ - peerConnection.eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] - binaryMessenger:_messenger]; - [peerConnection.eventChannel setStreamHandler:peerConnection]; - - self.peerConnections[peerConnectionId] = peerConnection; - result(@{ @"peerConnectionId" : peerConnectionId}); - } else if ([@"getUserMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getUserMedia:constraints result:result]; - } else if ([@"getDisplayMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getDisplayMedia:constraints result:result]; - } else if ([@"getSources" isEqualToString:call.method]) { - [self getSources:result]; - } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - [self mediaStreamGetTracks:streamId result:result]; - } else if ([@"createOffer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = 
self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createAnswer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary * constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateAnswer:constraints - peerConnection:peerConnection - result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection addStream:stream]; - result(@""); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"removeStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection 
&& stream){ - [peerConnection removeStream:stream]; - result(nil); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"captureFrame" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* path = argsMap[@"path"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStreamTrack *track = [self trackForId: trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - if(peerConnection) - { - [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = 
argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - - if(peerConnection) - { - [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"sendDtmf" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* tone = argsMap[@"tone"]; - int duration = ((NSNumber*)argsMap[@"duration"]).intValue; - int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - - RTCRtpSender* audioSender = nil ; - for( RTCRtpSender *rtpSender in peerConnection.senders){ - if([[[rtpSender track] kind] isEqualToString:@"audio"]) { - audioSender = rtpSender; - } - } - if(audioSender){ - NSOperationQueue *queue = [[NSOperationQueue alloc] init]; - [queue addOperationWithBlock:^{ - double durationMs = duration / 1000.0; - double interToneGapMs = interToneGap / 1000.0; - [audioSender.dtmfSender insertDtmf :(NSString *)tone - duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; - NSLog(@"DTMF Tone played "); - }]; - } - - result(@{@"result": @"success"}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addCandidate" 
isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* candMap = argsMap[@"candidate"]; - NSString *sdp = candMap[@"candidate"]; - int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; - NSString *sdpMid = candMap[@"sdpMid"]; - - RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection) - { - [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getStats" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; - result(nil); - } else if ([@"createDataChannel" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* label = argsMap[@"label"]; - NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; - [self createDataChannel:peerConnectionId - label:label - config:[self RTCDataChannelConfiguration:dataChannelDict] - messenger:_messenger]; - result(nil); - } else if ([@"dataChannelSend" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - NSString* type = argsMap[@"type"]; - id data = argsMap[@"data"]; - - [self 
dataChannelSend:peerConnectionId - dataChannelId:dataChannelId - data:data - type:type]; - result(nil); - } else if ([@"dataChannelClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - [self dataChannelClose:peerConnectionId - dataChannelId:dataChannelId]; - result(nil); - } else if ([@"streamDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - if(source){ - [self.videoCapturer stopCapture]; - self.videoCapturer = nil; - } - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:streamId]; - } - result(nil); - } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* enabled = argsMap[@"enabled"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil){ - track.isEnabled = enabled.boolValue; - } - result(nil); - } else if ([@"trackDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - [self.localTracks removeObjectForKey:trackId]; - result(nil); - } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if (peerConnection) { - 
[peerConnection close]; - [self.peerConnections removeObjectForKey:peerConnectionId]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - for (NSNumber *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; - } - result(nil); - } else if ([@"createVideoRenderer" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures - messenger:_messenger]; - self.renders[@(render.textureId)] = render; - result(@{@"textureId": @(render.textureId)}); - } else if ([@"videoRendererDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; - result(nil); - } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - NSString *streamId = argsMap[@"streamId"]; - NSString *peerConnectionId = argsMap[@"peerConnectionId"]; - if(render){ - [self setStreamId:streamId view:render peerConnectionId:peerConnectionId]; - } - result(nil); - } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack 
class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSwitchCamera:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setVolume" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* volume = argsMap[@"volume"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - RTCAudioSource *audioSource = audioTrack.source; - audioSource.volume = [volume doubleValue]; - } - result(nil); - } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* mute = argsMap[@"mute"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - audioTrack.isEnabled = !mute.boolValue; - } - result(nil); - } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* enable = argsMap[@"enable"]; -#if 0 - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:enable.boolValue ? 
AVAudioSessionCategoryOptionDefaultToSpeaker : 0 - error:nil]; - [audioSession setActive:YES error:nil]; -#endif - result(nil); - } else if ([@"getLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.localDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.remoteDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setConfiguration" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"ownerTag"]; - NSDictionary* configuration = argsMap[@"configuration"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; - result(nil); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString 
stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else { - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc -{ - [_localTracks removeAllObjects]; - _localTracks = nil; - [_localStreams removeAllObjects]; - _localStreams = nil; - - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - peerConnection.delegate = nil; - [peerConnection close]; - } - [_peerConnections removeAllObjects]; - _peerConnectionFactory = nil; -} - - --(void)mediaStreamGetTracks:(NSString*)streamId - result:(FlutterResult)result { - RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; - if(stream){ - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack *track in stream.audioTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [audioTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - for (RTCMediaStreamTrack *track in stream.videoTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [videoTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - result(@{@"audioTracks": audioTracks, @"videoTracks" : videoTracks }); - }else{ - result(nil); - } -} - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId -{ - RTCMediaStream *stream = _localStreams[streamId]; - if (!stream) { - if (peerConnectionId.length > 0) { - RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - } else { - for (RTCPeerConnection *peerConnection in 
_peerConnections.allValues) { - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } - } - } - } - return stream; -} - -- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId -{ - RTCMediaStreamTrack *track = _localTracks[trackId]; - if (!track) { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - track = peerConnection.remoteTracks[trackId]; - if (track) { - break; - } - } - } - - return track; -} - -- (RTCIceServer *)RTCIceServer:(id)json -{ - if (!json) { - NSLog(@"a valid iceServer value"); - return nil; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return nil; - } - - NSArray *urls; - if ([json[@"url"] isKindOfClass:[NSString class]]) { - // TODO: 'url' is non-standard - urls = @[json[@"url"]]; - } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { - urls = @[json[@"urls"]]; - } else { - urls = (NSArray*)json[@"urls"]; - } - - if (json[@"username"] != nil || json[@"credential"] != nil) { - return [[RTCIceServer alloc]initWithURLStrings:urls - username:json[@"username"] - credential:json[@"credential"]]; - } - - return [[RTCIceServer alloc] initWithURLStrings:urls]; -} - - -- (nonnull RTCConfiguration *)RTCConfiguration:(id)json -{ - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - - if (!json) { - return config; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return config; - } - - if (json[@"audioJitterBufferMaxPackets"] != nil && [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { - config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; - } - - if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { - NSString *bundlePolicy = json[@"bundlePolicy"]; - if ([bundlePolicy isEqualToString:@"balanced"]) { - config.bundlePolicy = RTCBundlePolicyBalanced; - } else if ([bundlePolicy isEqualToString:@"max-compat"]) { - 
config.bundlePolicy = RTCBundlePolicyMaxCompat; - } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { - config.bundlePolicy = RTCBundlePolicyMaxBundle; - } - } - - if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { - config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; - } - - if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { - config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; - } - - if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { - NSMutableArray *iceServers = [NSMutableArray new]; - for (id server in json[@"iceServers"]) { - RTCIceServer *convert = [self RTCIceServer:server]; - if (convert != nil) { - [iceServers addObject:convert]; - } - } - config.iceServers = iceServers; - } - - if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { - NSString *iceTransportPolicy = json[@"iceTransportPolicy"]; - if ([iceTransportPolicy isEqualToString:@"all"]) { - config.iceTransportPolicy = RTCIceTransportPolicyAll; - } else if ([iceTransportPolicy isEqualToString:@"none"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNone; - } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNoHost; - } else if ([iceTransportPolicy isEqualToString:@"relay"]) { - config.iceTransportPolicy = RTCIceTransportPolicyRelay; - } - } - - if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { - NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; - if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; - } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { - config.rtcpMuxPolicy = 
RTCRtcpMuxPolicyRequire; - } - } - - if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { - NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; - if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; - } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; - } - } - - if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { - NSString *sdpSemantics = json[@"sdpSemantics"]; - if ([sdpSemantics isEqualToString:@"plan-b"]) { - config.sdpSemantics = RTCSdpSemanticsPlanB; - } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { - config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - } - } - - return config; -} - -- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json -{ - if (!json) { - return nil; - } - if ([json isKindOfClass:[NSDictionary class]]) { - RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; - - if (json[@"id"]) { - [init setChannelId:(int)[json[@"id"] integerValue]]; - } - if (json[@"ordered"]) { - init.isOrdered = [json[@"ordered"] boolValue]; - } - if (json[@"maxRetransmitTime"]) { - init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; - } - if (json[@"maxRetransmits"]) { - init.maxRetransmits = [json[@"maxRetransmits"] intValue]; - } - if (json[@"negotiated"]) { - init.isNegotiated = [json[@"negotiated"] boolValue]; - } - if (json[@"protocol"]) { - init.protocol = json[@"protocol"]; - } - return init; - } - return nil; -} - -- (CGRect)parseRect:(NSDictionary *)rect { - return CGRectMake([[rect valueForKey:@"left"] doubleValue], - [[rect valueForKey:@"top"] doubleValue], - [[rect valueForKey:@"width"] doubleValue], - [[rect valueForKey:@"height"] doubleValue]); -} - -@end diff --git a/macos/Classes/FlutterWebRTCPlugin.m b/macos/Classes/FlutterWebRTCPlugin.m new file mode 
120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.swift b/macos/Classes/FlutterWebRTCPlugin.swift deleted file mode 100644 index 9521120a03..0000000000 --- a/macos/Classes/FlutterWebRTCPlugin.swift +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -import FlutterMacOS -import Foundation - -public class FLEFlutterWebRTCPlugin: NSObject, FlutterPlugin { - public static func register(with registrar: FlutterPluginRegistrar) { - FlutterWebRTCPlugin.register(with: registrar) - } -} diff --git a/macos/Classes/LocalAudioTrack.h b/macos/Classes/LocalAudioTrack.h new file mode 120000 index 0000000000..421b56b2af --- /dev/null +++ b/macos/Classes/LocalAudioTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.h \ No newline at end of file diff --git a/macos/Classes/LocalAudioTrack.m b/macos/Classes/LocalAudioTrack.m new file mode 120000 index 0000000000..71fa724d15 --- /dev/null +++ b/macos/Classes/LocalAudioTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.m \ No newline at end of file diff --git a/macos/Classes/LocalTrack.h b/macos/Classes/LocalTrack.h new file mode 120000 index 0000000000..7d41789949 --- /dev/null +++ b/macos/Classes/LocalTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalTrack.h \ No newline at end of file diff --git a/macos/Classes/LocalVideoTrack.h b/macos/Classes/LocalVideoTrack.h new file mode 120000 index 0000000000..5069f7dd17 --- /dev/null +++ b/macos/Classes/LocalVideoTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.h \ No newline at end of file diff --git a/macos/Classes/LocalVideoTrack.m b/macos/Classes/LocalVideoTrack.m new file mode 120000 index 0000000000..182490a4fb --- /dev/null +++ b/macos/Classes/LocalVideoTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.m \ No newline at end of file diff --git a/macos/Classes/VideoProcessingAdapter.h b/macos/Classes/VideoProcessingAdapter.h new file mode 120000 index 0000000000..d93141230c --- /dev/null +++ b/macos/Classes/VideoProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.h \ No newline at end of file diff --git a/macos/Classes/VideoProcessingAdapter.m b/macos/Classes/VideoProcessingAdapter.m new file mode 120000 index 
0000000000..c80ad1ca73 --- /dev/null +++ b/macos/Classes/VideoProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.m \ No newline at end of file diff --git a/macos/WebRTC.framework/Headers b/macos/WebRTC.framework/Headers deleted file mode 120000 index a177d2a6b9..0000000000 --- a/macos/WebRTC.framework/Headers +++ /dev/null @@ -1 +0,0 @@ -Versions/Current/Headers \ No newline at end of file diff --git a/macos/WebRTC.framework/Modules b/macos/WebRTC.framework/Modules deleted file mode 120000 index 5736f3186e..0000000000 --- a/macos/WebRTC.framework/Modules +++ /dev/null @@ -1 +0,0 @@ -Versions/Current/Modules \ No newline at end of file diff --git a/macos/WebRTC.framework/Resources b/macos/WebRTC.framework/Resources deleted file mode 120000 index 953ee36f3b..0000000000 --- a/macos/WebRTC.framework/Resources +++ /dev/null @@ -1 +0,0 @@ -Versions/Current/Resources \ No newline at end of file diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCAudioSource.h b/macos/WebRTC.framework/Versions/A/Headers/RTCAudioSource.h deleted file mode 100644 index d1030e3fef..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCAudioSource.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCMediaSource.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCAudioSource : RTCMediaSource - -- (instancetype)init NS_UNAVAILABLE; - -// Sets the volume for the RTCMediaSource. |volume| is a gain value in the range -// [0, 10]. -// Temporary fix to be able to modify volume of remote audio tracks. 
-// TODO(kthelgason): Property stays here temporarily until a proper volume-api -// is available on the surface exposed by webrtc. -@property(nonatomic, assign) double volume; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCAudioTrack.h b/macos/WebRTC.framework/Versions/A/Headers/RTCAudioTrack.h deleted file mode 100644 index 501ef92ec4..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCAudioTrack.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "RTCMacros.h" -#import "RTCMediaStreamTrack.h" - -NS_ASSUME_NONNULL_BEGIN - -@class RTCAudioSource; - -RTC_OBJC_EXPORT -@interface RTCAudioTrack : RTCMediaStreamTrack - -- (instancetype)init NS_UNAVAILABLE; - -/** The audio source for this audio track. */ -@property(nonatomic, readonly) RTCAudioSource *source; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCCVPixelBuffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCCVPixelBuffer.h deleted file mode 100644 index 432a382574..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCCVPixelBuffer.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoFrameBuffer.h" - -NS_ASSUME_NONNULL_BEGIN - -/** RTCVideoFrameBuffer containing a CVPixelBufferRef */ -RTC_OBJC_EXPORT -@interface RTCCVPixelBuffer : NSObject - -@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer; -@property(nonatomic, readonly) int cropX; -@property(nonatomic, readonly) int cropY; -@property(nonatomic, readonly) int cropWidth; -@property(nonatomic, readonly) int cropHeight; - -+ (NSSet *)supportedPixelFormats; - -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer; -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - adaptedWidth:(int)adaptedWidth - adaptedHeight:(int)adaptedHeight - cropWidth:(int)cropWidth - cropHeight:(int)cropHeight - cropX:(int)cropX - cropY:(int)cropY; - -- (BOOL)requiresCropping; -- (BOOL)requiresScalingToWidth:(int)width height:(int)height; -- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height; - -/** The minimum size of the |tmpBuffer| must be the number of bytes returned from the - * bufferSizeForCroppingAndScalingToWidth:height: method. - * If that size is 0, the |tmpBuffer| may be nil. - */ -- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer - withTempBuffer:(nullable uint8_t *)tmpBuffer; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCCallbackLogger.h b/macos/WebRTC.framework/Versions/A/Headers/RTCCallbackLogger.h deleted file mode 100644 index 2bce03fe0f..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCCallbackLogger.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCLogging.h" -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -typedef void (^RTCCallbackLoggerMessageHandler)(NSString *message); -typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)(NSString *message, - RTCLoggingSeverity severity); - -// This class intercepts WebRTC logs and forwards them to a registered block. -// This class is not threadsafe. -RTC_OBJC_EXPORT -@interface RTCCallbackLogger : NSObject - -// The severity level to capture. The default is kRTCLoggingSeverityInfo. -@property(nonatomic, assign) RTCLoggingSeverity severity; - -// The callback handler will be called on the same thread that does the -// logging, so if the logging callback can be slow it may be a good idea -// to implement dispatching to some other queue. -- (void)start:(nullable RTCCallbackLoggerMessageHandler)handler; -- (void)startWithMessageAndSeverityHandler: - (nullable RTCCallbackLoggerMessageAndSeverityHandler)handler; - -- (void)stop; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCCameraVideoCapturer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCCameraVideoCapturer.h deleted file mode 100644 index 2b5e56f4ec..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCCameraVideoCapturer.h +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import -#import - -#import "RTCMacros.h" -#import "RTCVideoCapturer.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -// Camera capture that implements RTCVideoCapturer. Delivers frames to a RTCVideoCapturerDelegate -// (usually RTCVideoSource). -NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") -@interface RTCCameraVideoCapturer : RTCVideoCapturer - -// Capture session that is used for capturing. Valid from initialization to dealloc. -@property(readonly, nonatomic) AVCaptureSession *captureSession; - -// Returns list of available capture devices that support video capture. -+ (NSArray *)captureDevices; -// Returns list of formats that are supported by this class for this device. -+ (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device; - -// Returns the most efficient supported output pixel format for this capturer. -- (FourCharCode)preferredOutputPixelFormat; - -// Starts the capture session asynchronously and notifies callback on completion. -// The device will capture video in the format given in the `format` parameter. If the pixel format -// in `format` is supported by the WebRTC pipeline, the same pixel format will be used for the -// output. Otherwise, the format returned by `preferredOutputPixelFormat` will be used. -- (void)startCaptureWithDevice:(AVCaptureDevice *)device - format:(AVCaptureDeviceFormat *)format - fps:(NSInteger)fps - completionHandler:(nullable void (^)(NSError *))completionHandler; -// Stops the capture session asynchronously and notifies callback on completion. -- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; - -// Starts the capture session asynchronously. -- (void)startCaptureWithDevice:(AVCaptureDevice *)device - format:(AVCaptureDeviceFormat *)format - fps:(NSInteger)fps; -// Stops the capture session asynchronously. 
-- (void)stopCapture; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCCertificate.h b/macos/WebRTC.framework/Versions/A/Headers/RTCCertificate.h deleted file mode 100644 index 50c1ca55a3..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCCertificate.h +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCCertificate : NSObject - -/** Private key in PEM. */ -@property(nonatomic, readonly, copy) NSString *private_key; - -/** Public key in an x509 cert encoded in PEM. */ -@property(nonatomic, readonly, copy) NSString *certificate; - -/** - * Initialize an RTCCertificate with PEM strings for private_key and certificate. - */ -- (instancetype)initWithPrivateKey:(NSString *)private_key - certificate:(NSString *)certificate NS_DESIGNATED_INITIALIZER; - -- (instancetype)init NS_UNAVAILABLE; - -/** Generate a new certificate for 're' use. - * - * Optional dictionary of parameters. Defaults to KeyType ECDSA if none are - * provided. - * - name: "ECDSA" or "RSASSA-PKCS1-v1_5" - */ -+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCCodecSpecificInfo.h b/macos/WebRTC.framework/Versions/A/Headers/RTCCodecSpecificInfo.h deleted file mode 100644 index e2ae4cafa1..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCCodecSpecificInfo.h +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Implement this protocol to pass codec specific info from the encoder. - * Corresponds to webrtc::CodecSpecificInfo. - */ -RTC_OBJC_EXPORT -@protocol RTCCodecSpecificInfo -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCCodecSpecificInfoH264.h b/macos/WebRTC.framework/Versions/A/Headers/RTCCodecSpecificInfoH264.h deleted file mode 100644 index ece9570a13..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCCodecSpecificInfoH264.h +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCCodecSpecificInfo.h" -#import "RTCMacros.h" - -/** Class for H264 specific config. 
*/ -typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) { - RTCH264PacketizationModeNonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed - RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed -}; - -RTC_OBJC_EXPORT -@interface RTCCodecSpecificInfoH264 : NSObject - -@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCConfiguration.h b/macos/WebRTC.framework/Versions/A/Headers/RTCConfiguration.h deleted file mode 100644 index f9e6edfd97..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCConfiguration.h +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCCertificate.h" -#import "RTCCryptoOptions.h" -#import "RTCMacros.h" - -@class RTCIceServer; -@class RTCIntervalRange; - -/** - * Represents the ice transport policy. This exposes the same states in C++, - * which include one more state than what exists in the W3C spec. - */ -typedef NS_ENUM(NSInteger, RTCIceTransportPolicy) { - RTCIceTransportPolicyNone, - RTCIceTransportPolicyRelay, - RTCIceTransportPolicyNoHost, - RTCIceTransportPolicyAll -}; - -/** Represents the bundle policy. */ -typedef NS_ENUM(NSInteger, RTCBundlePolicy) { - RTCBundlePolicyBalanced, - RTCBundlePolicyMaxCompat, - RTCBundlePolicyMaxBundle -}; - -/** Represents the rtcp mux policy. */ -typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) { RTCRtcpMuxPolicyNegotiate, RTCRtcpMuxPolicyRequire }; - -/** Represents the tcp candidate policy. 
*/ -typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) { - RTCTcpCandidatePolicyEnabled, - RTCTcpCandidatePolicyDisabled -}; - -/** Represents the candidate network policy. */ -typedef NS_ENUM(NSInteger, RTCCandidateNetworkPolicy) { - RTCCandidateNetworkPolicyAll, - RTCCandidateNetworkPolicyLowCost -}; - -/** Represents the continual gathering policy. */ -typedef NS_ENUM(NSInteger, RTCContinualGatheringPolicy) { - RTCContinualGatheringPolicyGatherOnce, - RTCContinualGatheringPolicyGatherContinually -}; - -/** Represents the encryption key type. */ -typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) { - RTCEncryptionKeyTypeRSA, - RTCEncryptionKeyTypeECDSA, -}; - -/** Represents the chosen SDP semantics for the RTCPeerConnection. */ -typedef NS_ENUM(NSInteger, RTCSdpSemantics) { - RTCSdpSemanticsPlanB, - RTCSdpSemanticsUnifiedPlan, -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCConfiguration : NSObject - -/** An array of Ice Servers available to be used by ICE. */ -@property(nonatomic, copy) NSArray *iceServers; - -/** An RTCCertificate for 're' use. */ -@property(nonatomic, nullable) RTCCertificate *certificate; - -/** Which candidates the ICE agent is allowed to use. The W3C calls it - * |iceTransportPolicy|, while in C++ it is called |type|. */ -@property(nonatomic, assign) RTCIceTransportPolicy iceTransportPolicy; - -/** The media-bundling policy to use when gathering ICE candidates. */ -@property(nonatomic, assign) RTCBundlePolicy bundlePolicy; - -/** The rtcp-mux policy to use when gathering ICE candidates. */ -@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy; -@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy; -@property(nonatomic, assign) RTCCandidateNetworkPolicy candidateNetworkPolicy; -@property(nonatomic, assign) RTCContinualGatheringPolicy continualGatheringPolicy; - -/** If set to YES, don't gather IPv6 ICE candidates. - * Default is NO. 
- */ -@property(nonatomic, assign) BOOL disableIPV6; - -/** If set to YES, don't gather IPv6 ICE candidates on Wi-Fi. - * Only intended to be used on specific devices. Certain phones disable IPv6 - * when the screen is turned off and it would be better to just disable the - * IPv6 ICE candidates on Wi-Fi in those cases. - * Default is NO. - */ -@property(nonatomic, assign) BOOL disableIPV6OnWiFi; - -/** By default, the PeerConnection will use a limited number of IPv6 network - * interfaces, in order to avoid too many ICE candidate pairs being created - * and delaying ICE completion. - * - * Can be set to INT_MAX to effectively disable the limit. - */ -@property(nonatomic, assign) int maxIPv6Networks; - -/** Exclude link-local network interfaces - * from considertaion for gathering ICE candidates. - * Defaults to NO. - */ -@property(nonatomic, assign) BOOL disableLinkLocalNetworks; - -@property(nonatomic, assign) int audioJitterBufferMaxPackets; -@property(nonatomic, assign) BOOL audioJitterBufferFastAccelerate; -@property(nonatomic, assign) int iceConnectionReceivingTimeout; -@property(nonatomic, assign) int iceBackupCandidatePairPingInterval; - -/** Key type used to generate SSL identity. Default is ECDSA. */ -@property(nonatomic, assign) RTCEncryptionKeyType keyType; - -/** ICE candidate pool size as defined in JSEP. Default is 0. */ -@property(nonatomic, assign) int iceCandidatePoolSize; - -/** Prune turn ports on the same network to the same turn server. - * Default is NO. - */ -@property(nonatomic, assign) BOOL shouldPruneTurnPorts; - -/** If set to YES, this means the ICE transport should presume TURN-to-TURN - * candidate pairs will succeed, even before a binding response is received. - */ -@property(nonatomic, assign) BOOL shouldPresumeWritableWhenFullyRelayed; - -/** If set to non-nil, controls the minimal interval between consecutive ICE - * check packets. 
- */ -@property(nonatomic, copy, nullable) NSNumber *iceCheckMinInterval; - -/** ICE Periodic Regathering - * If set, WebRTC will periodically create and propose candidates without - * starting a new ICE generation. The regathering happens continuously with - * interval specified in milliseconds by the uniform distribution [a, b]. - */ -@property(nonatomic, strong, nullable) RTCIntervalRange *iceRegatherIntervalRange; - -/** Configure the SDP semantics used by this PeerConnection. Note that the - * WebRTC 1.0 specification requires UnifiedPlan semantics. The - * RTCRtpTransceiver API is only available with UnifiedPlan semantics. - * - * PlanB will cause RTCPeerConnection to create offers and answers with at - * most one audio and one video m= section with multiple RTCRtpSenders and - * RTCRtpReceivers specified as multiple a=ssrc lines within the section. This - * will also cause RTCPeerConnection to ignore all but the first m= section of - * the same media type. - * - * UnifiedPlan will cause RTCPeerConnection to create offers and answers with - * multiple m= sections where each m= section maps to one RTCRtpSender and one - * RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both video. This - * will also cause RTCPeerConnection to ignore all but the first a=ssrc lines - * that form a Plan B stream. - * - * For users who wish to send multiple audio/video streams and need to stay - * interoperable with legacy WebRTC implementations or use legacy APIs, - * specify PlanB. - * - * For all other users, specify UnifiedPlan. - */ -@property(nonatomic, assign) RTCSdpSemantics sdpSemantics; - -/** Actively reset the SRTP parameters when the DTLS transports underneath are - * changed after offer/answer negotiation. 
This is only intended to be a - * workaround for crbug.com/835958 - */ -@property(nonatomic, assign) BOOL activeResetSrtpParams; - -/** - * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection - * that it should use the MediaTransportInterface. - */ -@property(nonatomic, assign) BOOL useMediaTransport; - -/** - * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection - * that it should use the MediaTransportInterface for data channels. - */ -@property(nonatomic, assign) BOOL useMediaTransportForDataChannels; - -/** - * Defines advanced optional cryptographic settings related to SRTP and - * frame encryption for native WebRTC. Setting this will overwrite any - * options set through the PeerConnectionFactory (which is deprecated). - */ -@property(nonatomic, nullable) RTCCryptoOptions *cryptoOptions; - -/** - * Time interval between audio RTCP reports. - */ -@property(nonatomic, assign) int rtcpAudioReportIntervalMs; - -/** - * Time interval between video RTCP reports. - */ -@property(nonatomic, assign) int rtcpVideoReportIntervalMs; - -- (instancetype)init; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCCryptoOptions.h b/macos/WebRTC.framework/Versions/A/Headers/RTCCryptoOptions.h deleted file mode 100644 index b465bb5a73..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCCryptoOptions.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** - * Objective-C bindings for webrtc::CryptoOptions. This API had to be flattened - * as Objective-C doesn't support nested structures. - */ -RTC_OBJC_EXPORT -@interface RTCCryptoOptions : NSObject - -/** - * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used - * if both sides enable it - */ -@property(nonatomic, assign) BOOL srtpEnableGcmCryptoSuites; -/** - * If set to true, the (potentially insecure) crypto cipher - * SRTP_AES128_CM_SHA1_32 will be included in the list of supported ciphers - * during negotiation. It will only be used if both peers support it and no - * other ciphers get preferred. - */ -@property(nonatomic, assign) BOOL srtpEnableAes128Sha1_32CryptoCipher; -/** - * If set to true, encrypted RTP header extensions as defined in RFC 6904 - * will be negotiated. They will only be used if both peers support them. - */ -@property(nonatomic, assign) BOOL srtpEnableEncryptedRtpHeaderExtensions; - -/** - * If set all RtpSenders must have an FrameEncryptor attached to them before - * they are allowed to send packets. All RtpReceivers must have a - * FrameDecryptor attached to them before they are able to receive packets. - */ -@property(nonatomic, assign) BOOL sframeRequireFrameEncryption; - -/** - * Initializes CryptoOptions with all possible options set explicitly. This - * is done when converting from a native RTCConfiguration.crypto_options. 
- */ -- (instancetype)initWithSrtpEnableGcmCryptoSuites:(BOOL)srtpEnableGcmCryptoSuites - srtpEnableAes128Sha1_32CryptoCipher:(BOOL)srtpEnableAes128Sha1_32CryptoCipher - srtpEnableEncryptedRtpHeaderExtensions:(BOOL)srtpEnableEncryptedRtpHeaderExtensions - sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption - NS_DESIGNATED_INITIALIZER; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCDataChannel.h b/macos/WebRTC.framework/Versions/A/Headers/RTCDataChannel.h deleted file mode 100644 index 0cc2de87f2..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCDataChannel.h +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCDataBuffer : NSObject - -/** NSData representation of the underlying buffer. */ -@property(nonatomic, readonly) NSData *data; - -/** Indicates whether |data| contains UTF-8 or binary data. */ -@property(nonatomic, readonly) BOOL isBinary; - -- (instancetype)init NS_UNAVAILABLE; - -/** - * Initialize an RTCDataBuffer from NSData. |isBinary| indicates whether |data| - * contains UTF-8 or binary data. - */ -- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary; - -@end - -@class RTCDataChannel; -RTC_OBJC_EXPORT -@protocol RTCDataChannelDelegate - -/** The data channel state changed. */ -- (void)dataChannelDidChangeState:(RTCDataChannel *)dataChannel; - -/** The data channel successfully received a data buffer. 
*/ -- (void)dataChannel:(RTCDataChannel *)dataChannel - didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer; - -@optional -/** The data channel's |bufferedAmount| changed. */ -- (void)dataChannel:(RTCDataChannel *)dataChannel didChangeBufferedAmount:(uint64_t)amount; - -@end - -/** Represents the state of the data channel. */ -typedef NS_ENUM(NSInteger, RTCDataChannelState) { - RTCDataChannelStateConnecting, - RTCDataChannelStateOpen, - RTCDataChannelStateClosing, - RTCDataChannelStateClosed, -}; - -RTC_OBJC_EXPORT -@interface RTCDataChannel : NSObject - -/** - * A label that can be used to distinguish this data channel from other data - * channel objects. - */ -@property(nonatomic, readonly) NSString *label; - -/** Whether the data channel can send messages in unreliable mode. */ -@property(nonatomic, readonly) BOOL isReliable DEPRECATED_ATTRIBUTE; - -/** Returns whether this data channel is ordered or not. */ -@property(nonatomic, readonly) BOOL isOrdered; - -/** Deprecated. Use maxPacketLifeTime. */ -@property(nonatomic, readonly) NSUInteger maxRetransmitTime DEPRECATED_ATTRIBUTE; - -/** - * The length of the time window (in milliseconds) during which transmissions - * and retransmissions may occur in unreliable mode. - */ -@property(nonatomic, readonly) uint16_t maxPacketLifeTime; - -/** - * The maximum number of retransmissions that are attempted in unreliable mode. - */ -@property(nonatomic, readonly) uint16_t maxRetransmits; - -/** - * The name of the sub-protocol used with this data channel, if any. Otherwise - * this returns an empty string. - */ -@property(nonatomic, readonly) NSString *protocol; - -/** - * Returns whether this data channel was negotiated by the application or not. - */ -@property(nonatomic, readonly) BOOL isNegotiated; - -/** Deprecated. Use channelId. */ -@property(nonatomic, readonly) NSInteger streamId DEPRECATED_ATTRIBUTE; - -/** The identifier for this data channel. 
*/ -@property(nonatomic, readonly) int channelId; - -/** The state of the data channel. */ -@property(nonatomic, readonly) RTCDataChannelState readyState; - -/** - * The number of bytes of application data that have been queued using - * |sendData:| but that have not yet been transmitted to the network. - */ -@property(nonatomic, readonly) uint64_t bufferedAmount; - -/** The delegate for this data channel. */ -@property(nonatomic, weak) id delegate; - -- (instancetype)init NS_UNAVAILABLE; - -/** Closes the data channel. */ -- (void)close; - -/** Attempt to send |data| on this data channel's underlying data transport. */ -- (BOOL)sendData:(RTCDataBuffer *)data; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCDataChannelConfiguration.h b/macos/WebRTC.framework/Versions/A/Headers/RTCDataChannelConfiguration.h deleted file mode 100644 index 96d33f4d72..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCDataChannelConfiguration.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCDataChannelConfiguration : NSObject - -/** Set to YES if ordered delivery is required. */ -@property(nonatomic, assign) BOOL isOrdered; - -/** Deprecated. Use maxPacketLifeTime. */ -@property(nonatomic, assign) NSInteger maxRetransmitTimeMs DEPRECATED_ATTRIBUTE; - -/** - * Max period in milliseconds in which retransmissions will be sent. After this - * time, no more retransmissions will be sent. -1 if unset. 
- */ -@property(nonatomic, assign) int maxPacketLifeTime; - -/** The max number of retransmissions. -1 if unset. */ -@property(nonatomic, assign) int maxRetransmits; - -/** Set to YES if the channel has been externally negotiated and we do not send - * an in-band signalling in the form of an "open" message. - */ -@property(nonatomic, assign) BOOL isNegotiated; - -/** Deprecated. Use channelId. */ -@property(nonatomic, assign) int streamId DEPRECATED_ATTRIBUTE; - -/** The id of the data channel. */ -@property(nonatomic, assign) int channelId; - -/** Set by the application and opaque to the WebRTC implementation. */ -@property(nonatomic) NSString* protocol; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCDefaultVideoDecoderFactory.h b/macos/WebRTC.framework/Versions/A/Headers/RTCDefaultVideoDecoderFactory.h deleted file mode 100644 index 7ca9463a59..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCDefaultVideoDecoderFactory.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoDecoderFactory.h" - -NS_ASSUME_NONNULL_BEGIN - -/** This decoder factory include support for all codecs bundled with WebRTC. If using custom - * codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory. 
- */ -RTC_OBJC_EXPORT -@interface RTCDefaultVideoDecoderFactory : NSObject -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCDefaultVideoEncoderFactory.h b/macos/WebRTC.framework/Versions/A/Headers/RTCDefaultVideoEncoderFactory.h deleted file mode 100644 index c45e54362b..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCDefaultVideoEncoderFactory.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoEncoderFactory.h" - -NS_ASSUME_NONNULL_BEGIN - -/** This encoder factory include support for all codecs bundled with WebRTC. If using custom - * codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory. - */ -RTC_OBJC_EXPORT -@interface RTCDefaultVideoEncoderFactory : NSObject - -@property(nonatomic, retain) RTCVideoCodecInfo *preferredCodec; - -+ (NSArray *)supportedCodecs; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCDispatcher.h b/macos/WebRTC.framework/Versions/A/Headers/RTCDispatcher.h deleted file mode 100644 index 4f8359b32c..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCDispatcher.h +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) { - // Main dispatcher queue. - RTCDispatcherTypeMain, - // Used for starting/stopping AVCaptureSession, and assigning - // capture session to AVCaptureVideoPreviewLayer. - RTCDispatcherTypeCaptureSession, - // Used for operations on AVAudioSession. - RTCDispatcherTypeAudioSession, -}; - -/** Dispatcher that asynchronously dispatches blocks to a specific - * shared dispatch queue. - */ -RTC_OBJC_EXPORT -@interface RTCDispatcher : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -/** Dispatch the block asynchronously on the queue for dispatchType. - * @param dispatchType The queue type to dispatch on. - * @param block The block to dispatch asynchronously. - */ -+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType block:(dispatch_block_t)block; - -/** Returns YES if run on queue for the dispatchType otherwise NO. - * Useful for asserting that a method is run on a correct queue. - */ -+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCDtmfSender.h b/macos/WebRTC.framework/Versions/A/Headers/RTCDtmfSender.h deleted file mode 100644 index 5d86d01892..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCDtmfSender.h +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@protocol RTCDtmfSender - -/** - * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise - * returns false. To be able to send DTMF, the associated RTCRtpSender must be - * able to send packets, and a "telephone-event" codec must be negotiated. - */ -@property(nonatomic, readonly) BOOL canInsertDtmf; - -/** - * Queues a task that sends the DTMF tones. The tones parameter is treated - * as a series of characters. The characters 0 through 9, A through D, #, and * - * generate the associated DTMF tones. The characters a to d are equivalent - * to A to D. The character ',' indicates a delay of 2 seconds before - * processing the next character in the tones parameter. - * - * Unrecognized characters are ignored. - * - * @param duration The parameter indicates the duration to use for each - * character passed in the tones parameter. The duration cannot be more - * than 6000 or less than 70 ms. - * - * @param interToneGap The parameter indicates the gap between tones. - * This parameter must be at least 50 ms but should be as short as - * possible. - * - * If InsertDtmf is called on the same object while an existing task for this - * object to generate DTMF is still running, the previous task is canceled. - * Returns true on success and false on failure. - */ -- (BOOL)insertDtmf:(nonnull NSString *)tones - duration:(NSTimeInterval)duration - interToneGap:(NSTimeInterval)interToneGap; - -/** The tones remaining to be played out */ -- (nonnull NSString *)remainingTones; - -/** - * The current tone duration value. This value will be the value last set via the - * insertDtmf method, or the default value of 100 ms if insertDtmf was never called. - */ -- (NSTimeInterval)duration; - -/** - * The current value of the between-tone gap. This value will be the value last set - * via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never - * called. 
- */ -- (NSTimeInterval)interToneGap; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCEncodedImage.h b/macos/WebRTC.framework/Versions/A/Headers/RTCEncodedImage.h deleted file mode 100644 index 670c7276ff..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCEncodedImage.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoFrame.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Represents an encoded frame's type. */ -typedef NS_ENUM(NSUInteger, RTCFrameType) { - RTCFrameTypeEmptyFrame = 0, - RTCFrameTypeAudioFrameSpeech = 1, - RTCFrameTypeAudioFrameCN = 2, - RTCFrameTypeVideoFrameKey = 3, - RTCFrameTypeVideoFrameDelta = 4, -}; - -typedef NS_ENUM(NSUInteger, RTCVideoContentType) { - RTCVideoContentTypeUnspecified, - RTCVideoContentTypeScreenshare, -}; - -/** Represents an encoded frame. Corresponds to webrtc::EncodedImage. 
*/ -RTC_OBJC_EXPORT -@interface RTCEncodedImage : NSObject - -@property(nonatomic, strong) NSData *buffer; -@property(nonatomic, assign) int32_t encodedWidth; -@property(nonatomic, assign) int32_t encodedHeight; -@property(nonatomic, assign) uint32_t timeStamp; -@property(nonatomic, assign) int64_t captureTimeMs; -@property(nonatomic, assign) int64_t ntpTimeMs; -@property(nonatomic, assign) uint8_t flags; -@property(nonatomic, assign) int64_t encodeStartMs; -@property(nonatomic, assign) int64_t encodeFinishMs; -@property(nonatomic, assign) RTCFrameType frameType; -@property(nonatomic, assign) RTCVideoRotation rotation; -@property(nonatomic, assign) BOOL completeFrame; -@property(nonatomic, strong) NSNumber *qp; -@property(nonatomic, assign) RTCVideoContentType contentType; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCFieldTrials.h b/macos/WebRTC.framework/Versions/A/Headers/RTCFieldTrials.h deleted file mode 100644 index 61443e8bb2..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCFieldTrials.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -/** The only valid value for the following if set is kRTCFieldTrialEnabledValue. 
*/ -RTC_EXTERN NSString * const kRTCFieldTrialAudioSendSideBweKey; -RTC_EXTERN NSString * const kRTCFieldTrialAudioForceNoTWCCKey; -RTC_EXTERN NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey; -RTC_EXTERN NSString * const kRTCFieldTrialSendSideBweWithOverheadKey; -RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03AdvertisedKey; -RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03Key; -RTC_EXTERN NSString * const kRTCFieldTrialH264HighProfileKey; -RTC_EXTERN NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey; - -/** The valid value for field trials above. */ -RTC_EXTERN NSString * const kRTCFieldTrialEnabledValue; - -/** Initialize field trials using a dictionary mapping field trial keys to their - * values. See above for valid keys and values. Must be called before any other - * call into WebRTC. See: webrtc/system_wrappers/include/field_trial.h - */ -RTC_EXTERN void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials); diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCFileLogger.h b/macos/WebRTC.framework/Versions/A/Headers/RTCFileLogger.h deleted file mode 100644 index cd5c1c466b..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCFileLogger.h +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" - -typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) { - RTCFileLoggerSeverityVerbose, - RTCFileLoggerSeverityInfo, - RTCFileLoggerSeverityWarning, - RTCFileLoggerSeverityError -}; - -typedef NS_ENUM(NSUInteger, RTCFileLoggerRotationType) { - RTCFileLoggerTypeCall, - RTCFileLoggerTypeApp, -}; - -NS_ASSUME_NONNULL_BEGIN - -// This class intercepts WebRTC logs and saves them to a file. The file size -// will not exceed the given maximum bytesize. When the maximum bytesize is -// reached, logs are rotated according to the rotationType specified. -// For kRTCFileLoggerTypeCall, logs from the beginning and the end -// are preserved while the middle section is overwritten instead. -// For kRTCFileLoggerTypeApp, the oldest log is overwritten. -// This class is not threadsafe. -RTC_OBJC_EXPORT -@interface RTCFileLogger : NSObject - -// The severity level to capture. The default is kRTCFileLoggerSeverityInfo. -@property(nonatomic, assign) RTCFileLoggerSeverity severity; - -// The rotation type for this file logger. The default is -// kRTCFileLoggerTypeCall. -@property(nonatomic, readonly) RTCFileLoggerRotationType rotationType; - -// Disables buffering disk writes. Should be set before |start|. Buffering -// is enabled by default for performance. -@property(nonatomic, assign) BOOL shouldDisableBuffering; - -// Default constructor provides default settings for dir path, file size and -// rotation type. -- (instancetype)init; - -// Create file logger with default rotation type. -- (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize; - -- (instancetype)initWithDirPath:(NSString *)dirPath - maxFileSize:(NSUInteger)maxFileSize - rotationType:(RTCFileLoggerRotationType)rotationType NS_DESIGNATED_INITIALIZER; - -// Starts writing WebRTC logs to disk if not already started. Overwrites any -// existing file(s). -- (void)start; - -// Stops writing WebRTC logs to disk. This method is also called on dealloc. 
-- (void)stop; - -// Returns the current contents of the logs, or nil if start has been called -// without a stop. -- (nullable NSData *)logData; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCFileVideoCapturer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCFileVideoCapturer.h deleted file mode 100644 index 0782588d9c..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCFileVideoCapturer.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCVideoCapturer.h" - -NS_ASSUME_NONNULL_BEGIN - -/** - * Error passing block. - */ -typedef void (^RTCFileVideoCapturerErrorBlock)(NSError *error); - -/** - * Captures buffers from bundled video file. - * - * See @c RTCVideoCapturer for more info on capturers. - */ -RTC_OBJC_EXPORT - -NS_CLASS_AVAILABLE_IOS(10) -@interface RTCFileVideoCapturer : RTCVideoCapturer - -/** - * Starts asynchronous capture of frames from video file. - * - * Capturing is not started if error occurs. Underlying error will be - * relayed in the errorBlock if one is provided. - * Successfully captured video frames will be passed to the delegate. - * - * @param nameOfFile The name of the bundled video file to be read. - * @errorBlock block to be executed upon error. - */ -- (void)startCapturingFromFileNamed:(NSString *)nameOfFile - onError:(__nullable RTCFileVideoCapturerErrorBlock)errorBlock; - -/** - * Immediately stops capture. 
- */ -- (void)stopCapture; -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCH264ProfileLevelId.h b/macos/WebRTC.framework/Versions/A/Headers/RTCH264ProfileLevelId.h deleted file mode 100644 index 56b353215a..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCH264ProfileLevelId.h +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -RTC_OBJC_EXPORT extern NSString *const kRTCVideoCodecH264Name; -RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedHigh; -RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedBaseline; -RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh; -RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline; - -/** H264 Profiles and levels. 
*/ -typedef NS_ENUM(NSUInteger, RTCH264Profile) { - RTCH264ProfileConstrainedBaseline, - RTCH264ProfileBaseline, - RTCH264ProfileMain, - RTCH264ProfileConstrainedHigh, - RTCH264ProfileHigh, -}; - -typedef NS_ENUM(NSUInteger, RTCH264Level) { - RTCH264Level1_b = 0, - RTCH264Level1 = 10, - RTCH264Level1_1 = 11, - RTCH264Level1_2 = 12, - RTCH264Level1_3 = 13, - RTCH264Level2 = 20, - RTCH264Level2_1 = 21, - RTCH264Level2_2 = 22, - RTCH264Level3 = 30, - RTCH264Level3_1 = 31, - RTCH264Level3_2 = 32, - RTCH264Level4 = 40, - RTCH264Level4_1 = 41, - RTCH264Level4_2 = 42, - RTCH264Level5 = 50, - RTCH264Level5_1 = 51, - RTCH264Level5_2 = 52 -}; - -RTC_OBJC_EXPORT -@interface RTCH264ProfileLevelId : NSObject - -@property(nonatomic, readonly) RTCH264Profile profile; -@property(nonatomic, readonly) RTCH264Level level; -@property(nonatomic, readonly) NSString *hexString; - -- (instancetype)initWithHexString:(NSString *)hexString; -- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCI420Buffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCI420Buffer.h deleted file mode 100644 index a6c7e41bcb..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCI420Buffer.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCYUVPlanarBuffer.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Protocol for RTCYUVPlanarBuffers containing I420 data */ -RTC_OBJC_EXPORT -@protocol RTCI420Buffer -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCIceCandidate.h b/macos/WebRTC.framework/Versions/A/Headers/RTCIceCandidate.h deleted file mode 100644 index 3e305cc418..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCIceCandidate.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCIceCandidate : NSObject - -/** - * If present, the identifier of the "media stream identification" for the media - * component this candidate is associated with. - */ -@property(nonatomic, readonly, nullable) NSString *sdpMid; - -/** - * The index (starting at zero) of the media description this candidate is - * associated with in the SDP. - */ -@property(nonatomic, readonly) int sdpMLineIndex; - -/** The SDP string for this candidate. */ -@property(nonatomic, readonly) NSString *sdp; - -/** The URL of the ICE server which this candidate is gathered from. */ -@property(nonatomic, readonly, nullable) NSString *serverUrl; - -- (instancetype)init NS_UNAVAILABLE; - -/** - * Initialize an RTCIceCandidate from SDP. 
- */ -- (instancetype)initWithSdp:(NSString *)sdp - sdpMLineIndex:(int)sdpMLineIndex - sdpMid:(nullable NSString *)sdpMid NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCIceServer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCIceServer.h deleted file mode 100644 index ab5fc4a9ed..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCIceServer.h +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) { - RTCTlsCertPolicySecure, - RTCTlsCertPolicyInsecureNoCheck -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCIceServer : NSObject - -/** URI(s) for this server represented as NSStrings. */ -@property(nonatomic, readonly) NSArray *urlStrings; - -/** Username to use if this RTCIceServer object is a TURN server. */ -@property(nonatomic, readonly, nullable) NSString *username; - -/** Credential to use if this RTCIceServer object is a TURN server. */ -@property(nonatomic, readonly, nullable) NSString *credential; - -/** - * TLS certificate policy to use if this RTCIceServer object is a TURN server. - */ -@property(nonatomic, readonly) RTCTlsCertPolicy tlsCertPolicy; - -/** - If the URIs in |urls| only contain IP addresses, this field can be used - to indicate the hostname, which may be necessary for TLS (using the SNI - extension). If |urls| itself contains the hostname, this isn't necessary. 
- */ -@property(nonatomic, readonly, nullable) NSString *hostname; - -/** List of protocols to be used in the TLS ALPN extension. */ -@property(nonatomic, readonly) NSArray *tlsAlpnProtocols; - -/** - List elliptic curves to be used in the TLS elliptic curves extension. - Only curve names supported by OpenSSL should be used (eg. "P-256","X25519"). - */ -@property(nonatomic, readonly) NSArray *tlsEllipticCurves; - -- (nonnull instancetype)init NS_UNAVAILABLE; - -/** Convenience initializer for a server with no authentication (e.g. STUN). */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, and credentialType. - */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, and TLS cert policy. - */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, TLS cert policy and hostname. - */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy - hostname:(nullable NSString *)hostname; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, TLS cert policy, hostname and ALPN protocols. 
- */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy - hostname:(nullable NSString *)hostname - tlsAlpnProtocols:(NSArray *)tlsAlpnProtocols; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, TLS cert policy, hostname, ALPN protocols and - * elliptic curves. - */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy - hostname:(nullable NSString *)hostname - tlsAlpnProtocols:(nullable NSArray *)tlsAlpnProtocols - tlsEllipticCurves:(nullable NSArray *)tlsEllipticCurves - NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCIntervalRange.h b/macos/WebRTC.framework/Versions/A/Headers/RTCIntervalRange.h deleted file mode 100644 index 00508eba17..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCIntervalRange.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -NS_ASSUME_NONNULL_BEGIN - -@interface RTCIntervalRange : NSObject - -@property(nonatomic, readonly) NSInteger min; -@property(nonatomic, readonly) NSInteger max; - -- (instancetype)init; -- (instancetype)initWithMin:(NSInteger)min max:(NSInteger)max NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCLegacyStatsReport.h b/macos/WebRTC.framework/Versions/A/Headers/RTCLegacyStatsReport.h deleted file mode 100644 index 85f2b8fb3d..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCLegacyStatsReport.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** This does not currently conform to the spec. */ -RTC_OBJC_EXPORT -@interface RTCLegacyStatsReport : NSObject - -/** Time since 1970-01-01T00:00:00Z in milliseconds. */ -@property(nonatomic, readonly) CFTimeInterval timestamp; - -/** The type of stats held by this object. */ -@property(nonatomic, readonly) NSString *type; - -/** The identifier for this object. */ -@property(nonatomic, readonly) NSString *reportId; - -/** A dictionary holding the actual stats. 
*/ -@property(nonatomic, readonly) NSDictionary *values; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCLogging.h b/macos/WebRTC.framework/Versions/A/Headers/RTCLogging.h deleted file mode 100644 index 754945c8f2..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCLogging.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -// Subset of rtc::LoggingSeverity. -typedef NS_ENUM(NSInteger, RTCLoggingSeverity) { - RTCLoggingSeverityVerbose, - RTCLoggingSeverityInfo, - RTCLoggingSeverityWarning, - RTCLoggingSeverityError, - RTCLoggingSeverityNone, -}; - -// Wrapper for C++ RTC_LOG(sev) macros. -// Logs the log string to the webrtc logstream for the given severity. -RTC_EXTERN void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string); - -// Wrapper for rtc::LogMessage::LogToDebug. -// Sets the minimum severity to be logged to console. -RTC_EXTERN void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity); - -// Returns the filename with the path prefix removed. -RTC_EXTERN NSString* RTCFileName(const char* filePath); - -// Some convenience macros. - -#define RTCLogString(format, ...) \ - [NSString stringWithFormat:@"(%@:%d %s): " format, RTCFileName(__FILE__), \ - __LINE__, __FUNCTION__, ##__VA_ARGS__] - -#define RTCLogFormat(severity, format, ...) \ - do { \ - NSString* log_string = RTCLogString(format, ##__VA_ARGS__); \ - RTCLogEx(severity, log_string); \ - } while (false) - -#define RTCLogVerbose(format, ...) 
\ - RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__) - -#define RTCLogInfo(format, ...) \ - RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__) - -#define RTCLogWarning(format, ...) \ - RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__) - -#define RTCLogError(format, ...) \ - RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__) - -#if !defined(NDEBUG) -#define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__) -#else -#define RTCLogDebug(format, ...) \ - do { \ - } while (false) -#endif - -#define RTCLog(format, ...) RTCLogInfo(format, ##__VA_ARGS__) diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMTLNSVideoView.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMTLNSVideoView.h deleted file mode 100644 index ffed4b8b39..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMTLNSVideoView.h +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCVideoRenderer.h" - -NS_AVAILABLE_MAC(10.11) - -RTC_OBJC_EXPORT -@interface RTCMTLNSVideoView : NSView - -@property(nonatomic, weak) id delegate; - -+ (BOOL)isMetalAvailable; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMacros.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMacros.h deleted file mode 100644 index 7f7e64cb76..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMacros.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef SDK_OBJC_BASE_RTCMACROS_H_ -#define SDK_OBJC_BASE_RTCMACROS_H_ - -#define RTC_OBJC_EXPORT __attribute__((visibility("default"))) - -#if defined(__cplusplus) -#define RTC_EXTERN extern "C" RTC_OBJC_EXPORT -#else -#define RTC_EXTERN extern RTC_OBJC_EXPORT -#endif - -#ifdef __OBJC__ -#define RTC_FWD_DECL_OBJC_CLASS(classname) @class classname -#else -#define RTC_FWD_DECL_OBJC_CLASS(classname) typedef struct objc_object classname -#endif - -#endif // SDK_OBJC_BASE_RTCMACROS_H_ diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaConstraints.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMediaConstraints.h deleted file mode 100644 index 5c1a12e33a..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaConstraints.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Constraint keys for media sources. */ -/** The value for this key should be a base64 encoded string containing - * the data from the serialized configuration proto. - */ -RTC_EXTERN NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig; - -/** Constraint keys for generating offers and answers. 
*/ -RTC_EXTERN NSString *const kRTCMediaConstraintsIceRestart; -RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveAudio; -RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveVideo; -RTC_EXTERN NSString *const kRTCMediaConstraintsVoiceActivityDetection; - -/** Constraint values for Boolean parameters. */ -RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue; -RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse; - -RTC_OBJC_EXPORT -@interface RTCMediaConstraints : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -/** Initialize with mandatory and/or optional constraints. */ -- (instancetype) - initWithMandatoryConstraints:(nullable NSDictionary *)mandatory - optionalConstraints:(nullable NSDictionary *)optional - NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaSource.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMediaSource.h deleted file mode 100644 index 838c783208..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaSource.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -typedef NS_ENUM(NSInteger, RTCSourceState) { - RTCSourceStateInitializing, - RTCSourceStateLive, - RTCSourceStateEnded, - RTCSourceStateMuted, -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCMediaSource : NSObject - -/** The current state of the RTCMediaSource. 
*/ -@property(nonatomic, readonly) RTCSourceState state; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaStream.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMediaStream.h deleted file mode 100644 index bb9bec690a..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaStream.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -@class RTCAudioTrack; -@class RTCPeerConnectionFactory; -@class RTCVideoTrack; - -RTC_OBJC_EXPORT -@interface RTCMediaStream : NSObject - -/** The audio tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *audioTracks; - -/** The video tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *videoTracks; - -/** An identifier for this media stream. */ -@property(nonatomic, readonly) NSString *streamId; - -- (instancetype)init NS_UNAVAILABLE; - -/** Adds the given audio track to this media stream. */ -- (void)addAudioTrack:(RTCAudioTrack *)audioTrack; - -/** Adds the given video track to this media stream. */ -- (void)addVideoTrack:(RTCVideoTrack *)videoTrack; - -/** Removes the given audio track to this media stream. */ -- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack; - -/** Removes the given video track to this media stream. 
*/ -- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaStreamTrack.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMediaStreamTrack.h deleted file mode 100644 index d1ea0f28f3..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMediaStreamTrack.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -/** - * Represents the state of the track. This exposes the same states in C++. - */ -typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) { - RTCMediaStreamTrackStateLive, - RTCMediaStreamTrackStateEnded -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio; -RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo; - -RTC_OBJC_EXPORT -@interface RTCMediaStreamTrack : NSObject - -/** - * The kind of track. For example, "audio" if this track represents an audio - * track and "video" if this track represents a video track. - */ -@property(nonatomic, readonly) NSString *kind; - -/** An identifier string. */ -@property(nonatomic, readonly) NSString *trackId; - -/** The enabled state of the track. */ -@property(nonatomic, assign) BOOL isEnabled; - -/** The state of the track. 
*/ -@property(nonatomic, readonly) RTCMediaStreamTrackState readyState; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMetrics.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMetrics.h deleted file mode 100644 index 6629fdacec..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMetrics.h +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCMetricsSampleInfo.h" - -/** - * Enables gathering of metrics (which can be fetched with - * RTCGetAndResetMetrics). Must be called before any other call into WebRTC. - */ -RTC_EXTERN void RTCEnableMetrics(void); - -/** Gets and clears native histograms. */ -RTC_EXTERN NSArray* RTCGetAndResetMetrics(void); diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMetricsSampleInfo.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMetricsSampleInfo.h deleted file mode 100644 index cd38ab9a91..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMetricsSampleInfo.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCMetricsSampleInfo : NSObject - -/** - * Example of RTCMetricsSampleInfo: - * name: "WebRTC.Video.InputFramesPerSecond" - * min: 1 - * max: 100 - * bucketCount: 50 - * samples: [29]:2 [30]:1 - */ - -/** The name of the histogram. */ -@property(nonatomic, readonly) NSString *name; - -/** The minimum bucket value. */ -@property(nonatomic, readonly) int min; - -/** The maximum bucket value. */ -@property(nonatomic, readonly) int max; - -/** The number of buckets. */ -@property(nonatomic, readonly) int bucketCount; - -/** A dictionary holding the samples . */ -@property(nonatomic, readonly) NSDictionary *samples; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMutableI420Buffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMutableI420Buffer.h deleted file mode 100644 index 098fb9a66f..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMutableI420Buffer.h +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCI420Buffer.h" -#import "RTCMutableYUVPlanarBuffer.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Extension of the I420 buffer with mutable data access */ -RTC_OBJC_EXPORT -@protocol RTCMutableI420Buffer -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCMutableYUVPlanarBuffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCMutableYUVPlanarBuffer.h deleted file mode 100644 index 00dfcd94ca..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCMutableYUVPlanarBuffer.h +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCYUVPlanarBuffer.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Extension of the YUV planar data buffer with mutable data access */ -RTC_OBJC_EXPORT -@protocol RTCMutableYUVPlanarBuffer - -@property(nonatomic, readonly) uint8_t *mutableDataY; -@property(nonatomic, readonly) uint8_t *mutableDataU; -@property(nonatomic, readonly) uint8_t *mutableDataV; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCNSGLVideoView.h b/macos/WebRTC.framework/Versions/A/Headers/RTCNSGLVideoView.h deleted file mode 100644 index 2540f38154..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCNSGLVideoView.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#if !TARGET_OS_IPHONE - -#import - -#import "RTCVideoRenderer.h" -#import "RTCVideoViewShading.h" - -NS_ASSUME_NONNULL_BEGIN - -@class RTCNSGLVideoView; - -RTC_OBJC_EXPORT -@protocol RTCNSGLVideoViewDelegate -@end - -RTC_OBJC_EXPORT -@interface RTCNSGLVideoView : NSOpenGLView - -@property(nonatomic, weak) id delegate; - -- (instancetype)initWithFrame:(NSRect)frameRect - pixelFormat:(NSOpenGLPixelFormat *)format - shader:(id)shader NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END - -#endif diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCNativeI420Buffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCNativeI420Buffer.h deleted file mode 100644 index 9a904f5396..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCNativeI420Buffer.h +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCI420Buffer.h" -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** RTCI420Buffer implements the RTCI420Buffer protocol */ -RTC_OBJC_EXPORT -@interface RTCI420Buffer : NSObject -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCNativeMutableI420Buffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCNativeMutableI420Buffer.h deleted file mode 100644 index 6cd5110460..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCNativeMutableI420Buffer.h +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCMutableI420Buffer.h" -#import "RTCNativeI420Buffer.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Mutable version of RTCI420Buffer */ -RTC_OBJC_EXPORT -@interface RTCMutableI420Buffer : RTCI420Buffer -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnection.h b/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnection.h deleted file mode 100644 index c641fdd545..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnection.h +++ /dev/null @@ -1,348 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" - -@class RTCConfiguration; -@class RTCDataChannel; -@class RTCDataChannelConfiguration; -@class RTCIceCandidate; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCMediaStreamTrack; -@class RTCPeerConnectionFactory; -@class RTCRtpReceiver; -@class RTCRtpSender; -@class RTCRtpTransceiver; -@class RTCRtpTransceiverInit; -@class RTCSessionDescription; -@class RTCStatisticsReport; -@class RTCLegacyStatsReport; - -typedef NS_ENUM(NSInteger, RTCRtpMediaType); - -NS_ASSUME_NONNULL_BEGIN - -extern NSString *const kRTCPeerConnectionErrorDomain; -extern int const kRTCSessionDescriptionErrorCode; - -/** Represents the signaling state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCSignalingState) { - RTCSignalingStateStable, - RTCSignalingStateHaveLocalOffer, - RTCSignalingStateHaveLocalPrAnswer, - RTCSignalingStateHaveRemoteOffer, - RTCSignalingStateHaveRemotePrAnswer, - // Not an actual state, represents the total number of states. - RTCSignalingStateClosed, -}; - -/** Represents the ice connection state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCIceConnectionState) { - RTCIceConnectionStateNew, - RTCIceConnectionStateChecking, - RTCIceConnectionStateConnected, - RTCIceConnectionStateCompleted, - RTCIceConnectionStateFailed, - RTCIceConnectionStateDisconnected, - RTCIceConnectionStateClosed, - RTCIceConnectionStateCount, -}; - -/** Represents the combined ice+dtls connection state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCPeerConnectionState) { - RTCPeerConnectionStateNew, - RTCPeerConnectionStateConnecting, - RTCPeerConnectionStateConnected, - RTCPeerConnectionStateDisconnected, - RTCPeerConnectionStateFailed, - RTCPeerConnectionStateClosed, -}; - -/** Represents the ice gathering state of the peer connection. 
*/ -typedef NS_ENUM(NSInteger, RTCIceGatheringState) { - RTCIceGatheringStateNew, - RTCIceGatheringStateGathering, - RTCIceGatheringStateComplete, -}; - -/** Represents the stats output level. */ -typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) { - RTCStatsOutputLevelStandard, - RTCStatsOutputLevelDebug, -}; - -@class RTCPeerConnection; - -RTC_OBJC_EXPORT -@protocol RTCPeerConnectionDelegate - -/** Called when the SignalingState changed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeSignalingState:(RTCSignalingState)stateChanged; - -/** Called when media is received on a new stream from remote peer. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream; - -/** Called when a remote peer closes a stream. - * This is not called when RTCSdpSemanticsUnifiedPlan is specified. - */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream; - -/** Called when negotiation is needed, for example ICE has restarted. */ -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection; - -/** Called any time the IceConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeIceConnectionState:(RTCIceConnectionState)newState; - -/** Called any time the IceGatheringState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeIceGatheringState:(RTCIceGatheringState)newState; - -/** New ice candidate has been found. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didGenerateIceCandidate:(RTCIceCandidate *)candidate; - -/** Called when a group of local Ice candidates have been removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveIceCandidates:(NSArray *)candidates; - -/** New data channel has been opened. 
*/ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didOpenDataChannel:(RTCDataChannel *)dataChannel; - -/** Called when signaling indicates a transceiver will be receiving media from - * the remote endpoint. - * This is only called with RTCSdpSemanticsUnifiedPlan specified. - */ -@optional -/** Called any time the PeerConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeConnectionState:(RTCPeerConnectionState)newState; - -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver; - -/** Called when a receiver and its track are created. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didAddReceiver:(RTCRtpReceiver *)rtpReceiver - streams:(NSArray *)mediaStreams; - -/** Called when the receiver and its track are removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver; - -@end - -RTC_OBJC_EXPORT -@interface RTCPeerConnection : NSObject - -/** The object that will be notifed about events such as state changes and - * streams being added or removed. - */ -@property(nonatomic, weak, nullable) id delegate; -/** This property is not available with RTCSdpSemanticsUnifiedPlan. Please use - * |senders| instead. - */ -@property(nonatomic, readonly) NSArray *localStreams; -@property(nonatomic, readonly, nullable) RTCSessionDescription *localDescription; -@property(nonatomic, readonly, nullable) RTCSessionDescription *remoteDescription; -@property(nonatomic, readonly) RTCSignalingState signalingState; -@property(nonatomic, readonly) RTCIceConnectionState iceConnectionState; -@property(nonatomic, readonly) RTCPeerConnectionState connectionState; -@property(nonatomic, readonly) RTCIceGatheringState iceGatheringState; -@property(nonatomic, readonly, copy) RTCConfiguration *configuration; - -/** Gets all RTCRtpSenders associated with this peer connection. 
- * Note: reading this property returns different instances of RTCRtpSender. - * Use isEqual: instead of == to compare RTCRtpSender instances. - */ -@property(nonatomic, readonly) NSArray *senders; - -/** Gets all RTCRtpReceivers associated with this peer connection. - * Note: reading this property returns different instances of RTCRtpReceiver. - * Use isEqual: instead of == to compare RTCRtpReceiver instances. - */ -@property(nonatomic, readonly) NSArray *receivers; - -/** Gets all RTCRtpTransceivers associated with this peer connection. - * Note: reading this property returns different instances of - * RTCRtpTransceiver. Use isEqual: instead of == to compare RTCRtpTransceiver - * instances. - * This is only available with RTCSdpSemanticsUnifiedPlan specified. - */ -@property(nonatomic, readonly) NSArray *transceivers; - -- (instancetype)init NS_UNAVAILABLE; - -/** Sets the PeerConnection's global configuration to |configuration|. - * Any changes to STUN/TURN servers or ICE candidate policy will affect the - * next gathering phase, and cause the next call to createOffer to generate - * new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies - * cannot be changed with this method. - */ -- (BOOL)setConfiguration:(RTCConfiguration *)configuration; - -/** Terminate all media and close the transport. */ -- (void)close; - -/** Provide a remote candidate to the ICE Agent. */ -- (void)addIceCandidate:(RTCIceCandidate *)candidate; - -/** Remove a group of remote candidates from the ICE Agent. */ -- (void)removeIceCandidates:(NSArray *)candidates; - -/** Add a new media stream to be sent on this peer connection. - * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use - * addTrack instead. - */ -- (void)addStream:(RTCMediaStream *)stream; - -/** Remove the given media stream from this peer connection. - * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use - * removeTrack instead. 
- */ -- (void)removeStream:(RTCMediaStream *)stream; - -/** Add a new media stream track to be sent on this peer connection, and return - * the newly created RTCRtpSender. The RTCRtpSender will be associated with - * the streams specified in the |streamIds| list. - * - * Errors: If an error occurs, returns nil. An error can occur if: - * - A sender already exists for the track. - * - The peer connection is closed. - */ -- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray *)streamIds; - -/** With PlanB semantics, removes an RTCRtpSender from this peer connection. - * - * With UnifiedPlan semantics, sets sender's track to null and removes the - * send component from the associated RTCRtpTransceiver's direction. - * - * Returns YES on success. - */ -- (BOOL)removeTrack:(RTCRtpSender *)sender; - -/** addTransceiver creates a new RTCRtpTransceiver and adds it to the set of - * transceivers. Adding a transceiver will cause future calls to CreateOffer - * to add a media description for the corresponding transceiver. - * - * The initial value of |mid| in the returned transceiver is nil. Setting a - * new session description may change it to a non-nil value. - * - * https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver - * - * Optionally, an RtpTransceiverInit structure can be specified to configure - * the transceiver from construction. If not specified, the transceiver will - * default to having a direction of kSendRecv and not be part of any streams. - * - * These methods are only available when Unified Plan is enabled (see - * RTCConfiguration). - */ - -/** Adds a transceiver with a sender set to transmit the given track. The kind - * of the transceiver (and sender/receiver) will be derived from the kind of - * the track. 
- */ -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track; -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track - init:(RTCRtpTransceiverInit *)init; - -/** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio - * or RTCRtpMediaTypeVideo. - */ -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType; -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType - init:(RTCRtpTransceiverInit *)init; - -/** Generate an SDP offer. */ -- (void)offerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp, - NSError *_Nullable error))completionHandler; - -/** Generate an SDP answer. */ -- (void)answerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp, - NSError *_Nullable error))completionHandler; - -/** Apply the supplied RTCSessionDescription as the local description. */ -- (void)setLocalDescription:(RTCSessionDescription *)sdp - completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler; - -/** Apply the supplied RTCSessionDescription as the remote description. */ -- (void)setRemoteDescription:(RTCSessionDescription *)sdp - completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler; - -/** Limits the bandwidth allocated for all RTP streams sent by this - * PeerConnection. Nil parameters will be unchanged. Setting - * |currentBitrateBps| will force the available bitrate estimate to the given - * value. Returns YES if the parameters were successfully updated. - */ -- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps - currentBitrateBps:(nullable NSNumber *)currentBitrateBps - maxBitrateBps:(nullable NSNumber *)maxBitrateBps; - -/** Start or stop recording an Rtc EventLog. 
*/ -- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes; -- (void)stopRtcEventLog; - -@end - -@interface RTCPeerConnection (Media) - -/** Create an RTCRtpSender with the specified kind and media stream ID. - * See RTCMediaStreamTrack.h for available kinds. - * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use - * addTransceiver instead. - */ -- (RTCRtpSender *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId; - -@end - -@interface RTCPeerConnection (DataChannel) - -/** Create a new data channel with the given label and configuration. */ -- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label - configuration:(RTCDataChannelConfiguration *)configuration; - -@end - -typedef void (^RTCStatisticsCompletionHandler)(RTCStatisticsReport *); - -@interface RTCPeerConnection (Stats) - -/** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil - * statistics are gathered for all tracks. - */ -- (void)statsForTrack:(nullable RTCMediaStreamTrack *)mediaStreamTrack - statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel - completionHandler:(nullable void (^)(NSArray *stats))completionHandler; - -/** Gather statistic through the v2 statistics API. */ -- (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler; - -/** Spec-compliant getStats() performing the stats selection algorithm with the - * sender. - */ -- (void)statisticsForSender:(RTCRtpSender *)sender - completionHandler:(RTCStatisticsCompletionHandler)completionHandler; - -/** Spec-compliant getStats() performing the stats selection algorithm with the - * receiver. 
- */ -- (void)statisticsForReceiver:(RTCRtpReceiver *)receiver - completionHandler:(RTCStatisticsCompletionHandler)completionHandler; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnectionFactory.h b/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnectionFactory.h deleted file mode 100644 index c808218b54..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnectionFactory.h +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -@class RTCAudioSource; -@class RTCAudioTrack; -@class RTCConfiguration; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCPeerConnection; -@class RTCVideoSource; -@class RTCVideoTrack; -@class RTCPeerConnectionFactoryOptions; -@protocol RTCPeerConnectionDelegate; -@protocol RTCVideoDecoderFactory; -@protocol RTCVideoEncoderFactory; - -RTC_OBJC_EXPORT -@interface RTCPeerConnectionFactory : NSObject - -/* Initialize object with default H264 video encoder/decoder factories */ -- (instancetype)init; - -/* Initialize object with injectable video encoder/decoder factories */ -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory; - -/** Initialize an RTCAudioSource with constraints. */ -- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints; - -/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source with no - * constraints. 
- */ -- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId; - -/** Initialize an RTCAudioTrack with a source and an id. */ -- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source trackId:(NSString *)trackId; - -/** Initialize a generic RTCVideoSource. The RTCVideoSource should be passed to a RTCVideoCapturer - * implementation, e.g. RTCCameraVideoCapturer, in order to produce frames. - */ -- (RTCVideoSource *)videoSource; - -/** Initialize an RTCVideoTrack with a source and an id. */ -- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source trackId:(NSString *)trackId; - -/** Initialize an RTCMediaStream with an id. */ -- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId; - -/** Initialize an RTCPeerConnection with a configuration, constraints, and - * delegate. - */ -- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints - delegate: - (nullable id)delegate; - -/** Set the options to be used for subsequently created RTCPeerConnections */ -- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options; - -/** Start an AecDump recording. This API call will likely change in the future. */ -- (BOOL)startAecDumpWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes; - -/* Stop an active AecDump recording */ -- (void)stopAecDump; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnectionFactoryOptions.h b/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnectionFactoryOptions.h deleted file mode 100644 index 4bec8695bd..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCPeerConnectionFactoryOptions.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCPeerConnectionFactoryOptions : NSObject - -@property(nonatomic, assign) BOOL disableEncryption; - -@property(nonatomic, assign) BOOL disableNetworkMonitor; - -@property(nonatomic, assign) BOOL ignoreLoopbackNetworkAdapter; - -@property(nonatomic, assign) BOOL ignoreVPNNetworkAdapter; - -@property(nonatomic, assign) BOOL ignoreCellularNetworkAdapter; - -@property(nonatomic, assign) BOOL ignoreWiFiNetworkAdapter; - -@property(nonatomic, assign) BOOL ignoreEthernetNetworkAdapter; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtcpParameters.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtcpParameters.h deleted file mode 100644 index 5c265806b1..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtcpParameters.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCRtcpParameters : NSObject - -/** The Canonical Name used by RTCP. */ -@property(nonatomic, readonly, copy) NSString *cname; - -/** Whether reduced size RTCP is configured or compound RTCP. 
*/ -@property(nonatomic, assign) BOOL isReducedSize; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpCodecParameters.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpCodecParameters.h deleted file mode 100644 index 5d3cac5c96..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpCodecParameters.h +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXTERN const NSString *const kRTCRtxCodecName; -RTC_EXTERN const NSString *const kRTCRedCodecName; -RTC_EXTERN const NSString *const kRTCUlpfecCodecName; -RTC_EXTERN const NSString *const kRTCFlexfecCodecName; -RTC_EXTERN const NSString *const kRTCOpusCodecName; -RTC_EXTERN const NSString *const kRTCIsacCodecName; -RTC_EXTERN const NSString *const kRTCL16CodecName; -RTC_EXTERN const NSString *const kRTCG722CodecName; -RTC_EXTERN const NSString *const kRTCIlbcCodecName; -RTC_EXTERN const NSString *const kRTCPcmuCodecName; -RTC_EXTERN const NSString *const kRTCPcmaCodecName; -RTC_EXTERN const NSString *const kRTCDtmfCodecName; -RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName; -RTC_EXTERN const NSString *const kRTCVp8CodecName; -RTC_EXTERN const NSString *const kRTCVp9CodecName; -RTC_EXTERN const NSString *const kRTCH264CodecName; - -/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTCRtpCodecParameters */ -RTC_OBJC_EXPORT -@interface RTCRtpCodecParameters : NSObject - -/** The RTP payload type. 
*/ -@property(nonatomic, assign) int payloadType; - -/** - * The codec MIME subtype. Valid types are listed in: - * http://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-2 - * - * Several supported types are represented by the constants above. - */ -@property(nonatomic, readonly, nonnull) NSString *name; - -/** - * The media type of this codec. Equivalent to MIME top-level type. - * - * Valid values are kRTCMediaStreamTrackKindAudio and - * kRTCMediaStreamTrackKindVideo. - */ -@property(nonatomic, readonly, nonnull) NSString *kind; - -/** The codec clock rate expressed in Hertz. */ -@property(nonatomic, readonly, nullable) NSNumber *clockRate; - -/** - * The number of channels (mono=1, stereo=2). - * Set to null for video codecs. - **/ -@property(nonatomic, readonly, nullable) NSNumber *numChannels; - -/** The "format specific parameters" field from the "a=fmtp" line in the SDP */ -@property(nonatomic, readonly, nonnull) NSDictionary *parameters; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpEncodingParameters.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpEncodingParameters.h deleted file mode 100644 index dce25129b2..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpEncodingParameters.h +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCRtpEncodingParameters : NSObject - -/** The idenfifier for the encoding layer. 
This is used in simulcast. */ -@property(nonatomic, copy, nullable) NSString *rid; - -/** Controls whether the encoding is currently transmitted. */ -@property(nonatomic, assign) BOOL isActive; - -/** The maximum bitrate to use for the encoding, or nil if there is no - * limit. - */ -@property(nonatomic, copy, nullable) NSNumber *maxBitrateBps; - -/** The minimum bitrate to use for the encoding, or nil if there is no - * limit. - */ -@property(nonatomic, copy, nullable) NSNumber *minBitrateBps; - -/** The maximum framerate to use for the encoding, or nil if there is no - * limit. - */ -@property(nonatomic, copy, nullable) NSNumber *maxFramerate; - -/** The requested number of temporal layers to use for the encoding, or nil - * if the default should be used. - */ -@property(nonatomic, copy, nullable) NSNumber *numTemporalLayers; - -/** Scale the width and height down by this factor for video. If nil, - * implementation default scaling factor will be used. - */ -@property(nonatomic, copy, nullable) NSNumber *scaleResolutionDownBy; - -/** The SSRC being used by this encoding. */ -@property(nonatomic, readonly, nullable) NSNumber *ssrc; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpFragmentationHeader.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpFragmentationHeader.h deleted file mode 100644 index 2e26b08b8a..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpFragmentationHeader.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Information for header. Corresponds to webrtc::RTPFragmentationHeader. */ -RTC_OBJC_EXPORT -@interface RTCRtpFragmentationHeader : NSObject - -@property(nonatomic, strong) NSArray *fragmentationOffset; -@property(nonatomic, strong) NSArray *fragmentationLength; -@property(nonatomic, strong) NSArray *fragmentationTimeDiff; -@property(nonatomic, strong) NSArray *fragmentationPlType; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpHeaderExtension.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpHeaderExtension.h deleted file mode 100644 index 32114499ce..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpHeaderExtension.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCRtpHeaderExtension : NSObject - -/** The URI of the RTP header extension, as defined in RFC5285. */ -@property(nonatomic, readonly, copy) NSString *uri; - -/** The value put in the RTP packet to identify the header extension. */ -@property(nonatomic, readonly) int id; - -/** Whether the header extension is encrypted or not. 
*/ -@property(nonatomic, readonly, getter=isEncrypted) BOOL encrypted; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpParameters.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpParameters.h deleted file mode 100644 index c1f2d5cea9..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpParameters.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCRtcpParameters.h" -#import "RTCRtpCodecParameters.h" -#import "RTCRtpEncodingParameters.h" -#import "RTCRtpHeaderExtension.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCRtpParameters : NSObject - -/** A unique identifier for the last set of parameters applied. */ -@property(nonatomic, copy) NSString *transactionId; - -/** Parameters used for RTCP. */ -@property(nonatomic, readonly, copy) RTCRtcpParameters *rtcp; - -/** An array containing parameters for RTP header extensions. */ -@property(nonatomic, readonly, copy) NSArray *headerExtensions; - -/** The currently active encodings in the order of preference. */ -@property(nonatomic, copy) NSArray *encodings; - -/** The negotiated set of send codecs in order of preference. 
*/ -@property(nonatomic, copy) NSArray *codecs; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpReceiver.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpReceiver.h deleted file mode 100644 index 7a7dacea2b..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpReceiver.h +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCMediaStreamTrack.h" -#import "RTCRtpParameters.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Represents the media type of the RtpReceiver. */ -typedef NS_ENUM(NSInteger, RTCRtpMediaType) { - RTCRtpMediaTypeAudio, - RTCRtpMediaTypeVideo, - RTCRtpMediaTypeData, -}; - -@class RTCRtpReceiver; - -RTC_OBJC_EXPORT -@protocol RTCRtpReceiverDelegate - -/** Called when the first RTP packet is received. - * - * Note: Currently if there are multiple RtpReceivers of the same media type, - * they will all call OnFirstPacketReceived at once. - * - * For example, if we create three audio receivers, A/B/C, they will listen to - * the same signal from the underneath network layer. Whenever the first audio packet - * is received, the underneath signal will be fired. All the receivers A/B/C will be - * notified and the callback of the receiver's delegate will be called. - * - * The process is the same for video receivers. 
- */ -- (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver - didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType; - -@end - -RTC_OBJC_EXPORT -@protocol RTCRtpReceiver - -/** A unique identifier for this receiver. */ -@property(nonatomic, readonly) NSString *receiverId; - -/** The currently active RTCRtpParameters, as defined in - * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. - * - * The WebRTC specification only defines RTCRtpParameters in terms of senders, - * but this API also applies them to receivers, similar to ORTC: - * http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. - */ -@property(nonatomic, readonly) RTCRtpParameters *parameters; - -/** The RTCMediaStreamTrack associated with the receiver. - * Note: reading this property returns a new instance of - * RTCMediaStreamTrack. Use isEqual: instead of == to compare - * RTCMediaStreamTrack instances. - */ -@property(nonatomic, readonly, nullable) RTCMediaStreamTrack *track; - -/** The delegate for this RtpReceiver. */ -@property(nonatomic, weak) id delegate; - -@end - -RTC_OBJC_EXPORT -@interface RTCRtpReceiver : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpSender.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpSender.h deleted file mode 100644 index 49f61b81d5..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpSender.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCDtmfSender.h" -#import "RTCMacros.h" -#import "RTCMediaStreamTrack.h" -#import "RTCRtpParameters.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@protocol RTCRtpSender - -/** A unique identifier for this sender. */ -@property(nonatomic, readonly) NSString *senderId; - -/** The currently active RTCRtpParameters, as defined in - * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. - */ -@property(nonatomic, copy) RTCRtpParameters *parameters; - -/** The RTCMediaStreamTrack associated with the sender. - * Note: reading this property returns a new instance of - * RTCMediaStreamTrack. Use isEqual: instead of == to compare - * RTCMediaStreamTrack instances. - */ -@property(nonatomic, copy, nullable) RTCMediaStreamTrack *track; - -/** The RTCDtmfSender accociated with the RTP sender. */ -@property(nonatomic, readonly, nullable) id dtmfSender; - -@end - -RTC_OBJC_EXPORT -@interface RTCRtpSender : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpTransceiver.h b/macos/WebRTC.framework/Versions/A/Headers/RTCRtpTransceiver.h deleted file mode 100644 index 8ef3fc1d42..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCRtpTransceiver.h +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" -#import "RTCRtpReceiver.h" -#import "RTCRtpSender.h" - -NS_ASSUME_NONNULL_BEGIN - -/** https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection */ -typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) { - RTCRtpTransceiverDirectionSendRecv, - RTCRtpTransceiverDirectionSendOnly, - RTCRtpTransceiverDirectionRecvOnly, - RTCRtpTransceiverDirectionInactive, -}; - -/** Structure for initializing an RTCRtpTransceiver in a call to - * RTCPeerConnection.addTransceiver. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit - */ -RTC_OBJC_EXPORT -@interface RTCRtpTransceiverInit : NSObject - -/** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */ -@property(nonatomic) RTCRtpTransceiverDirection direction; - -/** The added RTCRtpTransceiver will be added to these streams. */ -@property(nonatomic) NSArray *streamIds; - -/** TODO(bugs.webrtc.org/7600): Not implemented. */ -@property(nonatomic) NSArray *sendEncodings; - -@end - -@class RTCRtpTransceiver; - -/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the WebRTC - * specification. A transceiver represents a combination of an RTCRtpSender - * and an RTCRtpReceiver that share a common mid. As defined in JSEP, an - * RTCRtpTransceiver is said to be associated with a media description if its - * mid property is non-nil; otherwise, it is said to be disassociated. - * JSEP: https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24 - * - * Note that RTCRtpTransceivers are only supported when using - * RTCPeerConnection with Unified Plan SDP. - * - * WebRTC specification for RTCRtpTransceiver, the JavaScript analog: - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver - */ -RTC_OBJC_EXPORT -@protocol RTCRtpTransceiver - -/** Media type of the transceiver. The sender and receiver will also have this - * type. 
- */ -@property(nonatomic, readonly) RTCRtpMediaType mediaType; - -/** The mid attribute is the mid negotiated and present in the local and - * remote descriptions. Before negotiation is complete, the mid value may be - * nil. After rollbacks, the value may change from a non-nil value to nil. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid - */ -@property(nonatomic, readonly) NSString *mid; - -/** The sender attribute exposes the RTCRtpSender corresponding to the RTP - * media that may be sent with the transceiver's mid. The sender is always - * present, regardless of the direction of media. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender - */ -@property(nonatomic, readonly) RTCRtpSender *sender; - -/** The receiver attribute exposes the RTCRtpReceiver corresponding to the RTP - * media that may be received with the transceiver's mid. The receiver is - * always present, regardless of the direction of media. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver - */ -@property(nonatomic, readonly) RTCRtpReceiver *receiver; - -/** The isStopped attribute indicates that the sender of this transceiver will - * no longer send, and that the receiver will no longer receive. It is true if - * either stop has been called or if setting the local or remote description - * has caused the RTCRtpTransceiver to be stopped. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped - */ -@property(nonatomic, readonly) BOOL isStopped; - -/** The direction attribute indicates the preferred direction of this - * transceiver, which will be used in calls to createOffer and createAnswer. - * An update of directionality does not take effect immediately. Instead, - * future calls to createOffer and createAnswer mark the corresponding media - * descriptions as sendrecv, sendonly, recvonly, or inactive. 
- * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction - */ -@property(nonatomic) RTCRtpTransceiverDirection direction; - -/** The currentDirection attribute indicates the current direction negotiated - * for this transceiver. If this transceiver has never been represented in an - * offer/answer exchange, or if the transceiver is stopped, the value is not - * present and this method returns NO. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection - */ -- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut; - -/** The stop method irreversibly stops the RTCRtpTransceiver. The sender of - * this transceiver will no longer send, the receiver will no longer receive. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop - */ -- (void)stop; - -@end - -RTC_OBJC_EXPORT -@interface RTCRtpTransceiver : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCSSLAdapter.h b/macos/WebRTC.framework/Versions/A/Headers/RTCSSLAdapter.h deleted file mode 100644 index f68bc5e9e3..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCSSLAdapter.h +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -/** - * Initialize and clean up the SSL library. Failure is fatal. These call the - * corresponding functions in webrtc/rtc_base/ssladapter.h. 
- */ -RTC_EXTERN BOOL RTCInitializeSSL(void); -RTC_EXTERN BOOL RTCCleanupSSL(void); diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCSessionDescription.h b/macos/WebRTC.framework/Versions/A/Headers/RTCSessionDescription.h deleted file mode 100644 index b9bcab1a46..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCSessionDescription.h +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -/** - * Represents the session description type. This exposes the same types that are - * in C++, which doesn't include the rollback type that is in the W3C spec. - */ -typedef NS_ENUM(NSInteger, RTCSdpType) { - RTCSdpTypeOffer, - RTCSdpTypePrAnswer, - RTCSdpTypeAnswer, -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT -@interface RTCSessionDescription : NSObject - -/** The type of session description. */ -@property(nonatomic, readonly) RTCSdpType type; - -/** The SDP string representation of this session description. */ -@property(nonatomic, readonly) NSString *sdp; - -- (instancetype)init NS_UNAVAILABLE; - -/** Initialize a session description with a type and SDP string. 
*/ -- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp NS_DESIGNATED_INITIALIZER; - -+ (NSString *)stringForType:(RTCSdpType)type; - -+ (RTCSdpType)typeForString:(NSString *)string; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCTracing.h b/macos/WebRTC.framework/Versions/A/Headers/RTCTracing.h deleted file mode 100644 index 5c66e5a63a..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCTracing.h +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -RTC_EXTERN void RTCSetupInternalTracer(void); -/** Starts capture to specified file. Must be a valid writable path. - * Returns YES if capture starts. - */ -RTC_EXTERN BOOL RTCStartInternalCapture(NSString* filePath); -RTC_EXTERN void RTCStopInternalCapture(void); -RTC_EXTERN void RTCShutdownInternalTracer(void); diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoCapturer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoCapturer.h deleted file mode 100644 index 5212627692..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoCapturer.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "RTCVideoFrame.h" - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -@class RTCVideoCapturer; - -RTC_OBJC_EXPORT -@protocol RTCVideoCapturerDelegate -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame; -@end - -RTC_OBJC_EXPORT -@interface RTCVideoCapturer : NSObject - -@property(nonatomic, weak) id delegate; - -- (instancetype)initWithDelegate:(id)delegate; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoCodecInfo.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoCodecInfo.h deleted file mode 100644 index 2162caaa21..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoCodecInfo.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. 
*/ -RTC_OBJC_EXPORT -@interface RTCVideoCodecInfo : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -- (instancetype)initWithName:(NSString *)name; - -- (instancetype)initWithName:(NSString *)name - parameters:(nullable NSDictionary *)parameters - NS_DESIGNATED_INITIALIZER; - -- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info; - -@property(nonatomic, readonly) NSString *name; -@property(nonatomic, readonly) NSDictionary *parameters; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoder.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoder.h deleted file mode 100644 index 18c6f6b000..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoder.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCCodecSpecificInfo.h" -#import "RTCEncodedImage.h" -#import "RTCMacros.h" -#import "RTCVideoEncoderSettings.h" -#import "RTCVideoFrame.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Callback block for decoder. */ -typedef void (^RTCVideoDecoderCallback)(RTCVideoFrame *frame); - -/** Protocol for decoder implementations. 
*/ -RTC_OBJC_EXPORT -@protocol RTCVideoDecoder - -- (void)setCallback:(RTCVideoDecoderCallback)callback; -- (NSInteger)startDecodeWithSettings:(RTCVideoEncoderSettings *)settings - numberOfCores:(int)numberOfCores - DEPRECATED_MSG_ATTRIBUTE("use startDecodeWithNumberOfCores: instead"); -- (NSInteger)releaseDecoder; -- (NSInteger)decode:(RTCEncodedImage *)encodedImage - missingFrames:(BOOL)missingFrames - codecSpecificInfo:(nullable id)info - renderTimeMs:(int64_t)renderTimeMs; -- (NSString *)implementationName; - -// TODO(andersc): Make non-optional when `startDecodeWithSettings:numberOfCores:` is removed. -@optional -- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderFactory.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderFactory.h deleted file mode 100644 index 3e24153b82..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderFactory.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoCodecInfo.h" -#import "RTCVideoDecoder.h" - -NS_ASSUME_NONNULL_BEGIN - -/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. */ -RTC_OBJC_EXPORT -@protocol RTCVideoDecoderFactory - -- (nullable id)createDecoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? 
- -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderFactoryH264.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderFactoryH264.h deleted file mode 100644 index 4fcff1dff7..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderFactoryH264.h +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoDecoderFactory.h" - -RTC_OBJC_EXPORT -@interface RTCVideoDecoderFactoryH264 : NSObject -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderH264.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderH264.h deleted file mode 100644 index b860276206..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderH264.h +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoDecoder.h" - -RTC_OBJC_EXPORT -@interface RTCVideoDecoderH264 : NSObject -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderVP8.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderVP8.h deleted file mode 100644 index 00786dc514..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderVP8.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoDecoder.h" - -RTC_OBJC_EXPORT -@interface RTCVideoDecoderVP8 : NSObject - -/* This returns a VP8 decoder that can be returned from a RTCVideoDecoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)vp8Decoder; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderVP9.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderVP9.h deleted file mode 100644 index b74c1ef999..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoDecoderVP9.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoDecoder.h" - -RTC_OBJC_EXPORT -@interface RTCVideoDecoderVP9 : NSObject - -/* This returns a VP9 decoder that can be returned from a RTCVideoDecoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)vp9Decoder; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoder.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoder.h deleted file mode 100644 index c5257674d8..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoder.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCCodecSpecificInfo.h" -#import "RTCEncodedImage.h" -#import "RTCMacros.h" -#import "RTCRtpFragmentationHeader.h" -#import "RTCVideoEncoderQpThresholds.h" -#import "RTCVideoEncoderSettings.h" -#import "RTCVideoFrame.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Callback block for encoder. */ -typedef BOOL (^RTCVideoEncoderCallback)(RTCEncodedImage *frame, - id info, - RTCRtpFragmentationHeader *header); - -/** Protocol for encoder implementations. 
*/ -RTC_OBJC_EXPORT -@protocol RTCVideoEncoder - -- (void)setCallback:(RTCVideoEncoderCallback)callback; -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings - numberOfCores:(int)numberOfCores; -- (NSInteger)releaseEncoder; -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(nullable id)info - frameTypes:(NSArray *)frameTypes; -- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate; -- (NSString *)implementationName; - -/** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to - * keep the QP from the encoded images within the given range. Returning nil from this function - * disables quality scaling. */ -- (nullable RTCVideoEncoderQpThresholds *)scalingSettings; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderFactory.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderFactory.h deleted file mode 100644 index 20c603d6fe..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderFactory.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoCodecInfo.h" -#import "RTCVideoEncoder.h" - -NS_ASSUME_NONNULL_BEGIN - -/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. */ -RTC_OBJC_EXPORT -@protocol RTCVideoEncoderFactory - -- (nullable id)createEncoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? 
- -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderFactoryH264.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderFactoryH264.h deleted file mode 100644 index c64405e4da..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderFactoryH264.h +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoEncoderFactory.h" - -RTC_OBJC_EXPORT -@interface RTCVideoEncoderFactoryH264 : NSObject -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderH264.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderH264.h deleted file mode 100644 index a9c05580a4..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderH264.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoCodecInfo.h" -#import "RTCVideoEncoder.h" - -RTC_OBJC_EXPORT -@interface RTCVideoEncoderH264 : NSObject - -- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderQpThresholds.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderQpThresholds.h deleted file mode 100644 index 2b48f45ce0..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderQpThresholds.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -/** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */ -RTC_OBJC_EXPORT -@interface RTCVideoEncoderQpThresholds : NSObject - -- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high; - -@property(nonatomic, readonly) NSInteger low; -@property(nonatomic, readonly) NSInteger high; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderSettings.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderSettings.h deleted file mode 100644 index a9403f8dec..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderSettings.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) { - RTCVideoCodecModeRealtimeVideo, - RTCVideoCodecModeScreensharing, -}; - -/** Settings for encoder. Corresponds to webrtc::VideoCodec. */ -RTC_OBJC_EXPORT -@interface RTCVideoEncoderSettings : NSObject - -@property(nonatomic, strong) NSString *name; - -@property(nonatomic, assign) unsigned short width; -@property(nonatomic, assign) unsigned short height; - -@property(nonatomic, assign) unsigned int startBitrate; // kilobits/sec. -@property(nonatomic, assign) unsigned int maxBitrate; -@property(nonatomic, assign) unsigned int minBitrate; - -@property(nonatomic, assign) uint32_t maxFramerate; - -@property(nonatomic, assign) unsigned int qpMax; -@property(nonatomic, assign) RTCVideoCodecMode mode; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderVP8.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderVP8.h deleted file mode 100644 index 8d87a89893..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderVP8.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoEncoder.h" - -RTC_OBJC_EXPORT -@interface RTCVideoEncoderVP8 : NSObject - -/* This returns a VP8 encoder that can be returned from a RTCVideoEncoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)vp8Encoder; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderVP9.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderVP9.h deleted file mode 100644 index 9efea4be2a..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoEncoderVP9.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoEncoder.h" - -RTC_OBJC_EXPORT -@interface RTCVideoEncoderVP9 : NSObject - -/* This returns a VP9 encoder that can be returned from a RTCVideoEncoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)vp9Encoder; - -@end diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoFrame.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoFrame.h deleted file mode 100644 index 9aca7433f3..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoFrame.h +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -typedef NS_ENUM(NSInteger, RTCVideoRotation) { - RTCVideoRotation_0 = 0, - RTCVideoRotation_90 = 90, - RTCVideoRotation_180 = 180, - RTCVideoRotation_270 = 270, -}; - -@protocol RTCVideoFrameBuffer; - -// RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame. -RTC_OBJC_EXPORT -@interface RTCVideoFrame : NSObject - -/** Width without rotation applied. */ -@property(nonatomic, readonly) int width; - -/** Height without rotation applied. */ -@property(nonatomic, readonly) int height; -@property(nonatomic, readonly) RTCVideoRotation rotation; - -/** Timestamp in nanoseconds. */ -@property(nonatomic, readonly) int64_t timeStampNs; - -/** Timestamp 90 kHz. */ -@property(nonatomic, assign) int32_t timeStamp; - -@property(nonatomic, readonly) id buffer; - -- (instancetype)init NS_UNAVAILABLE; -- (instancetype) new NS_UNAVAILABLE; - -/** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp. - * Deprecated - initialize with a RTCCVPixelBuffer instead - */ -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - rotation:(RTCVideoRotation)rotation - timeStampNs:(int64_t)timeStampNs - DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); - -/** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and - * scaling. Cropping will be applied first on the pixel buffer, followed by - * scaling to the final resolution of scaledWidth x scaledHeight. 
- */ -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - scaledWidth:(int)scaledWidth - scaledHeight:(int)scaledHeight - cropWidth:(int)cropWidth - cropHeight:(int)cropHeight - cropX:(int)cropX - cropY:(int)cropY - rotation:(RTCVideoRotation)rotation - timeStampNs:(int64_t)timeStampNs - DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); - -/** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp. - */ -- (instancetype)initWithBuffer:(id)frameBuffer - rotation:(RTCVideoRotation)rotation - timeStampNs:(int64_t)timeStampNs; - -/** Return a frame that is guaranteed to be I420, i.e. it is possible to access - * the YUV data on it. - */ -- (RTCVideoFrame *)newI420VideoFrame; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoFrameBuffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoFrameBuffer.h deleted file mode 100644 index bb9e6fba63..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoFrameBuffer.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -@protocol RTCI420Buffer; - -// RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer. 
-RTC_OBJC_EXPORT -@protocol RTCVideoFrameBuffer - -@property(nonatomic, readonly) int width; -@property(nonatomic, readonly) int height; - -- (id)toI420; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoRenderer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoRenderer.h deleted file mode 100644 index 7b359a35c2..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoRenderer.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#if TARGET_OS_IPHONE -#import -#endif - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -@class RTCVideoFrame; - -RTC_OBJC_EXPORT -@protocol RTCVideoRenderer - -/** The size of the frame. */ -- (void)setSize:(CGSize)size; - -/** The frame to be displayed. */ -- (void)renderFrame:(nullable RTCVideoFrame *)frame; - -@end - -RTC_OBJC_EXPORT -@protocol RTCVideoViewDelegate - -- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoSource.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoSource.h deleted file mode 100644 index ec8a45c1c2..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoSource.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCMediaSource.h" -#import "RTCVideoCapturer.h" - -NS_ASSUME_NONNULL_BEGIN - -RTC_OBJC_EXPORT - -@interface RTCVideoSource : RTCMediaSource - -- (instancetype)init NS_UNAVAILABLE; - -/** - * Calling this function will cause frames to be scaled down to the - * requested resolution. Also, frames will be cropped to match the - * requested aspect ratio, and frames will be dropped to match the - * requested fps. The requested aspect ratio is orientation agnostic and - * will be adjusted to maintain the input orientation, so it doesn't - * matter if e.g. 1280x720 or 720x1280 is requested. - */ -- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoTrack.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoTrack.h deleted file mode 100644 index b946889eb4..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoTrack.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "RTCMediaStreamTrack.h" - -#import "RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -@protocol RTCVideoRenderer; -@class RTCPeerConnectionFactory; -@class RTCVideoSource; - -RTC_OBJC_EXPORT -@interface RTCVideoTrack : RTCMediaStreamTrack - -/** The video source for this video track. 
*/ -@property(nonatomic, readonly) RTCVideoSource *source; - -- (instancetype)init NS_UNAVAILABLE; - -/** Register a renderer that will render all frames received on this track. */ -- (void)addRenderer:(id)renderer; - -/** Deregister a renderer. */ -- (void)removeRenderer:(id)renderer; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoViewShading.h b/macos/WebRTC.framework/Versions/A/Headers/RTCVideoViewShading.h deleted file mode 100644 index 6876cc3ab5..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCVideoViewShading.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCVideoFrame.h" - -NS_ASSUME_NONNULL_BEGIN - -/** - * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in - * rendering for the RTCEAGLVideoView/RTCNSGLVideoView. - */ -RTC_OBJC_EXPORT -@protocol RTCVideoViewShading - -/** Callback for I420 frames. Each plane is given as a texture. */ -- (void)applyShadingForFrameWithWidth:(int)width - height:(int)height - rotation:(RTCVideoRotation)rotation - yPlane:(GLuint)yPlane - uPlane:(GLuint)uPlane - vPlane:(GLuint)vPlane; - -/** Callback for NV12 frames. Each plane is given as a texture. 
*/ -- (void)applyShadingForFrameWithWidth:(int)width - height:(int)height - rotation:(RTCVideoRotation)rotation - yPlane:(GLuint)yPlane - uvPlane:(GLuint)uvPlane; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/RTCYUVPlanarBuffer.h b/macos/WebRTC.framework/Versions/A/Headers/RTCYUVPlanarBuffer.h deleted file mode 100644 index 8ceb66c99d..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/RTCYUVPlanarBuffer.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "RTCMacros.h" -#import "RTCVideoFrameBuffer.h" - -NS_ASSUME_NONNULL_BEGIN - -/** Protocol for RTCVideoFrameBuffers containing YUV planar data. 
*/ -RTC_OBJC_EXPORT -@protocol RTCYUVPlanarBuffer - -@property(nonatomic, readonly) int chromaWidth; -@property(nonatomic, readonly) int chromaHeight; -@property(nonatomic, readonly) const uint8_t *dataY; -@property(nonatomic, readonly) const uint8_t *dataU; -@property(nonatomic, readonly) const uint8_t *dataV; -@property(nonatomic, readonly) int strideY; -@property(nonatomic, readonly) int strideU; -@property(nonatomic, readonly) int strideV; - -- (instancetype)initWithWidth:(int)width - height:(int)height - dataY:(const uint8_t *)dataY - dataU:(const uint8_t *)dataU - dataV:(const uint8_t *)dataV; -- (instancetype)initWithWidth:(int)width height:(int)height; -- (instancetype)initWithWidth:(int)width - height:(int)height - strideY:(int)strideY - strideU:(int)strideU - strideV:(int)strideV; - -@end - -NS_ASSUME_NONNULL_END diff --git a/macos/WebRTC.framework/Versions/A/Headers/WebRTC.h b/macos/WebRTC.framework/Versions/A/Headers/WebRTC.h deleted file mode 100644 index 31b5686a34..0000000000 --- a/macos/WebRTC.framework/Versions/A/Headers/WebRTC.h +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import diff --git a/macos/WebRTC.framework/Versions/A/Modules/module.modulemap b/macos/WebRTC.framework/Versions/A/Modules/module.modulemap deleted file mode 100644 index cd485a4e81..0000000000 --- a/macos/WebRTC.framework/Versions/A/Modules/module.modulemap +++ /dev/null @@ -1,6 +0,0 @@ -framework module WebRTC { - umbrella header "WebRTC.h" - - export * - module * { export * } -} diff --git a/macos/WebRTC.framework/Versions/A/Resources/Info.plist b/macos/WebRTC.framework/Versions/A/Resources/Info.plist deleted file mode 100644 index a90dbf557a..0000000000 --- a/macos/WebRTC.framework/Versions/A/Resources/Info.plist +++ /dev/null @@ -1,38 +0,0 @@ - - - - - BuildMachineOSBuild - 18G2022 - CFBundleDevelopmentRegion - en - CFBundleExecutable - WebRTC - CFBundleIdentifier - org.webrtc.WebRTC - CFBundleInfoDictionaryVersion - 6.0 - CFBundleName - WebRTC - CFBundlePackageType - FMWK - CFBundleShortVersionString - 1.0 - CFBundleSignature - ???? 
- CFBundleVersion - 1.0 - DTCompiler - com.apple.compilers.llvm.clang.1_0 - DTSDKBuild - 10.15 - DTSDKName - macosx10.15 - DTXcode - 1121 - DTXcodeBuild - 11B500 - NSPrincipalClass - - - diff --git a/macos/WebRTC.framework/Versions/Current b/macos/WebRTC.framework/Versions/Current deleted file mode 120000 index 8c7e5a667f..0000000000 --- a/macos/WebRTC.framework/Versions/Current +++ /dev/null @@ -1 +0,0 @@ -A \ No newline at end of file diff --git a/macos/WebRTC.framework/WebRTC b/macos/WebRTC.framework/WebRTC deleted file mode 120000 index 4172a9ea97..0000000000 --- a/macos/WebRTC.framework/WebRTC +++ /dev/null @@ -1 +0,0 @@ -Versions/Current/WebRTC \ No newline at end of file diff --git a/macos/flutter_webrtc.podspec b/macos/flutter_webrtc.podspec index 1a2a131461..7baf7ea75f 100644 --- a/macos/flutter_webrtc.podspec +++ b/macos/flutter_webrtc.podspec @@ -3,7 +3,7 @@ # Pod::Spec.new do |s| s.name = 'flutter_webrtc' - s.version = '0.2.2' + s.version = '0.14.0' s.summary = 'Flutter WebRTC plugin for macOS.' s.description = <<-DESC A new flutter plugin project. @@ -14,13 +14,7 @@ A new flutter plugin project. s.source = { :path => '.' } s.source_files = ['Classes/**/*'] - s.vendored_frameworks = 'WebRTC.framework' - s.private_header_files = 'third_party/include/**/*' - $dir = File.dirname(__FILE__) + "/third_party/include" - s.pod_target_xcconfig = { "HEADER_SEARCH_PATHS" => $dir} - s.vendored_libraries = 'third_party/lib/*.a' - s.dependency 'FlutterMacOS' - s.platform = :osx - s.osx.deployment_target = '10.11' + s.dependency 'WebRTC-SDK', '125.6422.07' + s.osx.deployment_target = '10.14' end diff --git a/macos/third_party/include/libyuv.h b/macos/third_party/include/libyuv.h deleted file mode 100644 index aeffd5ef7a..0000000000 --- a/macos/third_party/include/libyuv.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_H_ -#define INCLUDE_LIBYUV_H_ - -#include "libyuv/basic_types.h" -#include "libyuv/compare.h" -#include "libyuv/convert.h" -#include "libyuv/convert_argb.h" -#include "libyuv/convert_from.h" -#include "libyuv/convert_from_argb.h" -#include "libyuv/cpu_id.h" -#include "libyuv/mjpeg_decoder.h" -#include "libyuv/planar_functions.h" -#include "libyuv/rotate.h" -#include "libyuv/rotate_argb.h" -#include "libyuv/row.h" -#include "libyuv/scale.h" -#include "libyuv/scale_argb.h" -#include "libyuv/scale_row.h" -#include "libyuv/version.h" -#include "libyuv/video_common.h" - -#endif // INCLUDE_LIBYUV_H_ diff --git a/macos/third_party/include/libyuv/basic_types.h b/macos/third_party/include/libyuv/basic_types.h deleted file mode 100644 index 1bea67f2f2..0000000000 --- a/macos/third_party/include/libyuv/basic_types.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef INCLUDE_LIBYUV_BASIC_TYPES_H_ -#define INCLUDE_LIBYUV_BASIC_TYPES_H_ - -#include // For size_t and NULL - -#if !defined(INT_TYPES_DEFINED) && !defined(GG_LONGLONG) -#define INT_TYPES_DEFINED - -#if defined(_MSC_VER) && (_MSC_VER < 1600) -#include // for uintptr_t on x86 -typedef unsigned __int64 uint64_t; -typedef __int64 int64_t; -typedef unsigned int uint32_t; -typedef int int32_t; -typedef unsigned short uint16_t; -typedef short int16_t; -typedef unsigned char uint8_t; -typedef signed char int8_t; -#else -#include // for uintptr_t and C99 types -#endif // defined(_MSC_VER) && (_MSC_VER < 1600) -// Types are deprecated. Enable this macro for legacy types. -#ifdef LIBYUV_LEGACY_TYPES -typedef uint64_t uint64; -typedef int64_t int64; -typedef uint32_t uint32; -typedef int32_t int32; -typedef uint16_t uint16; -typedef int16_t int16; -typedef uint8_t uint8; -typedef int8_t int8; -#endif // LIBYUV_LEGACY_TYPES -#endif // INT_TYPES_DEFINED - -#if !defined(LIBYUV_API) -#if defined(_WIN32) || defined(__CYGWIN__) -#if defined(LIBYUV_BUILDING_SHARED_LIBRARY) -#define LIBYUV_API __declspec(dllexport) -#elif defined(LIBYUV_USING_SHARED_LIBRARY) -#define LIBYUV_API __declspec(dllimport) -#else -#define LIBYUV_API -#endif // LIBYUV_BUILDING_SHARED_LIBRARY -#elif defined(__GNUC__) && (__GNUC__ >= 4) && !defined(__APPLE__) && \ - (defined(LIBYUV_BUILDING_SHARED_LIBRARY) || \ - defined(LIBYUV_USING_SHARED_LIBRARY)) -#define LIBYUV_API __attribute__((visibility("default"))) -#else -#define LIBYUV_API -#endif // __GNUC__ -#endif // LIBYUV_API - -// TODO(fbarchard): Remove bool macros. 
-#define LIBYUV_BOOL int -#define LIBYUV_FALSE 0 -#define LIBYUV_TRUE 1 - -#endif // INCLUDE_LIBYUV_BASIC_TYPES_H_ diff --git a/macos/third_party/include/libyuv/compare.h b/macos/third_party/include/libyuv/compare.h deleted file mode 100644 index 3353ad71c6..0000000000 --- a/macos/third_party/include/libyuv/compare.h +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_COMPARE_H_ -#define INCLUDE_LIBYUV_COMPARE_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Compute a hash for specified memory. Seed of 5381 recommended. -LIBYUV_API -uint32_t HashDjb2(const uint8_t* src, uint64_t count, uint32_t seed); - -// Hamming Distance -LIBYUV_API -uint64_t ComputeHammingDistance(const uint8_t* src_a, - const uint8_t* src_b, - int count); - -// Scan an opaque argb image and return fourcc based on alpha offset. -// Returns FOURCC_ARGB, FOURCC_BGRA, or 0 if unknown. -LIBYUV_API -uint32_t ARGBDetect(const uint8_t* argb, - int stride_argb, - int width, - int height); - -// Sum Square Error - used to compute Mean Square Error or PSNR. 
-LIBYUV_API -uint64_t ComputeSumSquareError(const uint8_t* src_a, - const uint8_t* src_b, - int count); - -LIBYUV_API -uint64_t ComputeSumSquareErrorPlane(const uint8_t* src_a, - int stride_a, - const uint8_t* src_b, - int stride_b, - int width, - int height); - -static const int kMaxPsnr = 128; - -LIBYUV_API -double SumSquareErrorToPsnr(uint64_t sse, uint64_t count); - -LIBYUV_API -double CalcFramePsnr(const uint8_t* src_a, - int stride_a, - const uint8_t* src_b, - int stride_b, - int width, - int height); - -LIBYUV_API -double I420Psnr(const uint8_t* src_y_a, - int stride_y_a, - const uint8_t* src_u_a, - int stride_u_a, - const uint8_t* src_v_a, - int stride_v_a, - const uint8_t* src_y_b, - int stride_y_b, - const uint8_t* src_u_b, - int stride_u_b, - const uint8_t* src_v_b, - int stride_v_b, - int width, - int height); - -LIBYUV_API -double CalcFrameSsim(const uint8_t* src_a, - int stride_a, - const uint8_t* src_b, - int stride_b, - int width, - int height); - -LIBYUV_API -double I420Ssim(const uint8_t* src_y_a, - int stride_y_a, - const uint8_t* src_u_a, - int stride_u_a, - const uint8_t* src_v_a, - int stride_v_a, - const uint8_t* src_y_b, - int stride_y_b, - const uint8_t* src_u_b, - int stride_u_b, - const uint8_t* src_v_b, - int stride_v_b, - int width, - int height); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_COMPARE_H_ diff --git a/macos/third_party/include/libyuv/compare_row.h b/macos/third_party/include/libyuv/compare_row.h deleted file mode 100644 index e95b9d93eb..0000000000 --- a/macos/third_party/include/libyuv/compare_row.h +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2013 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_COMPARE_ROW_H_ -#define INCLUDE_LIBYUV_COMPARE_ROW_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -#if defined(__pnacl__) || defined(__CLR_VER) || \ - (defined(__native_client__) && defined(__x86_64__)) || \ - (defined(__i386__) && !defined(__SSE__) && !defined(__clang__)) -#define LIBYUV_DISABLE_X86 -#endif -#if defined(__native_client__) -#define LIBYUV_DISABLE_NEON -#endif -// MemorySanitizer does not support assembly code yet. http://crbug.com/344505 -#if defined(__has_feature) -#if __has_feature(memory_sanitizer) -#define LIBYUV_DISABLE_X86 -#endif -#endif -// Visual C 2012 required for AVX2. -#if defined(_M_IX86) && !defined(__clang__) && defined(_MSC_VER) && \ - _MSC_VER >= 1700 -#define VISUALC_HAS_AVX2 1 -#endif // VisualStudio >= 2012 - -// clang >= 3.4.0 required for AVX2. -#if defined(__clang__) && (defined(__x86_64__) || defined(__i386__)) -#if (__clang_major__ > 3) || (__clang_major__ == 3 && (__clang_minor__ >= 4)) -#define CLANG_HAS_AVX2 1 -#endif // clang >= 3.4 -#endif // __clang__ - -// The following are available for Visual C and GCC: -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || defined(__i386__) || defined(_M_IX86)) -#define HAS_HASHDJB2_SSE41 -#define HAS_SUMSQUAREERROR_SSE2 -#define HAS_HAMMINGDISTANCE_SSE42 -#endif - -// The following are available for Visual C and clangcl 32 bit: -#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \ - (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2)) -#define HAS_HASHDJB2_AVX2 -#define HAS_SUMSQUAREERROR_AVX2 -#endif - -// The following are available for GCC and clangcl 64 bit: -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) -#define HAS_HAMMINGDISTANCE_SSSE3 -#endif - -// The following are available for GCC and 
clangcl 64 bit: -#if !defined(LIBYUV_DISABLE_X86) && defined(CLANG_HAS_AVX2) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) -#define HAS_HAMMINGDISTANCE_AVX2 -#endif - -// The following are available for Neon: -#if !defined(LIBYUV_DISABLE_NEON) && \ - (defined(__ARM_NEON__) || defined(LIBYUV_NEON) || defined(__aarch64__)) -#define HAS_SUMSQUAREERROR_NEON -#define HAS_HAMMINGDISTANCE_NEON -#endif - -#if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) -#define HAS_HAMMINGDISTANCE_MSA -#define HAS_SUMSQUAREERROR_MSA -#endif - -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_HAMMINGDISTANCE_MMI -#define HAS_SUMSQUAREERROR_MMI -#endif - -uint32_t HammingDistance_C(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t HammingDistance_SSE42(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t HammingDistance_SSSE3(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t HammingDistance_AVX2(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t HammingDistance_NEON(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t HammingDistance_MSA(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t HammingDistance_MMI(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t SumSquareError_C(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t SumSquareError_SSE2(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t SumSquareError_AVX2(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t SumSquareError_NEON(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t SumSquareError_MSA(const uint8_t* src_a, - const uint8_t* src_b, - int count); -uint32_t SumSquareError_MMI(const uint8_t* src_a, - const uint8_t* src_b, - int count); - -uint32_t HashDjb2_C(const uint8_t* src, int count, uint32_t seed); -uint32_t HashDjb2_SSE41(const uint8_t* src, int count, 
uint32_t seed); -uint32_t HashDjb2_AVX2(const uint8_t* src, int count, uint32_t seed); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_COMPARE_ROW_H_ diff --git a/macos/third_party/include/libyuv/convert.h b/macos/third_party/include/libyuv/convert.h deleted file mode 100644 index d12ef24f79..0000000000 --- a/macos/third_party/include/libyuv/convert.h +++ /dev/null @@ -1,406 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_CONVERT_H_ -#define INCLUDE_LIBYUV_CONVERT_H_ - -#include "libyuv/basic_types.h" - -#include "libyuv/rotate.h" // For enum RotationMode. - -// TODO(fbarchard): fix WebRTC source to include following libyuv headers: -#include "libyuv/convert_argb.h" // For WebRTC I420ToARGB. b/620 -#include "libyuv/convert_from.h" // For WebRTC ConvertFromI420. b/620 -#include "libyuv/planar_functions.h" // For WebRTC I420Rect, CopyPlane. b/618 - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Convert I444 to I420. -LIBYUV_API -int I444ToI420(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert I422 to I420. 
-LIBYUV_API -int I422ToI420(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Copy I420 to I420. -#define I420ToI420 I420Copy -LIBYUV_API -int I420Copy(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Copy I010 to I010 -#define I010ToI010 I010Copy -#define H010ToH010 I010Copy -LIBYUV_API -int I010Copy(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint16_t* dst_y, - int dst_stride_y, - uint16_t* dst_u, - int dst_stride_u, - uint16_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert 10 bit YUV to 8 bit -#define H010ToH420 I010ToI420 -LIBYUV_API -int I010ToI420(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert I400 (grey) to I420. -LIBYUV_API -int I400ToI420(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -#define J400ToJ420 I400ToI420 - -// Convert NV12 to I420. -LIBYUV_API -int NV12ToI420(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert NV21 to I420. 
-LIBYUV_API -int NV21ToI420(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert YUY2 to I420. -LIBYUV_API -int YUY2ToI420(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert UYVY to I420. -LIBYUV_API -int UYVYToI420(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert M420 to I420. -LIBYUV_API -int M420ToI420(const uint8_t* src_m420, - int src_stride_m420, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert Android420 to I420. -LIBYUV_API -int Android420ToI420(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - int src_pixel_stride_uv, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// ARGB little endian (bgra in memory) to I420. -LIBYUV_API -int ARGBToI420(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// BGRA little endian (argb in memory) to I420. -LIBYUV_API -int BGRAToI420(const uint8_t* src_bgra, - int src_stride_bgra, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// ABGR little endian (rgba in memory) to I420. 
-LIBYUV_API -int ABGRToI420(const uint8_t* src_abgr, - int src_stride_abgr, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// RGBA little endian (abgr in memory) to I420. -LIBYUV_API -int RGBAToI420(const uint8_t* src_rgba, - int src_stride_rgba, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// RGB little endian (bgr in memory) to I420. -LIBYUV_API -int RGB24ToI420(const uint8_t* src_rgb24, - int src_stride_rgb24, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// RGB big endian (rgb in memory) to I420. -LIBYUV_API -int RAWToI420(const uint8_t* src_raw, - int src_stride_raw, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// RGB16 (RGBP fourcc) little endian to I420. -LIBYUV_API -int RGB565ToI420(const uint8_t* src_rgb565, - int src_stride_rgb565, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// RGB15 (RGBO fourcc) little endian to I420. -LIBYUV_API -int ARGB1555ToI420(const uint8_t* src_argb1555, - int src_stride_argb1555, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// RGB12 (R444 fourcc) little endian to I420. -LIBYUV_API -int ARGB4444ToI420(const uint8_t* src_argb4444, - int src_stride_argb4444, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -#ifdef HAVE_JPEG -// src_width/height provided by capture. -// dst_width/height for clipping determine final size. 
-LIBYUV_API -int MJPGToI420(const uint8_t* sample, - size_t sample_size, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int src_width, - int src_height, - int dst_width, - int dst_height); - -// Query size of MJPG in pixels. -LIBYUV_API -int MJPGSize(const uint8_t* sample, - size_t sample_size, - int* width, - int* height); -#endif - -// Convert camera sample to I420 with cropping, rotation and vertical flip. -// "src_size" is needed to parse MJPG. -// "dst_stride_y" number of bytes in a row of the dst_y plane. -// Normally this would be the same as dst_width, with recommended alignment -// to 16 bytes for better efficiency. -// If rotation of 90 or 270 is used, stride is affected. The caller should -// allocate the I420 buffer according to rotation. -// "dst_stride_u" number of bytes in a row of the dst_u plane. -// Normally this would be the same as (dst_width + 1) / 2, with -// recommended alignment to 16 bytes for better efficiency. -// If rotation of 90 or 270 is used, stride is affected. -// "crop_x" and "crop_y" are starting position for cropping. -// To center, crop_x = (src_width - dst_width) / 2 -// crop_y = (src_height - dst_height) / 2 -// "src_width" / "src_height" is size of src_frame in pixels. -// "src_height" can be negative indicating a vertically flipped image source. -// "crop_width" / "crop_height" is the size to crop the src to. -// Must be less than or equal to src_width/src_height -// Cropping parameters are pre-rotation. -// "rotation" can be 0, 90, 180 or 270. -// "fourcc" is a fourcc. ie 'I420', 'YUY2' -// Returns 0 for successful; -1 for invalid parameter. Non-zero for failure. 
-LIBYUV_API -int ConvertToI420(const uint8_t* sample, - size_t sample_size, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int crop_x, - int crop_y, - int src_width, - int src_height, - int crop_width, - int crop_height, - enum RotationMode rotation, - uint32_t fourcc); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_CONVERT_H_ diff --git a/macos/third_party/include/libyuv/convert_argb.h b/macos/third_party/include/libyuv/convert_argb.h deleted file mode 100644 index ab772b6c32..0000000000 --- a/macos/third_party/include/libyuv/convert_argb.h +++ /dev/null @@ -1,687 +0,0 @@ -/* - * Copyright 2012 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_CONVERT_ARGB_H_ -#define INCLUDE_LIBYUV_CONVERT_ARGB_H_ - -#include "libyuv/basic_types.h" - -#include "libyuv/rotate.h" // For enum RotationMode. - -// TODO(fbarchard): This set of functions should exactly match convert.h -// TODO(fbarchard): Add tests. Create random content of right size and convert -// with C vs Opt and or to I420 and compare. -// TODO(fbarchard): Some of these functions lack parameter setting. - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Alias. -#define ARGBToARGB ARGBCopy - -// Copy ARGB to ARGB. -LIBYUV_API -int ARGBCopy(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert I420 to ARGB. 
-LIBYUV_API -int I420ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Duplicate prototype for function in convert_from.h for remoting. -LIBYUV_API -int I420ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert I010 to ARGB. -LIBYUV_API -int I010ToARGB(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert I010 to ARGB. -LIBYUV_API -int I010ToARGB(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert I010 to ABGR. -LIBYUV_API -int I010ToABGR(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert H010 to ARGB. -LIBYUV_API -int H010ToARGB(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert H010 to ABGR. -LIBYUV_API -int H010ToABGR(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert I422 to ARGB. 
-LIBYUV_API -int I422ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert I444 to ARGB. -LIBYUV_API -int I444ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert J444 to ARGB. -LIBYUV_API -int J444ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert I444 to ABGR. -LIBYUV_API -int I444ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert I420 with Alpha to preattenuated ARGB. -LIBYUV_API -int I420AlphaToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - const uint8_t* src_a, - int src_stride_a, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height, - int attenuate); - -// Convert I420 with Alpha to preattenuated ABGR. -LIBYUV_API -int I420AlphaToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - const uint8_t* src_a, - int src_stride_a, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height, - int attenuate); - -// Convert I400 (grey) to ARGB. Reverse of ARGBToI400. -LIBYUV_API -int I400ToARGB(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert J400 (jpeg grey) to ARGB. 
-LIBYUV_API -int J400ToARGB(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Alias. -#define YToARGB I400ToARGB - -// Convert NV12 to ARGB. -LIBYUV_API -int NV12ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert NV21 to ARGB. -LIBYUV_API -int NV21ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert NV12 to ABGR. -int NV12ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert NV21 to ABGR. -LIBYUV_API -int NV21ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert NV12 to RGB24. -LIBYUV_API -int NV12ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -// Convert NV21 to RGB24. -LIBYUV_API -int NV21ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -// Convert M420 to ARGB. -LIBYUV_API -int M420ToARGB(const uint8_t* src_m420, - int src_stride_m420, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert YUY2 to ARGB. -LIBYUV_API -int YUY2ToARGB(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert UYVY to ARGB. 
-LIBYUV_API -int UYVYToARGB(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert J420 to ARGB. -LIBYUV_API -int J420ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert J422 to ARGB. -LIBYUV_API -int J422ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert J420 to ABGR. -LIBYUV_API -int J420ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert J422 to ABGR. -LIBYUV_API -int J422ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert H420 to ARGB. -LIBYUV_API -int H420ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert H422 to ARGB. -LIBYUV_API -int H422ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert H420 to ABGR. -LIBYUV_API -int H420ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert H422 to ABGR. 
-LIBYUV_API -int H422ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert H010 to ARGB. -LIBYUV_API -int H010ToARGB(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert I010 to AR30. -LIBYUV_API -int I010ToAR30(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - -// Convert H010 to AR30. -LIBYUV_API -int H010ToAR30(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - -// Convert I010 to AB30. -LIBYUV_API -int I010ToAB30(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_ab30, - int dst_stride_ab30, - int width, - int height); - -// Convert H010 to AB30. -LIBYUV_API -int H010ToAB30(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_ab30, - int dst_stride_ab30, - int width, - int height); - -// BGRA little endian (argb in memory) to ARGB. -LIBYUV_API -int BGRAToARGB(const uint8_t* src_bgra, - int src_stride_bgra, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// ABGR little endian (rgba in memory) to ARGB. -LIBYUV_API -int ABGRToARGB(const uint8_t* src_abgr, - int src_stride_abgr, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// RGBA little endian (abgr in memory) to ARGB. 
-LIBYUV_API -int RGBAToARGB(const uint8_t* src_rgba, - int src_stride_rgba, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Deprecated function name. -#define BG24ToARGB RGB24ToARGB - -// RGB little endian (bgr in memory) to ARGB. -LIBYUV_API -int RGB24ToARGB(const uint8_t* src_rgb24, - int src_stride_rgb24, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// RGB big endian (rgb in memory) to ARGB. -LIBYUV_API -int RAWToARGB(const uint8_t* src_raw, - int src_stride_raw, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// RGB16 (RGBP fourcc) little endian to ARGB. -LIBYUV_API -int RGB565ToARGB(const uint8_t* src_rgb565, - int src_stride_rgb565, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// RGB15 (RGBO fourcc) little endian to ARGB. -LIBYUV_API -int ARGB1555ToARGB(const uint8_t* src_argb1555, - int src_stride_argb1555, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// RGB12 (R444 fourcc) little endian to ARGB. -LIBYUV_API -int ARGB4444ToARGB(const uint8_t* src_argb4444, - int src_stride_argb4444, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Aliases -#define AB30ToARGB AR30ToABGR -#define AB30ToABGR AR30ToARGB -#define AB30ToAR30 AR30ToAB30 - -// Convert AR30 To ARGB. -LIBYUV_API -int AR30ToARGB(const uint8_t* src_ar30, - int src_stride_ar30, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert AR30 To ABGR. -LIBYUV_API -int AR30ToABGR(const uint8_t* src_ar30, - int src_stride_ar30, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert AR30 To AB30. -LIBYUV_API -int AR30ToAB30(const uint8_t* src_ar30, - int src_stride_ar30, - uint8_t* dst_ab30, - int dst_stride_ab30, - int width, - int height); - -#ifdef HAVE_JPEG -// src_width/height provided by capture -// dst_width/height for clipping determine final size. 
-LIBYUV_API -int MJPGToARGB(const uint8_t* sample, - size_t sample_size, - uint8_t* dst_argb, - int dst_stride_argb, - int src_width, - int src_height, - int dst_width, - int dst_height); -#endif - -// Convert Android420 to ARGB. -LIBYUV_API -int Android420ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - int src_pixel_stride_uv, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert Android420 to ABGR. -LIBYUV_API -int Android420ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - int src_pixel_stride_uv, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert camera sample to ARGB with cropping, rotation and vertical flip. -// "sample_size" is needed to parse MJPG. -// "dst_stride_argb" number of bytes in a row of the dst_argb plane. -// Normally this would be the same as dst_width, with recommended alignment -// to 16 bytes for better efficiency. -// If rotation of 90 or 270 is used, stride is affected. The caller should -// allocate the I420 buffer according to rotation. -// "dst_stride_u" number of bytes in a row of the dst_u plane. -// Normally this would be the same as (dst_width + 1) / 2, with -// recommended alignment to 16 bytes for better efficiency. -// If rotation of 90 or 270 is used, stride is affected. -// "crop_x" and "crop_y" are starting position for cropping. -// To center, crop_x = (src_width - dst_width) / 2 -// crop_y = (src_height - dst_height) / 2 -// "src_width" / "src_height" is size of src_frame in pixels. -// "src_height" can be negative indicating a vertically flipped image source. -// "crop_width" / "crop_height" is the size to crop the src to. -// Must be less than or equal to src_width/src_height -// Cropping parameters are pre-rotation. -// "rotation" can be 0, 90, 180 or 270. 
-// "fourcc" is a fourcc. ie 'I420', 'YUY2' -// Returns 0 for successful; -1 for invalid parameter. Non-zero for failure. -LIBYUV_API -int ConvertToARGB(const uint8_t* sample, - size_t sample_size, - uint8_t* dst_argb, - int dst_stride_argb, - int crop_x, - int crop_y, - int src_width, - int src_height, - int crop_width, - int crop_height, - enum RotationMode rotation, - uint32_t fourcc); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_CONVERT_ARGB_H_ diff --git a/macos/third_party/include/libyuv/convert_from.h b/macos/third_party/include/libyuv/convert_from.h deleted file mode 100644 index e044d6bec2..0000000000 --- a/macos/third_party/include/libyuv/convert_from.h +++ /dev/null @@ -1,366 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_CONVERT_FROM_H_ -#define INCLUDE_LIBYUV_CONVERT_FROM_H_ - -#include "libyuv/basic_types.h" -#include "libyuv/rotate.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// See Also convert.h for conversions from formats to I420. - -// Convert 8 bit YUV to 10 bit. 
-#define H420ToH010 I420ToI010 -int I420ToI010(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint16_t* dst_y, - int dst_stride_y, - uint16_t* dst_u, - int dst_stride_u, - uint16_t* dst_v, - int dst_stride_v, - int width, - int height); - -LIBYUV_API -int I420ToI422(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -LIBYUV_API -int I420ToI444(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Copy to I400. Source can be I420, I422, I444, I400, NV12 or NV21. -LIBYUV_API -int I400Copy(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -LIBYUV_API -int I420ToNV12(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_uv, - int dst_stride_uv, - int width, - int height); - -LIBYUV_API -int I420ToNV21(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_vu, - int dst_stride_vu, - int width, - int height); - -LIBYUV_API -int I420ToYUY2(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_yuy2, - int dst_stride_yuy2, - int width, - int height); - -LIBYUV_API -int I420ToUYVY(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int 
src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_uyvy, - int dst_stride_uyvy, - int width, - int height); - -LIBYUV_API -int I420ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -LIBYUV_API -int I420ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height); - -LIBYUV_API -int I420ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -LIBYUV_API -int I420ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height); - -LIBYUV_API -int I420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -LIBYUV_API -int I420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height); - -LIBYUV_API -int H420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -LIBYUV_API -int H420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height); - -LIBYUV_API 
-int I420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height); - -LIBYUV_API -int J420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_frame, - int dst_stride_frame, - int width, - int height); - -LIBYUV_API -int H420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_frame, - int dst_stride_frame, - int width, - int height); - -LIBYUV_API -int I422ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height); - -// Convert I420 To RGB565 with 4x4 dither matrix (16 bytes). -// Values in dither matrix from 0 to 7 recommended. -// The order of the dither matrix is first byte is upper left. - -LIBYUV_API -int I420ToRGB565Dither(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const uint8_t* dither4x4, - int width, - int height); - -LIBYUV_API -int I420ToARGB1555(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb1555, - int dst_stride_argb1555, - int width, - int height); - -LIBYUV_API -int I420ToARGB4444(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb4444, - int dst_stride_argb4444, - int width, - int height); - -// Convert I420 to AR30. 
-LIBYUV_API -int I420ToAR30(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - -// Convert H420 to AR30. -LIBYUV_API -int H420ToAR30(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - -// Convert I420 to specified format. -// "dst_sample_stride" is bytes in a row for the destination. Pass 0 if the -// buffer has contiguous rows. Can be negative. A multiple of 16 is optimal. -LIBYUV_API -int ConvertFromI420(const uint8_t* y, - int y_stride, - const uint8_t* u, - int u_stride, - const uint8_t* v, - int v_stride, - uint8_t* dst_sample, - int dst_sample_stride, - int width, - int height, - uint32_t fourcc); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_CONVERT_FROM_H_ diff --git a/macos/third_party/include/libyuv/convert_from_argb.h b/macos/third_party/include/libyuv/convert_from_argb.h deleted file mode 100644 index 05c815a093..0000000000 --- a/macos/third_party/include/libyuv/convert_from_argb.h +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Copyright 2012 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_CONVERT_FROM_ARGB_H_ -#define INCLUDE_LIBYUV_CONVERT_FROM_ARGB_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Copy ARGB to ARGB. 
-#define ARGBToARGB ARGBCopy -LIBYUV_API -int ARGBCopy(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert ARGB To BGRA. -LIBYUV_API -int ARGBToBGRA(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height); - -// Convert ARGB To ABGR. -LIBYUV_API -int ARGBToABGR(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert ARGB To RGBA. -LIBYUV_API -int ARGBToRGBA(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height); - -// Aliases -#define ARGBToAB30 ABGRToAR30 -#define ABGRToAB30 ARGBToAR30 - -// Convert ABGR To AR30. -LIBYUV_API -int ABGRToAR30(const uint8_t* src_abgr, - int src_stride_abgr, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - -// Convert ARGB To AR30. -LIBYUV_API -int ARGBToAR30(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - -// Convert ARGB To RGB24. -LIBYUV_API -int ARGBToRGB24(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -// Convert ARGB To RAW. -LIBYUV_API -int ARGBToRAW(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height); - -// Convert ARGB To RGB565. -LIBYUV_API -int ARGBToRGB565(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height); - -// Convert ARGB To RGB565 with 4x4 dither matrix (16 bytes). -// Values in dither matrix from 0 to 7 recommended. -// The order of the dither matrix is first byte is upper left. -// TODO(fbarchard): Consider pointer to 2d array for dither4x4. 
-// const uint8_t(*dither)[4][4]; -LIBYUV_API -int ARGBToRGB565Dither(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const uint8_t* dither4x4, - int width, - int height); - -// Convert ARGB To ARGB1555. -LIBYUV_API -int ARGBToARGB1555(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb1555, - int dst_stride_argb1555, - int width, - int height); - -// Convert ARGB To ARGB4444. -LIBYUV_API -int ARGBToARGB4444(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb4444, - int dst_stride_argb4444, - int width, - int height); - -// Convert ARGB To I444. -LIBYUV_API -int ARGBToI444(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert ARGB To I422. -LIBYUV_API -int ARGBToI422(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert ARGB To I420. (also in convert.h) -LIBYUV_API -int ARGBToI420(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert ARGB to J420. (JPeg full range I420). -LIBYUV_API -int ARGBToJ420(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_yj, - int dst_stride_yj, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert ARGB to J422. -LIBYUV_API -int ARGBToJ422(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_yj, - int dst_stride_yj, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert ARGB to J400. (JPeg full range). 
-LIBYUV_API -int ARGBToJ400(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_yj, - int dst_stride_yj, - int width, - int height); - -// Convert ARGB to I400. -LIBYUV_API -int ARGBToI400(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -// Convert ARGB to G. (Reverse of J400toARGB, which replicates G back to ARGB) -LIBYUV_API -int ARGBToG(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_g, - int dst_stride_g, - int width, - int height); - -// Convert ARGB To NV12. -LIBYUV_API -int ARGBToNV12(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_uv, - int dst_stride_uv, - int width, - int height); - -// Convert ARGB To NV21. -LIBYUV_API -int ARGBToNV21(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_vu, - int dst_stride_vu, - int width, - int height); - -// Convert ARGB To NV21. -LIBYUV_API -int ARGBToNV21(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_vu, - int dst_stride_vu, - int width, - int height); - -// Convert ARGB To YUY2. -LIBYUV_API -int ARGBToYUY2(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_yuy2, - int dst_stride_yuy2, - int width, - int height); - -// Convert ARGB To UYVY. -LIBYUV_API -int ARGBToUYVY(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_uyvy, - int dst_stride_uyvy, - int width, - int height); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_CONVERT_FROM_ARGB_H_ diff --git a/macos/third_party/include/libyuv/cpu_id.h b/macos/third_party/include/libyuv/cpu_id.h deleted file mode 100644 index b01cd25c57..0000000000 --- a/macos/third_party/include/libyuv/cpu_id.h +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_CPU_ID_H_ -#define INCLUDE_LIBYUV_CPU_ID_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Internal flag to indicate cpuid requires initialization. -static const int kCpuInitialized = 0x1; - -// These flags are only valid on ARM processors. -static const int kCpuHasARM = 0x2; -static const int kCpuHasNEON = 0x4; -// 0x8 reserved for future ARM flag. - -// These flags are only valid on x86 processors. -static const int kCpuHasX86 = 0x10; -static const int kCpuHasSSE2 = 0x20; -static const int kCpuHasSSSE3 = 0x40; -static const int kCpuHasSSE41 = 0x80; -static const int kCpuHasSSE42 = 0x100; // unused at this time. -static const int kCpuHasAVX = 0x200; -static const int kCpuHasAVX2 = 0x400; -static const int kCpuHasERMS = 0x800; -static const int kCpuHasFMA3 = 0x1000; -static const int kCpuHasF16C = 0x2000; -static const int kCpuHasGFNI = 0x4000; -static const int kCpuHasAVX512BW = 0x8000; -static const int kCpuHasAVX512VL = 0x10000; -static const int kCpuHasAVX512VBMI = 0x20000; -static const int kCpuHasAVX512VBMI2 = 0x40000; -static const int kCpuHasAVX512VBITALG = 0x80000; -static const int kCpuHasAVX512VPOPCNTDQ = 0x100000; - -// These flags are only valid on MIPS processors. -static const int kCpuHasMIPS = 0x200000; -static const int kCpuHasMSA = 0x400000; -static const int kCpuHasMMI = 0x800000; - -// Optional init function. TestCpuFlag does an auto-init. -// Returns cpu_info flags. -LIBYUV_API -int InitCpuFlags(void); - -// Detect CPU has SSE2 etc. -// Test_flag parameter should be one of kCpuHas constants above. 
-// Returns non-zero if instruction set is detected -static __inline int TestCpuFlag(int test_flag) { - LIBYUV_API extern int cpu_info_; -#ifdef __ATOMIC_RELAXED - int cpu_info = __atomic_load_n(&cpu_info_, __ATOMIC_RELAXED); -#else - int cpu_info = cpu_info_; -#endif - return (!cpu_info ? InitCpuFlags() : cpu_info) & test_flag; -} - -// Internal function for parsing /proc/cpuinfo. -LIBYUV_API -int ArmCpuCaps(const char* cpuinfo_name); - -// For testing, allow CPU flags to be disabled. -// ie MaskCpuFlags(~kCpuHasSSSE3) to disable SSSE3. -// MaskCpuFlags(-1) to enable all cpu specific optimizations. -// MaskCpuFlags(1) to disable all cpu specific optimizations. -// MaskCpuFlags(0) to reset state so next call will auto init. -// Returns cpu_info flags. -LIBYUV_API -int MaskCpuFlags(int enable_flags); - -// Sets the CPU flags to |cpu_flags|, bypassing the detection code. |cpu_flags| -// should be a valid combination of the kCpuHas constants above and include -// kCpuInitialized. Use this method when running in a sandboxed process where -// the detection code might fail (as it might access /proc/cpuinfo). In such -// cases the cpu_info can be obtained from a non sandboxed process by calling -// InitCpuFlags() and passed to the sandboxed process (via command line -// parameters, IPC...) which can then call this method to initialize the CPU -// flags. -// Notes: -// - when specifying 0 for |cpu_flags|, the auto initialization is enabled -// again. -// - enabling CPU features that are not supported by the CPU will result in -// undefined behavior. -// TODO(fbarchard): consider writing a helper function that translates from -// other library CPU info to libyuv CPU info and add a .md doc that explains -// CPU detection. -static __inline void SetCpuFlags(int cpu_flags) { - LIBYUV_API extern int cpu_info_; -#ifdef __ATOMIC_RELAXED - __atomic_store_n(&cpu_info_, cpu_flags, __ATOMIC_RELAXED); -#else - cpu_info_ = cpu_flags; -#endif -} - -// Low level cpuid for X86. 
Returns zeros on other CPUs. -// eax is the info type that you want. -// ecx is typically the cpu number, and should normally be zero. -LIBYUV_API -void CpuId(int info_eax, int info_ecx, int* cpu_info); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_CPU_ID_H_ diff --git a/macos/third_party/include/libyuv/macros_msa.h b/macos/third_party/include/libyuv/macros_msa.h deleted file mode 100644 index 29997ce11f..0000000000 --- a/macos/third_party/include/libyuv/macros_msa.h +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright 2016 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_MACROS_MSA_H_ -#define INCLUDE_LIBYUV_MACROS_MSA_H_ - -#if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) -#include -#include - -#if (__mips_isa_rev >= 6) -#define LW(psrc) \ - ({ \ - const uint8_t* psrc_lw_m = (const uint8_t*)(psrc); \ - uint32_t val_m; \ - asm volatile("lw %[val_m], %[psrc_lw_m] \n" \ - : [val_m] "=r"(val_m) \ - : [psrc_lw_m] "m"(*psrc_lw_m)); \ - val_m; \ - }) - -#if (__mips == 64) -#define LD(psrc) \ - ({ \ - const uint8_t* psrc_ld_m = (const uint8_t*)(psrc); \ - uint64_t val_m = 0; \ - asm volatile("ld %[val_m], %[psrc_ld_m] \n" \ - : [val_m] "=r"(val_m) \ - : [psrc_ld_m] "m"(*psrc_ld_m)); \ - val_m; \ - }) -#else // !(__mips == 64) -#define LD(psrc) \ - ({ \ - const uint8_t* psrc_ld_m = (const uint8_t*)(psrc); \ - uint32_t val0_m, val1_m; \ - uint64_t val_m = 0; \ - val0_m = LW(psrc_ld_m); \ - val1_m = LW(psrc_ld_m + 4); \ - val_m = (uint64_t)(val1_m); /* NOLINT */ \ - val_m = (uint64_t)((val_m << 32) & 0xFFFFFFFF00000000); /* NOLINT */ \ - val_m = 
(uint64_t)(val_m | (uint64_t)val0_m); /* NOLINT */ \ - val_m; \ - }) -#endif // (__mips == 64) - -#define SW(val, pdst) \ - ({ \ - uint8_t* pdst_sw_m = (uint8_t*)(pdst); /* NOLINT */ \ - uint32_t val_m = (val); \ - asm volatile("sw %[val_m], %[pdst_sw_m] \n" \ - : [pdst_sw_m] "=m"(*pdst_sw_m) \ - : [val_m] "r"(val_m)); \ - }) - -#if (__mips == 64) -#define SD(val, pdst) \ - ({ \ - uint8_t* pdst_sd_m = (uint8_t*)(pdst); /* NOLINT */ \ - uint64_t val_m = (val); \ - asm volatile("sd %[val_m], %[pdst_sd_m] \n" \ - : [pdst_sd_m] "=m"(*pdst_sd_m) \ - : [val_m] "r"(val_m)); \ - }) -#else // !(__mips == 64) -#define SD(val, pdst) \ - ({ \ - uint8_t* pdst_sd_m = (uint8_t*)(pdst); /* NOLINT */ \ - uint32_t val0_m, val1_m; \ - val0_m = (uint32_t)((val)&0x00000000FFFFFFFF); \ - val1_m = (uint32_t)(((val) >> 32) & 0x00000000FFFFFFFF); \ - SW(val0_m, pdst_sd_m); \ - SW(val1_m, pdst_sd_m + 4); \ - }) -#endif // !(__mips == 64) -#else // !(__mips_isa_rev >= 6) -#define LW(psrc) \ - ({ \ - const uint8_t* psrc_lw_m = (const uint8_t*)(psrc); \ - uint32_t val_m; \ - asm volatile("ulw %[val_m], %[psrc_lw_m] \n" \ - : [val_m] "=r"(val_m) \ - : [psrc_lw_m] "m"(*psrc_lw_m)); \ - val_m; \ - }) - -#if (__mips == 64) -#define LD(psrc) \ - ({ \ - const uint8_t* psrc_ld_m = (const uint8_t*)(psrc); \ - uint64_t val_m = 0; \ - asm volatile("uld %[val_m], %[psrc_ld_m] \n" \ - : [val_m] "=r"(val_m) \ - : [psrc_ld_m] "m"(*psrc_ld_m)); \ - val_m; \ - }) -#else // !(__mips == 64) -#define LD(psrc) \ - ({ \ - const uint8_t* psrc_ld_m = (const uint8_t*)(psrc); \ - uint32_t val0_m, val1_m; \ - uint64_t val_m = 0; \ - val0_m = LW(psrc_ld_m); \ - val1_m = LW(psrc_ld_m + 4); \ - val_m = (uint64_t)(val1_m); /* NOLINT */ \ - val_m = (uint64_t)((val_m << 32) & 0xFFFFFFFF00000000); /* NOLINT */ \ - val_m = (uint64_t)(val_m | (uint64_t)val0_m); /* NOLINT */ \ - val_m; \ - }) -#endif // (__mips == 64) - -#define SW(val, pdst) \ - ({ \ - uint8_t* pdst_sw_m = (uint8_t*)(pdst); /* NOLINT */ \ - uint32_t val_m = 
(val); \ - asm volatile("usw %[val_m], %[pdst_sw_m] \n" \ - : [pdst_sw_m] "=m"(*pdst_sw_m) \ - : [val_m] "r"(val_m)); \ - }) - -#define SD(val, pdst) \ - ({ \ - uint8_t* pdst_sd_m = (uint8_t*)(pdst); /* NOLINT */ \ - uint32_t val0_m, val1_m; \ - val0_m = (uint32_t)((val)&0x00000000FFFFFFFF); \ - val1_m = (uint32_t)(((val) >> 32) & 0x00000000FFFFFFFF); \ - SW(val0_m, pdst_sd_m); \ - SW(val1_m, pdst_sd_m + 4); \ - }) -#endif // (__mips_isa_rev >= 6) - -// TODO(fbarchard): Consider removing __VAR_ARGS versions. -#define LD_B(RTYPE, psrc) *((RTYPE*)(psrc)) /* NOLINT */ -#define LD_UB(...) LD_B(const v16u8, __VA_ARGS__) - -#define ST_B(RTYPE, in, pdst) *((RTYPE*)(pdst)) = (in) /* NOLINT */ -#define ST_UB(...) ST_B(v16u8, __VA_ARGS__) - -#define ST_H(RTYPE, in, pdst) *((RTYPE*)(pdst)) = (in) /* NOLINT */ -#define ST_UH(...) ST_H(v8u16, __VA_ARGS__) - -/* Description : Load two vectors with 16 'byte' sized elements - Arguments : Inputs - psrc, stride - Outputs - out0, out1 - Return Type - as per RTYPE - Details : Load 16 byte elements in 'out0' from (psrc) - Load 16 byte elements in 'out1' from (psrc + stride) -*/ -#define LD_B2(RTYPE, psrc, stride, out0, out1) \ - { \ - out0 = LD_B(RTYPE, (psrc)); \ - out1 = LD_B(RTYPE, (psrc) + stride); \ - } -#define LD_UB2(...) LD_B2(const v16u8, __VA_ARGS__) - -#define LD_B4(RTYPE, psrc, stride, out0, out1, out2, out3) \ - { \ - LD_B2(RTYPE, (psrc), stride, out0, out1); \ - LD_B2(RTYPE, (psrc) + 2 * stride, stride, out2, out3); \ - } -#define LD_UB4(...) LD_B4(const v16u8, __VA_ARGS__) - -/* Description : Store two vectors with stride each having 16 'byte' sized - elements - Arguments : Inputs - in0, in1, pdst, stride - Details : Store 16 byte elements from 'in0' to (pdst) - Store 16 byte elements from 'in1' to (pdst + stride) -*/ -#define ST_B2(RTYPE, in0, in1, pdst, stride) \ - { \ - ST_B(RTYPE, in0, (pdst)); \ - ST_B(RTYPE, in1, (pdst) + stride); \ - } -#define ST_UB2(...) 
ST_B2(v16u8, __VA_ARGS__) - -#define ST_B4(RTYPE, in0, in1, in2, in3, pdst, stride) \ - { \ - ST_B2(RTYPE, in0, in1, (pdst), stride); \ - ST_B2(RTYPE, in2, in3, (pdst) + 2 * stride, stride); \ - } -#define ST_UB4(...) ST_B4(v16u8, __VA_ARGS__) - -/* Description : Store vectors of 8 halfword elements with stride - Arguments : Inputs - in0, in1, pdst, stride - Details : Store 8 halfword elements from 'in0' to (pdst) - Store 8 halfword elements from 'in1' to (pdst + stride) -*/ -#define ST_H2(RTYPE, in0, in1, pdst, stride) \ - { \ - ST_H(RTYPE, in0, (pdst)); \ - ST_H(RTYPE, in1, (pdst) + stride); \ - } -#define ST_UH2(...) ST_H2(v8u16, __VA_ARGS__) - -// TODO(fbarchard): Consider using __msa_vshf_b and __msa_ilvr_b directly. -/* Description : Shuffle byte vector elements as per mask vector - Arguments : Inputs - in0, in1, in2, in3, mask0, mask1 - Outputs - out0, out1 - Return Type - as per RTYPE - Details : Byte elements from 'in0' & 'in1' are copied selectively to - 'out0' as per control vector 'mask0' -*/ -#define VSHF_B2(RTYPE, in0, in1, in2, in3, mask0, mask1, out0, out1) \ - { \ - out0 = (RTYPE)__msa_vshf_b((v16i8)mask0, (v16i8)in1, (v16i8)in0); \ - out1 = (RTYPE)__msa_vshf_b((v16i8)mask1, (v16i8)in3, (v16i8)in2); \ - } -#define VSHF_B2_UB(...) VSHF_B2(v16u8, __VA_ARGS__) - -/* Description : Interleave both left and right half of input vectors - Arguments : Inputs - in0, in1 - Outputs - out0, out1 - Return Type - as per RTYPE - Details : Right half of byte elements from 'in0' and 'in1' are - interleaved and written to 'out0' -*/ -#define ILVRL_B2(RTYPE, in0, in1, out0, out1) \ - { \ - out0 = (RTYPE)__msa_ilvr_b((v16i8)in0, (v16i8)in1); \ - out1 = (RTYPE)__msa_ilvl_b((v16i8)in0, (v16i8)in1); \ - } -#define ILVRL_B2_UB(...) 
ILVRL_B2(v16u8, __VA_ARGS__) - -#endif /* !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) */ - -#endif // INCLUDE_LIBYUV_MACROS_MSA_H_ diff --git a/macos/third_party/include/libyuv/mjpeg_decoder.h b/macos/third_party/include/libyuv/mjpeg_decoder.h deleted file mode 100644 index 275f8d4c18..0000000000 --- a/macos/third_party/include/libyuv/mjpeg_decoder.h +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright 2012 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_MJPEG_DECODER_H_ -#define INCLUDE_LIBYUV_MJPEG_DECODER_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -// NOTE: For a simplified public API use convert.h MJPGToI420(). - -struct jpeg_common_struct; -struct jpeg_decompress_struct; -struct jpeg_source_mgr; - -namespace libyuv { - -#ifdef __cplusplus -extern "C" { -#endif - -LIBYUV_BOOL ValidateJpeg(const uint8_t* sample, size_t sample_size); - -#ifdef __cplusplus -} // extern "C" -#endif - -static const uint32_t kUnknownDataSize = 0xFFFFFFFF; - -enum JpegSubsamplingType { - kJpegYuv420, - kJpegYuv422, - kJpegYuv444, - kJpegYuv400, - kJpegUnknown -}; - -struct Buffer { - const uint8_t* data; - int len; -}; - -struct BufferVector { - Buffer* buffers; - int len; - int pos; -}; - -struct SetJmpErrorMgr; - -// MJPEG ("Motion JPEG") is a pseudo-standard video codec where the frames are -// simply independent JPEG images with a fixed huffman table (which is omitted). -// It is rarely used in video transmission, but is common as a camera capture -// format, especially in Logitech devices. This class implements a decoder for -// MJPEG frames. 
-// -// See http://tools.ietf.org/html/rfc2435 -class LIBYUV_API MJpegDecoder { - public: - typedef void (*CallbackFunction)(void* opaque, - const uint8_t* const* data, - const int* strides, - int rows); - - static const int kColorSpaceUnknown; - static const int kColorSpaceGrayscale; - static const int kColorSpaceRgb; - static const int kColorSpaceYCbCr; - static const int kColorSpaceCMYK; - static const int kColorSpaceYCCK; - - MJpegDecoder(); - ~MJpegDecoder(); - - // Loads a new frame, reads its headers, and determines the uncompressed - // image format. - // Returns LIBYUV_TRUE if image looks valid and format is supported. - // If return value is LIBYUV_TRUE, then the values for all the following - // getters are populated. - // src_len is the size of the compressed mjpeg frame in bytes. - LIBYUV_BOOL LoadFrame(const uint8_t* src, size_t src_len); - - // Returns width of the last loaded frame in pixels. - int GetWidth(); - - // Returns height of the last loaded frame in pixels. - int GetHeight(); - - // Returns format of the last loaded frame. The return value is one of the - // kColorSpace* constants. - int GetColorSpace(); - - // Number of color components in the color space. - int GetNumComponents(); - - // Sample factors of the n-th component. - int GetHorizSampFactor(int component); - - int GetVertSampFactor(int component); - - int GetHorizSubSampFactor(int component); - - int GetVertSubSampFactor(int component); - - // Public for testability. - int GetImageScanlinesPerImcuRow(); - - // Public for testability. - int GetComponentScanlinesPerImcuRow(int component); - - // Width of a component in bytes. - int GetComponentWidth(int component); - - // Height of a component. - int GetComponentHeight(int component); - - // Width of a component in bytes with padding for DCTSIZE. Public for testing. - int GetComponentStride(int component); - - // Size of a component in bytes. 
- int GetComponentSize(int component); - - // Call this after LoadFrame() if you decide you don't want to decode it - // after all. - LIBYUV_BOOL UnloadFrame(); - - // Decodes the entire image into a one-buffer-per-color-component format. - // dst_width must match exactly. dst_height must be <= to image height; if - // less, the image is cropped. "planes" must have size equal to at least - // GetNumComponents() and they must point to non-overlapping buffers of size - // at least GetComponentSize(i). The pointers in planes are incremented - // to point to after the end of the written data. - // TODO(fbarchard): Add dst_x, dst_y to allow specific rect to be decoded. - LIBYUV_BOOL DecodeToBuffers(uint8_t** planes, int dst_width, int dst_height); - - // Decodes the entire image and passes the data via repeated calls to a - // callback function. Each call will get the data for a whole number of - // image scanlines. - // TODO(fbarchard): Add dst_x, dst_y to allow specific rect to be decoded. - LIBYUV_BOOL DecodeToCallback(CallbackFunction fn, - void* opaque, - int dst_width, - int dst_height); - - // The helper function which recognizes the jpeg sub-sampling type. - static JpegSubsamplingType JpegSubsamplingTypeHelper( - int* subsample_x, - int* subsample_y, - int number_of_components); - - private: - void AllocOutputBuffers(int num_outbufs); - void DestroyOutputBuffers(); - - LIBYUV_BOOL StartDecode(); - LIBYUV_BOOL FinishDecode(); - - void SetScanlinePointers(uint8_t** data); - LIBYUV_BOOL DecodeImcuRow(); - - int GetComponentScanlinePadding(int component); - - // A buffer holding the input data for a frame. - Buffer buf_; - BufferVector buf_vec_; - - jpeg_decompress_struct* decompress_struct_; - jpeg_source_mgr* source_mgr_; - SetJmpErrorMgr* error_mgr_; - - // LIBYUV_TRUE iff at least one component has scanline padding. (i.e., - // GetComponentScanlinePadding() != 0.) - LIBYUV_BOOL has_scanline_padding_; - - // Temporaries used to point to scanline outputs. 
- int num_outbufs_; // Outermost size of all arrays below. - uint8_t*** scanlines_; - int* scanlines_sizes_; - // Temporary buffer used for decoding when we can't decode directly to the - // output buffers. Large enough for just one iMCU row. - uint8_t** databuf_; - int* databuf_strides_; -}; - -} // namespace libyuv - -#endif // __cplusplus -#endif // INCLUDE_LIBYUV_MJPEG_DECODER_H_ diff --git a/macos/third_party/include/libyuv/planar_functions.h b/macos/third_party/include/libyuv/planar_functions.h deleted file mode 100644 index 91137baba2..0000000000 --- a/macos/third_party/include/libyuv/planar_functions.h +++ /dev/null @@ -1,847 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_PLANAR_FUNCTIONS_H_ -#define INCLUDE_LIBYUV_PLANAR_FUNCTIONS_H_ - -#include "libyuv/basic_types.h" - -// TODO(fbarchard): Remove the following headers includes. -#include "libyuv/convert.h" -#include "libyuv/convert_argb.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// TODO(fbarchard): Move cpu macros to row.h -#if defined(__pnacl__) || defined(__CLR_VER) || \ - (defined(__native_client__) && defined(__x86_64__)) || \ - (defined(__i386__) && !defined(__SSE__) && !defined(__clang__)) -#define LIBYUV_DISABLE_X86 -#endif -// MemorySanitizer does not support assembly code yet. 
http://crbug.com/344505 -#if defined(__has_feature) -#if __has_feature(memory_sanitizer) -#define LIBYUV_DISABLE_X86 -#endif -#endif -// The following are available on all x86 platforms: -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(_M_IX86) || defined(__x86_64__) || defined(__i386__)) -#define HAS_ARGBAFFINEROW_SSE2 -#endif - -// Copy a plane of data. -LIBYUV_API -void CopyPlane(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -LIBYUV_API -void CopyPlane_16(const uint16_t* src_y, - int src_stride_y, - uint16_t* dst_y, - int dst_stride_y, - int width, - int height); - -LIBYUV_API -void Convert16To8Plane(const uint16_t* src_y, - int src_stride_y, - uint8_t* dst_y, - int dst_stride_y, - int scale, // 16384 for 10 bits - int width, - int height); - -LIBYUV_API -void Convert8To16Plane(const uint8_t* src_y, - int src_stride_y, - uint16_t* dst_y, - int dst_stride_y, - int scale, // 1024 for 10 bits - int width, - int height); - -// Set a plane of data to a 32 bit value. -LIBYUV_API -void SetPlane(uint8_t* dst_y, - int dst_stride_y, - int width, - int height, - uint32_t value); - -// Split interleaved UV plane into separate U and V planes. -LIBYUV_API -void SplitUVPlane(const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Merge separate U and V planes into one interleaved UV plane. -LIBYUV_API -void MergeUVPlane(const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_uv, - int dst_stride_uv, - int width, - int height); - -// Split interleaved RGB plane into separate R, G and B planes. 
-LIBYUV_API -void SplitRGBPlane(const uint8_t* src_rgb, - int src_stride_rgb, - uint8_t* dst_r, - int dst_stride_r, - uint8_t* dst_g, - int dst_stride_g, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); - -// Merge separate R, G and B planes into one interleaved RGB plane. -LIBYUV_API -void MergeRGBPlane(const uint8_t* src_r, - int src_stride_r, - const uint8_t* src_g, - int src_stride_g, - const uint8_t* src_b, - int src_stride_b, - uint8_t* dst_rgb, - int dst_stride_rgb, - int width, - int height); - -// Copy I400. Supports inverting. -LIBYUV_API -int I400ToI400(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -#define J400ToJ400 I400ToI400 - -// Copy I422 to I422. -#define I422ToI422 I422Copy -LIBYUV_API -int I422Copy(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Copy I444 to I444. -#define I444ToI444 I444Copy -LIBYUV_API -int I444Copy(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert YUY2 to I422. -LIBYUV_API -int YUY2ToI422(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Convert UYVY to I422. 
-LIBYUV_API -int UYVYToI422(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -LIBYUV_API -int YUY2ToNV12(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_uv, - int dst_stride_uv, - int width, - int height); - -LIBYUV_API -int UYVYToNV12(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_uv, - int dst_stride_uv, - int width, - int height); - -LIBYUV_API -int YUY2ToY(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -// Convert I420 to I400. (calls CopyPlane ignoring u/v). -LIBYUV_API -int I420ToI400(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -// Alias -#define J420ToJ400 I420ToI400 -#define I420ToI420Mirror I420Mirror - -// I420 mirror. -LIBYUV_API -int I420Mirror(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Alias -#define I400ToI400Mirror I400Mirror - -// I400 mirror. A single plane is mirrored horizontally. -// Pass negative height to achieve 180 degree rotation. -LIBYUV_API -int I400Mirror(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -// Alias -#define ARGBToARGBMirror ARGBMirror - -// ARGB mirror. -LIBYUV_API -int ARGBMirror(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert NV12 to RGB565. 
-LIBYUV_API -int NV12ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height); - -// I422ToARGB is in convert_argb.h -// Convert I422 to BGRA. -LIBYUV_API -int I422ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height); - -// Convert I422 to ABGR. -LIBYUV_API -int I422ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert I422 to RGBA. -LIBYUV_API -int I422ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height); - -// Alias -#define RGB24ToRAW RAWToRGB24 - -LIBYUV_API -int RAWToRGB24(const uint8_t* src_raw, - int src_stride_raw, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -// Draw a rectangle into I420. -LIBYUV_API -int I420Rect(uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int x, - int y, - int width, - int height, - int value_y, - int value_u, - int value_v); - -// Draw a rectangle into ARGB. -LIBYUV_API -int ARGBRect(uint8_t* dst_argb, - int dst_stride_argb, - int dst_x, - int dst_y, - int width, - int height, - uint32_t value); - -// Convert ARGB to gray scale ARGB. -LIBYUV_API -int ARGBGrayTo(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Make a rectangle of ARGB gray scale. 
-LIBYUV_API -int ARGBGray(uint8_t* dst_argb, - int dst_stride_argb, - int dst_x, - int dst_y, - int width, - int height); - -// Make a rectangle of ARGB Sepia tone. -LIBYUV_API -int ARGBSepia(uint8_t* dst_argb, - int dst_stride_argb, - int dst_x, - int dst_y, - int width, - int height); - -// Apply a matrix rotation to each ARGB pixel. -// matrix_argb is 4 signed ARGB values. -128 to 127 representing -2 to 2. -// The first 4 coefficients apply to B, G, R, A and produce B of the output. -// The next 4 coefficients apply to B, G, R, A and produce G of the output. -// The next 4 coefficients apply to B, G, R, A and produce R of the output. -// The last 4 coefficients apply to B, G, R, A and produce A of the output. -LIBYUV_API -int ARGBColorMatrix(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - const int8_t* matrix_argb, - int width, - int height); - -// Deprecated. Use ARGBColorMatrix instead. -// Apply a matrix rotation to each ARGB pixel. -// matrix_argb is 3 signed ARGB values. -128 to 127 representing -1 to 1. -// The first 4 coefficients apply to B, G, R, A and produce B of the output. -// The next 4 coefficients apply to B, G, R, A and produce G of the output. -// The last 4 coefficients apply to B, G, R, A and produce R of the output. -LIBYUV_API -int RGBColorMatrix(uint8_t* dst_argb, - int dst_stride_argb, - const int8_t* matrix_rgb, - int dst_x, - int dst_y, - int width, - int height); - -// Apply a color table each ARGB pixel. -// Table contains 256 ARGB values. -LIBYUV_API -int ARGBColorTable(uint8_t* dst_argb, - int dst_stride_argb, - const uint8_t* table_argb, - int dst_x, - int dst_y, - int width, - int height); - -// Apply a color table each ARGB pixel but preserve destination alpha. -// Table contains 256 ARGB values. 
-LIBYUV_API -int RGBColorTable(uint8_t* dst_argb, - int dst_stride_argb, - const uint8_t* table_argb, - int dst_x, - int dst_y, - int width, - int height); - -// Apply a luma/color table each ARGB pixel but preserve destination alpha. -// Table contains 32768 values indexed by [Y][C] where 7 it 7 bit luma from -// RGB (YJ style) and C is an 8 bit color component (R, G or B). -LIBYUV_API -int ARGBLumaColorTable(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - const uint8_t* luma, - int width, - int height); - -// Apply a 3 term polynomial to ARGB values. -// poly points to a 4x4 matrix. The first row is constants. The 2nd row is -// coefficients for b, g, r and a. The 3rd row is coefficients for b squared, -// g squared, r squared and a squared. The 4rd row is coefficients for b to -// the 3, g to the 3, r to the 3 and a to the 3. The values are summed and -// result clamped to 0 to 255. -// A polynomial approximation can be dirived using software such as 'R'. - -LIBYUV_API -int ARGBPolynomial(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - const float* poly, - int width, - int height); - -// Convert plane of 16 bit shorts to half floats. -// Source values are multiplied by scale before storing as half float. -LIBYUV_API -int HalfFloatPlane(const uint16_t* src_y, - int src_stride_y, - uint16_t* dst_y, - int dst_stride_y, - float scale, - int width, - int height); - -// Convert a buffer of bytes to floats, scale the values and store as floats. -LIBYUV_API -int ByteToFloat(const uint8_t* src_y, float* dst_y, float scale, int width); - -// Quantize a rectangle of ARGB. Alpha unaffected. -// scale is a 16 bit fractional fixed point scaler between 0 and 65535. -// interval_size should be a value between 1 and 255. -// interval_offset should be a value between 0 and 255. 
-LIBYUV_API -int ARGBQuantize(uint8_t* dst_argb, - int dst_stride_argb, - int scale, - int interval_size, - int interval_offset, - int dst_x, - int dst_y, - int width, - int height); - -// Copy ARGB to ARGB. -LIBYUV_API -int ARGBCopy(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Copy Alpha channel of ARGB to alpha of ARGB. -LIBYUV_API -int ARGBCopyAlpha(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Extract the alpha channel from ARGB. -LIBYUV_API -int ARGBExtractAlpha(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_a, - int dst_stride_a, - int width, - int height); - -// Copy Y channel to Alpha of ARGB. -LIBYUV_API -int ARGBCopyYToAlpha(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -typedef void (*ARGBBlendRow)(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); - -// Get function to Alpha Blend ARGB pixels and store to destination. -LIBYUV_API -ARGBBlendRow GetARGBBlend(); - -// Alpha Blend ARGB images and store to destination. -// Source is pre-multiplied by alpha using ARGBAttenuate. -// Alpha of destination is set to 255. -LIBYUV_API -int ARGBBlend(const uint8_t* src_argb0, - int src_stride_argb0, - const uint8_t* src_argb1, - int src_stride_argb1, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Alpha Blend plane and store to destination. -// Source is not pre-multiplied by alpha. -LIBYUV_API -int BlendPlane(const uint8_t* src_y0, - int src_stride_y0, - const uint8_t* src_y1, - int src_stride_y1, - const uint8_t* alpha, - int alpha_stride, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -// Alpha Blend YUV images and store to destination. -// Source is not pre-multiplied by alpha. 
-// Alpha is full width x height and subsampled to half size to apply to UV. -LIBYUV_API -int I420Blend(const uint8_t* src_y0, - int src_stride_y0, - const uint8_t* src_u0, - int src_stride_u0, - const uint8_t* src_v0, - int src_stride_v0, - const uint8_t* src_y1, - int src_stride_y1, - const uint8_t* src_u1, - int src_stride_u1, - const uint8_t* src_v1, - int src_stride_v1, - const uint8_t* alpha, - int alpha_stride, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - -// Multiply ARGB image by ARGB image. Shifted down by 8. Saturates to 255. -LIBYUV_API -int ARGBMultiply(const uint8_t* src_argb0, - int src_stride_argb0, - const uint8_t* src_argb1, - int src_stride_argb1, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Add ARGB image with ARGB image. Saturates to 255. -LIBYUV_API -int ARGBAdd(const uint8_t* src_argb0, - int src_stride_argb0, - const uint8_t* src_argb1, - int src_stride_argb1, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Subtract ARGB image (argb1) from ARGB image (argb0). Saturates to 0. -LIBYUV_API -int ARGBSubtract(const uint8_t* src_argb0, - int src_stride_argb0, - const uint8_t* src_argb1, - int src_stride_argb1, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert I422 to YUY2. -LIBYUV_API -int I422ToYUY2(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_yuy2, - int dst_stride_yuy2, - int width, - int height); - -// Convert I422 to UYVY. -LIBYUV_API -int I422ToUYVY(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_uyvy, - int dst_stride_uyvy, - int width, - int height); - -// Convert unattentuated ARGB to preattenuated ARGB. 
-LIBYUV_API -int ARGBAttenuate(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Convert preattentuated ARGB to unattenuated ARGB. -LIBYUV_API -int ARGBUnattenuate(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Internal function - do not call directly. -// Computes table of cumulative sum for image where the value is the sum -// of all values above and to the left of the entry. Used by ARGBBlur. -LIBYUV_API -int ARGBComputeCumulativeSum(const uint8_t* src_argb, - int src_stride_argb, - int32_t* dst_cumsum, - int dst_stride32_cumsum, - int width, - int height); - -// Blur ARGB image. -// dst_cumsum table of width * (height + 1) * 16 bytes aligned to -// 16 byte boundary. -// dst_stride32_cumsum is number of ints in a row (width * 4). -// radius is number of pixels around the center. e.g. 1 = 3x3. 2=5x5. -// Blur is optimized for radius of 5 (11x11) or less. -LIBYUV_API -int ARGBBlur(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int32_t* dst_cumsum, - int dst_stride32_cumsum, - int width, - int height, - int radius); - -// Multiply ARGB image by ARGB value. -LIBYUV_API -int ARGBShade(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height, - uint32_t value); - -// Interpolate between two images using specified amount of interpolation -// (0 to 255) and store to destination. -// 'interpolation' is specified as 8 bit fraction where 0 means 100% src0 -// and 255 means 1% src0 and 99% src1. 
-LIBYUV_API -int InterpolatePlane(const uint8_t* src0, - int src_stride0, - const uint8_t* src1, - int src_stride1, - uint8_t* dst, - int dst_stride, - int width, - int height, - int interpolation); - -// Interpolate between two ARGB images using specified amount of interpolation -// Internally calls InterpolatePlane with width * 4 (bpp). -LIBYUV_API -int ARGBInterpolate(const uint8_t* src_argb0, - int src_stride_argb0, - const uint8_t* src_argb1, - int src_stride_argb1, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height, - int interpolation); - -// Interpolate between two YUV images using specified amount of interpolation -// Internally calls InterpolatePlane on each plane where the U and V planes -// are half width and half height. -LIBYUV_API -int I420Interpolate(const uint8_t* src0_y, - int src0_stride_y, - const uint8_t* src0_u, - int src0_stride_u, - const uint8_t* src0_v, - int src0_stride_v, - const uint8_t* src1_y, - int src1_stride_y, - const uint8_t* src1_u, - int src1_stride_u, - const uint8_t* src1_v, - int src1_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height, - int interpolation); - -// Row function for copying pixels from a source with a slope to a row -// of destination. Useful for scaling, rotation, mirror, texture mapping. -LIBYUV_API -void ARGBAffineRow_C(const uint8_t* src_argb, - int src_argb_stride, - uint8_t* dst_argb, - const float* uv_dudv, - int width); -// TODO(fbarchard): Move ARGBAffineRow_SSE2 to row.h -LIBYUV_API -void ARGBAffineRow_SSE2(const uint8_t* src_argb, - int src_argb_stride, - uint8_t* dst_argb, - const float* uv_dudv, - int width); - -// Shuffle ARGB channel order. e.g. BGRA to ARGB. -// shuffler is 16 bytes and must be aligned. 
-LIBYUV_API -int ARGBShuffle(const uint8_t* src_bgra, - int src_stride_bgra, - uint8_t* dst_argb, - int dst_stride_argb, - const uint8_t* shuffler, - int width, - int height); - -// Sobel ARGB effect with planar output. -LIBYUV_API -int ARGBSobelToPlane(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height); - -// Sobel ARGB effect. -LIBYUV_API -int ARGBSobel(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -// Sobel ARGB effect w/ Sobel X, Sobel, Sobel Y in ARGB. -LIBYUV_API -int ARGBSobelXY(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_PLANAR_FUNCTIONS_H_ diff --git a/macos/third_party/include/libyuv/rotate.h b/macos/third_party/include/libyuv/rotate.h deleted file mode 100644 index 76b692be8b..0000000000 --- a/macos/third_party/include/libyuv/rotate.h +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_ROTATE_H_ -#define INCLUDE_LIBYUV_ROTATE_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Supported rotation. -typedef enum RotationMode { - kRotate0 = 0, // No rotation. - kRotate90 = 90, // Rotate 90 degrees clockwise. - kRotate180 = 180, // Rotate 180 degrees. - kRotate270 = 270, // Rotate 270 degrees clockwise. - - // Deprecated. 
- kRotateNone = 0, - kRotateClockwise = 90, - kRotateCounterClockwise = 270, -} RotationModeEnum; - -// Rotate I420 frame. -LIBYUV_API -int I420Rotate(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height, - enum RotationMode mode); - -// Rotate NV12 input and store in I420. -LIBYUV_API -int NV12ToI420Rotate(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height, - enum RotationMode mode); - -// Rotate a plane by 0, 90, 180, or 270. -LIBYUV_API -int RotatePlane(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width, - int height, - enum RotationMode mode); - -// Rotate planes by 90, 180, 270. Deprecated. -LIBYUV_API -void RotatePlane90(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width, - int height); - -LIBYUV_API -void RotatePlane180(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width, - int height); - -LIBYUV_API -void RotatePlane270(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width, - int height); - -LIBYUV_API -void RotateUV90(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); - -// Rotations for when U and V are interleaved. -// These functions take one input pointer and -// split the data into two buffers while -// rotating them. Deprecated. 
-LIBYUV_API -void RotateUV180(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); - -LIBYUV_API -void RotateUV270(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); - -// The 90 and 270 functions are based on transposes. -// Doing a transpose with reversing the read/write -// order will result in a rotation by +- 90 degrees. -// Deprecated. -LIBYUV_API -void TransposePlane(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width, - int height); - -LIBYUV_API -void TransposeUV(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_ROTATE_H_ diff --git a/macos/third_party/include/libyuv/rotate_argb.h b/macos/third_party/include/libyuv/rotate_argb.h deleted file mode 100644 index 20432949ab..0000000000 --- a/macos/third_party/include/libyuv/rotate_argb.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2012 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_ROTATE_ARGB_H_ -#define INCLUDE_LIBYUV_ROTATE_ARGB_H_ - -#include "libyuv/basic_types.h" -#include "libyuv/rotate.h" // For RotationMode. 
- -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Rotate ARGB frame -LIBYUV_API -int ARGBRotate(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int src_width, - int src_height, - enum RotationMode mode); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_ROTATE_ARGB_H_ diff --git a/macos/third_party/include/libyuv/rotate_row.h b/macos/third_party/include/libyuv/rotate_row.h deleted file mode 100644 index 022293eef2..0000000000 --- a/macos/third_party/include/libyuv/rotate_row.h +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright 2013 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_ROTATE_ROW_H_ -#define INCLUDE_LIBYUV_ROTATE_ROW_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -#if defined(__pnacl__) || defined(__CLR_VER) || \ - (defined(__native_client__) && defined(__x86_64__)) || \ - (defined(__i386__) && !defined(__SSE__) && !defined(__clang__)) -#define LIBYUV_DISABLE_X86 -#endif -#if defined(__native_client__) -#define LIBYUV_DISABLE_NEON -#endif -// MemorySanitizer does not support assembly code yet. 
http://crbug.com/344505 -#if defined(__has_feature) -#if __has_feature(memory_sanitizer) -#define LIBYUV_DISABLE_X86 -#endif -#endif -// The following are available for Visual C and clangcl 32 bit: -#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) -#define HAS_TRANSPOSEWX8_SSSE3 -#define HAS_TRANSPOSEUVWX8_SSE2 -#endif - -// The following are available for GCC 32 or 64 bit: -#if !defined(LIBYUV_DISABLE_X86) && (defined(__i386__) || defined(__x86_64__)) -#define HAS_TRANSPOSEWX8_SSSE3 -#endif - -// The following are available for 64 bit GCC: -#if !defined(LIBYUV_DISABLE_X86) && defined(__x86_64__) -#define HAS_TRANSPOSEWX8_FAST_SSSE3 -#define HAS_TRANSPOSEUVWX8_SSE2 -#endif - -#if !defined(LIBYUV_DISABLE_NEON) && \ - (defined(__ARM_NEON__) || defined(LIBYUV_NEON) || defined(__aarch64__)) -#define HAS_TRANSPOSEWX8_NEON -#define HAS_TRANSPOSEUVWX8_NEON -#endif - -#if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) -#define HAS_TRANSPOSEWX16_MSA -#define HAS_TRANSPOSEUVWX16_MSA -#endif - -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_TRANSPOSEWX8_MMI -#define HAS_TRANSPOSEUVWX8_MMI -#endif - -void TransposeWxH_C(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width, - int height); - -void TransposeWx8_C(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx16_C(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx8_NEON(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx8_SSSE3(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx8_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx8_Fast_SSSE3(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx16_MSA(const uint8_t* src, - 
int src_stride, - uint8_t* dst, - int dst_stride, - int width); - -void TransposeWx8_Any_NEON(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx8_Any_SSSE3(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx8_Any_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx8_Fast_Any_SSSE3(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); -void TransposeWx16_Any_MSA(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); - -void TransposeUVWxH_C(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); - -void TransposeUVWx8_C(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx16_C(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx8_SSE2(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx8_NEON(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx8_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx16_MSA(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); - -void TransposeUVWx8_Any_SSE2(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx8_Any_NEON(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - 
uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx8_Any_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); -void TransposeUVWx16_Any_MSA(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_ROTATE_ROW_H_ diff --git a/macos/third_party/include/libyuv/row.h b/macos/third_party/include/libyuv/row.h deleted file mode 100644 index 06ab37565c..0000000000 --- a/macos/third_party/include/libyuv/row.h +++ /dev/null @@ -1,3970 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_ROW_H_ -#define INCLUDE_LIBYUV_ROW_H_ - -#include // For malloc. - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -#if defined(__pnacl__) || defined(__CLR_VER) || \ - (defined(__native_client__) && defined(__x86_64__)) || \ - (defined(__i386__) && !defined(__SSE__) && !defined(__clang__)) -#define LIBYUV_DISABLE_X86 -#endif -#if defined(__native_client__) -#define LIBYUV_DISABLE_NEON -#endif -// MemorySanitizer does not support assembly code yet. http://crbug.com/344505 -#if defined(__has_feature) -#if __has_feature(memory_sanitizer) -#define LIBYUV_DISABLE_X86 -#endif -#endif -// clang >= 3.5.0 required for Arm64. 
-#if defined(__clang__) && defined(__aarch64__) && !defined(LIBYUV_DISABLE_NEON) -#if (__clang_major__ < 3) || (__clang_major__ == 3 && (__clang_minor__ < 5)) -#define LIBYUV_DISABLE_NEON -#endif // clang >= 3.5 -#endif // __clang__ - -// GCC >= 4.7.0 required for AVX2. -#if defined(__GNUC__) && (defined(__x86_64__) || defined(__i386__)) -#if (__GNUC__ > 4) || (__GNUC__ == 4 && (__GNUC_MINOR__ >= 7)) -#define GCC_HAS_AVX2 1 -#endif // GNUC >= 4.7 -#endif // __GNUC__ - -// clang >= 3.4.0 required for AVX2. -#if defined(__clang__) && (defined(__x86_64__) || defined(__i386__)) -#if (__clang_major__ > 3) || (__clang_major__ == 3 && (__clang_minor__ >= 4)) -#define CLANG_HAS_AVX2 1 -#endif // clang >= 3.4 -#endif // __clang__ - -// clang >= 6.0.0 required for AVX512. -#if defined(__clang__) && (defined(__x86_64__) || defined(__i386__)) -// clang in xcode follows a different versioning scheme. -// TODO(fbarchard): fix xcode 9 ios b/789. -#if (__clang_major__ >= 7) && !defined(__APPLE__) -#define CLANG_HAS_AVX512 1 -#endif // clang >= 7 -#endif // __clang__ - -// Visual C 2012 required for AVX2. 
-#if defined(_M_IX86) && !defined(__clang__) && defined(_MSC_VER) && \ - _MSC_VER >= 1700 -#define VISUALC_HAS_AVX2 1 -#endif // VisualStudio >= 2012 - -// The following are available on all x86 platforms: -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(_M_IX86) || defined(__x86_64__) || defined(__i386__)) -// Conversions: -#define HAS_ABGRTOUVROW_SSSE3 -#define HAS_ABGRTOYROW_SSSE3 -#define HAS_ARGB1555TOARGBROW_SSE2 -#define HAS_ARGB4444TOARGBROW_SSE2 -#define HAS_ARGBEXTRACTALPHAROW_SSE2 -#define HAS_ARGBSETROW_X86 -#define HAS_ARGBSHUFFLEROW_SSSE3 -#define HAS_ARGBTOARGB1555ROW_SSE2 -#define HAS_ARGBTOARGB4444ROW_SSE2 -#define HAS_ARGBTORAWROW_SSSE3 -#define HAS_ARGBTORGB24ROW_SSSE3 -#define HAS_ARGBTORGB565DITHERROW_SSE2 -#define HAS_ARGBTORGB565ROW_SSE2 -#define HAS_ARGBTOUV444ROW_SSSE3 -#define HAS_ARGBTOUVJROW_SSSE3 -#define HAS_ARGBTOUVROW_SSSE3 -#define HAS_ARGBTOYJROW_SSSE3 -#define HAS_ARGBTOYROW_SSSE3 -#define HAS_BGRATOUVROW_SSSE3 -#define HAS_BGRATOYROW_SSSE3 -#define HAS_COPYROW_ERMS -#define HAS_COPYROW_SSE2 -#define HAS_H422TOARGBROW_SSSE3 -#define HAS_HALFFLOATROW_SSE2 -#define HAS_I400TOARGBROW_SSE2 -#define HAS_I422TOARGB1555ROW_SSSE3 -#define HAS_I422TOARGB4444ROW_SSSE3 -#define HAS_I422TOARGBROW_SSSE3 -#define HAS_I422TORGB24ROW_SSSE3 -#define HAS_I422TORGB565ROW_SSSE3 -#define HAS_I422TORGBAROW_SSSE3 -#define HAS_I422TOUYVYROW_SSE2 -#define HAS_I422TOYUY2ROW_SSE2 -#define HAS_I444TOARGBROW_SSSE3 -#define HAS_J400TOARGBROW_SSE2 -#define HAS_J422TOARGBROW_SSSE3 -#define HAS_MERGEUVROW_SSE2 -#define HAS_MIRRORROW_SSSE3 -#define HAS_MIRRORUVROW_SSSE3 -#define HAS_NV12TOARGBROW_SSSE3 -#define HAS_NV12TORGB24ROW_SSSE3 -#define HAS_NV12TORGB565ROW_SSSE3 -#define HAS_NV21TOARGBROW_SSSE3 -#define HAS_NV21TORGB24ROW_SSSE3 -#define HAS_RAWTOARGBROW_SSSE3 -#define HAS_RAWTORGB24ROW_SSSE3 -#define HAS_RAWTOYROW_SSSE3 -#define HAS_RGB24TOARGBROW_SSSE3 -#define HAS_RGB24TOYROW_SSSE3 -#define HAS_RGB565TOARGBROW_SSE2 -#define HAS_RGBATOUVROW_SSSE3 
-#define HAS_RGBATOYROW_SSSE3 -#define HAS_SETROW_ERMS -#define HAS_SETROW_X86 -#define HAS_SPLITUVROW_SSE2 -#define HAS_UYVYTOARGBROW_SSSE3 -#define HAS_UYVYTOUV422ROW_SSE2 -#define HAS_UYVYTOUVROW_SSE2 -#define HAS_UYVYTOYROW_SSE2 -#define HAS_YUY2TOARGBROW_SSSE3 -#define HAS_YUY2TOUV422ROW_SSE2 -#define HAS_YUY2TOUVROW_SSE2 -#define HAS_YUY2TOYROW_SSE2 - -// Effects: -#define HAS_ARGBADDROW_SSE2 -#define HAS_ARGBAFFINEROW_SSE2 -#define HAS_ARGBATTENUATEROW_SSSE3 -#define HAS_ARGBBLENDROW_SSSE3 -#define HAS_ARGBCOLORMATRIXROW_SSSE3 -#define HAS_ARGBCOLORTABLEROW_X86 -#define HAS_ARGBCOPYALPHAROW_SSE2 -#define HAS_ARGBCOPYYTOALPHAROW_SSE2 -#define HAS_ARGBGRAYROW_SSSE3 -#define HAS_ARGBLUMACOLORTABLEROW_SSSE3 -#define HAS_ARGBMIRRORROW_SSE2 -#define HAS_ARGBMULTIPLYROW_SSE2 -#define HAS_ARGBPOLYNOMIALROW_SSE2 -#define HAS_ARGBQUANTIZEROW_SSE2 -#define HAS_ARGBSEPIAROW_SSSE3 -#define HAS_ARGBSHADEROW_SSE2 -#define HAS_ARGBSUBTRACTROW_SSE2 -#define HAS_ARGBUNATTENUATEROW_SSE2 -#define HAS_BLENDPLANEROW_SSSE3 -#define HAS_COMPUTECUMULATIVESUMROW_SSE2 -#define HAS_CUMULATIVESUMTOAVERAGEROW_SSE2 -#define HAS_INTERPOLATEROW_SSSE3 -#define HAS_RGBCOLORTABLEROW_X86 -#define HAS_SOBELROW_SSE2 -#define HAS_SOBELTOPLANEROW_SSE2 -#define HAS_SOBELXROW_SSE2 -#define HAS_SOBELXYROW_SSE2 -#define HAS_SOBELYROW_SSE2 - -// The following functions fail on gcc/clang 32 bit with fpic and framepointer. -// caveat: clangcl uses row_win.cc which works. -#if defined(__x86_64__) || !defined(__pic__) || defined(__clang__) || \ - defined(_MSC_VER) -// TODO(fbarchard): fix build error on android_full_debug=1 -// https://code.google.com/p/libyuv/issues/detail?id=517 -#define HAS_I422ALPHATOARGBROW_SSSE3 -#endif -#endif - -// The following are available on all x86 platforms, but -// require VS2012, clang 3.4 or gcc 4.7. 
-#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2) || \ - defined(GCC_HAS_AVX2)) -#define HAS_ARGBCOPYALPHAROW_AVX2 -#define HAS_ARGBCOPYYTOALPHAROW_AVX2 -#define HAS_ARGBEXTRACTALPHAROW_AVX2 -#define HAS_ARGBMIRRORROW_AVX2 -#define HAS_ARGBPOLYNOMIALROW_AVX2 -#define HAS_ARGBSHUFFLEROW_AVX2 -#define HAS_ARGBTORGB565DITHERROW_AVX2 -#define HAS_ARGBTOUVJROW_AVX2 -#define HAS_ARGBTOUVROW_AVX2 -#define HAS_ARGBTOYJROW_AVX2 -#define HAS_ARGBTOYROW_AVX2 -#define HAS_COPYROW_AVX -#define HAS_H422TOARGBROW_AVX2 -#define HAS_HALFFLOATROW_AVX2 -// #define HAS_HALFFLOATROW_F16C // Enable to test halffloat cast -#define HAS_I400TOARGBROW_AVX2 -#define HAS_I422TOARGB1555ROW_AVX2 -#define HAS_I422TOARGB4444ROW_AVX2 -#define HAS_I422TOARGBROW_AVX2 -#define HAS_I422TORGB24ROW_AVX2 -#define HAS_I422TORGB565ROW_AVX2 -#define HAS_I422TORGBAROW_AVX2 -#define HAS_I444TOARGBROW_AVX2 -#define HAS_INTERPOLATEROW_AVX2 -#define HAS_J422TOARGBROW_AVX2 -#define HAS_MERGEUVROW_AVX2 -#define HAS_MIRRORROW_AVX2 -#define HAS_NV12TOARGBROW_AVX2 -#define HAS_NV12TORGB24ROW_AVX2 -#define HAS_NV12TORGB565ROW_AVX2 -#define HAS_NV21TOARGBROW_AVX2 -#define HAS_NV21TORGB24ROW_AVX2 -#define HAS_SPLITUVROW_AVX2 -#define HAS_UYVYTOARGBROW_AVX2 -#define HAS_UYVYTOUV422ROW_AVX2 -#define HAS_UYVYTOUVROW_AVX2 -#define HAS_UYVYTOYROW_AVX2 -#define HAS_YUY2TOARGBROW_AVX2 -#define HAS_YUY2TOUV422ROW_AVX2 -#define HAS_YUY2TOUVROW_AVX2 -#define HAS_YUY2TOYROW_AVX2 - -// Effects: -#define HAS_ARGBADDROW_AVX2 -#define HAS_ARGBATTENUATEROW_AVX2 -#define HAS_ARGBMULTIPLYROW_AVX2 -#define HAS_ARGBSUBTRACTROW_AVX2 -#define HAS_ARGBUNATTENUATEROW_AVX2 -#define HAS_BLENDPLANEROW_AVX2 - -#if defined(__x86_64__) || !defined(__pic__) || defined(__clang__) || \ - defined(_MSC_VER) -// TODO(fbarchard): fix build error on android_full_debug=1 -// https://code.google.com/p/libyuv/issues/detail?id=517 -#define HAS_I422ALPHATOARGBROW_AVX2 -#endif -#endif - -// The following are available 
for AVX2 Visual C and clangcl 32 bit: -// TODO(fbarchard): Port to gcc. -#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \ - (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2)) -#define HAS_ARGB1555TOARGBROW_AVX2 -#define HAS_ARGB4444TOARGBROW_AVX2 -#define HAS_ARGBTOARGB1555ROW_AVX2 -#define HAS_ARGBTOARGB4444ROW_AVX2 -#define HAS_ARGBTORGB565ROW_AVX2 -#define HAS_J400TOARGBROW_AVX2 -#define HAS_RGB565TOARGBROW_AVX2 -#endif - -// The following are also available on x64 Visual C. -#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && defined(_M_X64) && \ - (!defined(__clang__) || defined(__SSSE3__)) -#define HAS_I422ALPHATOARGBROW_SSSE3 -#define HAS_I422TOARGBROW_SSSE3 -#endif - -// The following are available for gcc/clang x86 platforms: -// TODO(fbarchard): Port to Visual C -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) -#define HAS_ABGRTOAR30ROW_SSSE3 -#define HAS_ARGBTOAR30ROW_SSSE3 -#define HAS_CONVERT16TO8ROW_SSSE3 -#define HAS_CONVERT8TO16ROW_SSE2 -// I210 is for H010. 2 = 422. I for 601 vs H for 709. 
-#define HAS_I210TOAR30ROW_SSSE3 -#define HAS_I210TOARGBROW_SSSE3 -#define HAS_I422TOAR30ROW_SSSE3 -#define HAS_MERGERGBROW_SSSE3 -#define HAS_SPLITRGBROW_SSSE3 -#endif - -// The following are available for AVX2 gcc/clang x86 platforms: -// TODO(fbarchard): Port to Visual C -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) && \ - (defined(CLANG_HAS_AVX2) || defined(GCC_HAS_AVX2)) -#define HAS_ABGRTOAR30ROW_AVX2 -#define HAS_ARGBTOAR30ROW_AVX2 -#define HAS_ARGBTORAWROW_AVX2 -#define HAS_ARGBTORGB24ROW_AVX2 -#define HAS_CONVERT16TO8ROW_AVX2 -#define HAS_CONVERT8TO16ROW_AVX2 -#define HAS_I210TOAR30ROW_AVX2 -#define HAS_I210TOARGBROW_AVX2 -#define HAS_I422TOAR30ROW_AVX2 -#define HAS_I422TOUYVYROW_AVX2 -#define HAS_I422TOYUY2ROW_AVX2 -#define HAS_MERGEUVROW_16_AVX2 -#define HAS_MULTIPLYROW_16_AVX2 -#endif - -// The following are available for AVX512 clang x86 platforms: -// TODO(fbarchard): Port to GCC and Visual C -// TODO(fbarchard): re-enable HAS_ARGBTORGB24ROW_AVX512VBMI. 
Issue libyuv:789 -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) && \ - (defined(CLANG_HAS_AVX512)) -#define HAS_ARGBTORGB24ROW_AVX512VBMI -#endif - -// The following are available on Neon platforms: -#if !defined(LIBYUV_DISABLE_NEON) && \ - (defined(__aarch64__) || defined(__ARM_NEON__) || defined(LIBYUV_NEON)) -#define HAS_ABGRTOUVROW_NEON -#define HAS_ABGRTOYROW_NEON -#define HAS_ARGB1555TOARGBROW_NEON -#define HAS_ARGB1555TOUVROW_NEON -#define HAS_ARGB1555TOYROW_NEON -#define HAS_ARGB4444TOARGBROW_NEON -#define HAS_ARGB4444TOUVROW_NEON -#define HAS_ARGB4444TOYROW_NEON -#define HAS_ARGBEXTRACTALPHAROW_NEON -#define HAS_ARGBSETROW_NEON -#define HAS_ARGBTOARGB1555ROW_NEON -#define HAS_ARGBTOARGB4444ROW_NEON -#define HAS_ARGBTORAWROW_NEON -#define HAS_ARGBTORGB24ROW_NEON -#define HAS_ARGBTORGB565DITHERROW_NEON -#define HAS_ARGBTORGB565ROW_NEON -#define HAS_ARGBTOUV444ROW_NEON -#define HAS_ARGBTOUVJROW_NEON -#define HAS_ARGBTOUVROW_NEON -#define HAS_ARGBTOYJROW_NEON -#define HAS_ARGBTOYROW_NEON -#define HAS_BGRATOUVROW_NEON -#define HAS_BGRATOYROW_NEON -#define HAS_BYTETOFLOATROW_NEON -#define HAS_COPYROW_NEON -#define HAS_HALFFLOATROW_NEON -#define HAS_I400TOARGBROW_NEON -#define HAS_I422ALPHATOARGBROW_NEON -#define HAS_I422TOARGB1555ROW_NEON -#define HAS_I422TOARGB4444ROW_NEON -#define HAS_I422TOARGBROW_NEON -#define HAS_I422TORGB24ROW_NEON -#define HAS_I422TORGB565ROW_NEON -#define HAS_I422TORGBAROW_NEON -#define HAS_I422TOUYVYROW_NEON -#define HAS_I422TOYUY2ROW_NEON -#define HAS_I444TOARGBROW_NEON -#define HAS_J400TOARGBROW_NEON -#define HAS_MERGEUVROW_NEON -#define HAS_MIRRORROW_NEON -#define HAS_MIRRORUVROW_NEON -#define HAS_NV12TOARGBROW_NEON -#define HAS_NV12TORGB24ROW_NEON -#define HAS_NV12TORGB565ROW_NEON -#define HAS_NV21TOARGBROW_NEON -#define HAS_NV21TORGB24ROW_NEON -#define HAS_RAWTOARGBROW_NEON -#define HAS_RAWTORGB24ROW_NEON -#define HAS_RAWTOUVROW_NEON -#define HAS_RAWTOYROW_NEON -#define 
HAS_RGB24TOARGBROW_NEON -#define HAS_RGB24TOUVROW_NEON -#define HAS_RGB24TOYROW_NEON -#define HAS_RGB565TOARGBROW_NEON -#define HAS_RGB565TOUVROW_NEON -#define HAS_RGB565TOYROW_NEON -#define HAS_RGBATOUVROW_NEON -#define HAS_RGBATOYROW_NEON -#define HAS_SETROW_NEON -#define HAS_SPLITRGBROW_NEON -#define HAS_SPLITUVROW_NEON -#define HAS_UYVYTOARGBROW_NEON -#define HAS_UYVYTOUV422ROW_NEON -#define HAS_UYVYTOUVROW_NEON -#define HAS_UYVYTOYROW_NEON -#define HAS_YUY2TOARGBROW_NEON -#define HAS_YUY2TOUV422ROW_NEON -#define HAS_YUY2TOUVROW_NEON -#define HAS_YUY2TOYROW_NEON - -// Effects: -#define HAS_ARGBADDROW_NEON -#define HAS_ARGBATTENUATEROW_NEON -#define HAS_ARGBBLENDROW_NEON -#define HAS_ARGBCOLORMATRIXROW_NEON -#define HAS_ARGBGRAYROW_NEON -#define HAS_ARGBMIRRORROW_NEON -#define HAS_ARGBMULTIPLYROW_NEON -#define HAS_ARGBQUANTIZEROW_NEON -#define HAS_ARGBSEPIAROW_NEON -#define HAS_ARGBSHADEROW_NEON -#define HAS_ARGBSHUFFLEROW_NEON -#define HAS_ARGBSUBTRACTROW_NEON -#define HAS_INTERPOLATEROW_NEON -#define HAS_SOBELROW_NEON -#define HAS_SOBELTOPLANEROW_NEON -#define HAS_SOBELXROW_NEON -#define HAS_SOBELXYROW_NEON -#define HAS_SOBELYROW_NEON -#endif - -// The following are available on AArch64 platforms: -#if !defined(LIBYUV_DISABLE_NEON) && defined(__aarch64__) -#define HAS_SCALESUMSAMPLES_NEON -#endif -#if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) -#define HAS_ABGRTOUVROW_MSA -#define HAS_ABGRTOYROW_MSA -#define HAS_ARGB1555TOARGBROW_MSA -#define HAS_ARGB1555TOUVROW_MSA -#define HAS_ARGB1555TOYROW_MSA -#define HAS_ARGB4444TOARGBROW_MSA -#define HAS_ARGBADDROW_MSA -#define HAS_ARGBATTENUATEROW_MSA -#define HAS_ARGBBLENDROW_MSA -#define HAS_ARGBCOLORMATRIXROW_MSA -#define HAS_ARGBEXTRACTALPHAROW_MSA -#define HAS_ARGBGRAYROW_MSA -#define HAS_ARGBMIRRORROW_MSA -#define HAS_ARGBMULTIPLYROW_MSA -#define HAS_ARGBQUANTIZEROW_MSA -#define HAS_ARGBSEPIAROW_MSA -#define HAS_ARGBSETROW_MSA -#define HAS_ARGBSHADEROW_MSA -#define HAS_ARGBSHUFFLEROW_MSA -#define 
HAS_ARGBSUBTRACTROW_MSA -#define HAS_ARGBTOARGB1555ROW_MSA -#define HAS_ARGBTOARGB4444ROW_MSA -#define HAS_ARGBTORAWROW_MSA -#define HAS_ARGBTORGB24ROW_MSA -#define HAS_ARGBTORGB565DITHERROW_MSA -#define HAS_ARGBTORGB565ROW_MSA -#define HAS_ARGBTOUV444ROW_MSA -#define HAS_ARGBTOUVJROW_MSA -#define HAS_ARGBTOUVROW_MSA -#define HAS_ARGBTOYJROW_MSA -#define HAS_ARGBTOYROW_MSA -#define HAS_BGRATOUVROW_MSA -#define HAS_BGRATOYROW_MSA -#define HAS_HALFFLOATROW_MSA -#define HAS_I400TOARGBROW_MSA -#define HAS_I422ALPHATOARGBROW_MSA -#define HAS_I422TOARGBROW_MSA -#define HAS_I422TORGB24ROW_MSA -#define HAS_I422TORGBAROW_MSA -#define HAS_I422TOUYVYROW_MSA -#define HAS_I422TOYUY2ROW_MSA -#define HAS_I444TOARGBROW_MSA -#define HAS_INTERPOLATEROW_MSA -#define HAS_J400TOARGBROW_MSA -#define HAS_MERGEUVROW_MSA -#define HAS_MIRRORROW_MSA -#define HAS_MIRRORUVROW_MSA -#define HAS_NV12TOARGBROW_MSA -#define HAS_NV12TORGB565ROW_MSA -#define HAS_NV21TOARGBROW_MSA -#define HAS_RAWTOARGBROW_MSA -#define HAS_RAWTORGB24ROW_MSA -#define HAS_RAWTOUVROW_MSA -#define HAS_RAWTOYROW_MSA -#define HAS_RGB24TOARGBROW_MSA -#define HAS_RGB24TOUVROW_MSA -#define HAS_RGB24TOYROW_MSA -#define HAS_RGB565TOARGBROW_MSA -#define HAS_RGB565TOUVROW_MSA -#define HAS_RGB565TOYROW_MSA -#define HAS_RGBATOUVROW_MSA -#define HAS_RGBATOYROW_MSA -#define HAS_SETROW_MSA -#define HAS_SOBELROW_MSA -#define HAS_SOBELTOPLANEROW_MSA -#define HAS_SOBELXROW_MSA -#define HAS_SOBELXYROW_MSA -#define HAS_SOBELYROW_MSA -#define HAS_SPLITUVROW_MSA -#define HAS_UYVYTOARGBROW_MSA -#define HAS_UYVYTOUVROW_MSA -#define HAS_UYVYTOYROW_MSA -#define HAS_YUY2TOARGBROW_MSA -#define HAS_YUY2TOUV422ROW_MSA -#define HAS_YUY2TOUVROW_MSA -#define HAS_YUY2TOYROW_MSA -#endif - -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_ABGRTOUVROW_MMI -#define HAS_ABGRTOYROW_MMI -#define HAS_ARGB1555TOARGBROW_MMI -#define HAS_ARGB1555TOUVROW_MMI -#define HAS_ARGB1555TOYROW_MMI -#define HAS_ARGB4444TOARGBROW_MMI -#define 
HAS_ARGB4444TOUVROW_MMI -#define HAS_ARGB4444TOYROW_MMI -#define HAS_ARGBADDROW_MMI -#define HAS_ARGBATTENUATEROW_MMI -#define HAS_ARGBBLENDROW_MMI -#define HAS_ARGBCOLORMATRIXROW_MMI -#define HAS_ARGBCOPYALPHAROW_MMI -#define HAS_ARGBCOPYYTOALPHAROW_MMI -#define HAS_ARGBEXTRACTALPHAROW_MMI -#define HAS_ARGBGRAYROW_MMI -#define HAS_ARGBMIRRORROW_MMI -#define HAS_ARGBMULTIPLYROW_MMI -#define HAS_ARGBSEPIAROW_MMI -#define HAS_ARGBSHADEROW_MMI -#define HAS_ARGBSHUFFLEROW_MMI -#define HAS_ARGBSUBTRACTROW_MMI -#define HAS_ARGBTOARGB1555ROW_MMI -#define HAS_ARGBTOARGB4444ROW_MMI -#define HAS_ARGBTORAWROW_MMI -#define HAS_ARGBTORGB24ROW_MMI -#define HAS_ARGBTORGB565DITHERROW_MMI -#define HAS_ARGBTORGB565ROW_MMI -#define HAS_ARGBTOUV444ROW_MMI -#define HAS_ARGBTOUVJROW_MMI -#define HAS_ARGBTOUVROW_MMI -#define HAS_ARGBTOYJROW_MMI -#define HAS_ARGBTOYROW_MMI -#define HAS_BGRATOUVROW_MMI -#define HAS_BGRATOYROW_MMI -#define HAS_BLENDPLANEROW_MMI -#define HAS_COMPUTECUMULATIVESUMROW_MMI -#define HAS_CUMULATIVESUMTOAVERAGEROW_MMI -#define HAS_HALFFLOATROW_MMI -#define HAS_I400TOARGBROW_MMI -#define HAS_I422TOUYVYROW_MMI -#define HAS_I422TOYUY2ROW_MMI -#define HAS_INTERPOLATEROW_MMI -#define HAS_J400TOARGBROW_MMI -#define HAS_MERGERGBROW_MMI -#define HAS_MERGEUVROW_MMI -#define HAS_MIRRORROW_MMI -#define HAS_MIRRORUVROW_MMI -#define HAS_RAWTOARGBROW_MMI -#define HAS_RAWTORGB24ROW_MMI -#define HAS_RAWTOUVROW_MMI -#define HAS_RAWTOYROW_MMI -#define HAS_RGB24TOARGBROW_MMI -#define HAS_RGB24TOUVROW_MMI -#define HAS_RGB24TOYROW_MMI -#define HAS_RGB565TOARGBROW_MMI -#define HAS_RGB565TOUVROW_MMI -#define HAS_RGB565TOYROW_MMI -#define HAS_RGBATOUVROW_MMI -#define HAS_RGBATOYROW_MMI -#define HAS_SOBELROW_MMI -#define HAS_SOBELTOPLANEROW_MMI -#define HAS_SOBELXROW_MMI -#define HAS_SOBELXYROW_MMI -#define HAS_SOBELYROW_MMI -#define HAS_SPLITRGBROW_MMI -#define HAS_SPLITUVROW_MMI -#define HAS_UYVYTOUVROW_MMI -#define HAS_UYVYTOYROW_MMI -#define HAS_YUY2TOUV422ROW_MMI -#define 
HAS_YUY2TOUVROW_MMI -#define HAS_YUY2TOYROW_MMI -#endif - -#if defined(_MSC_VER) && !defined(__CLR_VER) && !defined(__clang__) -#if defined(VISUALC_HAS_AVX2) -#define SIMD_ALIGNED(var) __declspec(align(32)) var -#else -#define SIMD_ALIGNED(var) __declspec(align(16)) var -#endif -typedef __declspec(align(16)) int16_t vec16[8]; -typedef __declspec(align(16)) int32_t vec32[4]; -typedef __declspec(align(16)) int8_t vec8[16]; -typedef __declspec(align(16)) uint16_t uvec16[8]; -typedef __declspec(align(16)) uint32_t uvec32[4]; -typedef __declspec(align(16)) uint8_t uvec8[16]; -typedef __declspec(align(32)) int16_t lvec16[16]; -typedef __declspec(align(32)) int32_t lvec32[8]; -typedef __declspec(align(32)) int8_t lvec8[32]; -typedef __declspec(align(32)) uint16_t ulvec16[16]; -typedef __declspec(align(32)) uint32_t ulvec32[8]; -typedef __declspec(align(32)) uint8_t ulvec8[32]; -#elif !defined(__pnacl__) && (defined(__GNUC__) || defined(__clang__)) -// Caveat GCC 4.2 to 4.7 have a known issue using vectors with const. 
-#if defined(CLANG_HAS_AVX2) || defined(GCC_HAS_AVX2) -#define SIMD_ALIGNED(var) var __attribute__((aligned(32))) -#else -#define SIMD_ALIGNED(var) var __attribute__((aligned(16))) -#endif -typedef int16_t __attribute__((vector_size(16))) vec16; -typedef int32_t __attribute__((vector_size(16))) vec32; -typedef int8_t __attribute__((vector_size(16))) vec8; -typedef uint16_t __attribute__((vector_size(16))) uvec16; -typedef uint32_t __attribute__((vector_size(16))) uvec32; -typedef uint8_t __attribute__((vector_size(16))) uvec8; -typedef int16_t __attribute__((vector_size(32))) lvec16; -typedef int32_t __attribute__((vector_size(32))) lvec32; -typedef int8_t __attribute__((vector_size(32))) lvec8; -typedef uint16_t __attribute__((vector_size(32))) ulvec16; -typedef uint32_t __attribute__((vector_size(32))) ulvec32; -typedef uint8_t __attribute__((vector_size(32))) ulvec8; -#else -#define SIMD_ALIGNED(var) var -typedef int16_t vec16[8]; -typedef int32_t vec32[4]; -typedef int8_t vec8[16]; -typedef uint16_t uvec16[8]; -typedef uint32_t uvec32[4]; -typedef uint8_t uvec8[16]; -typedef int16_t lvec16[16]; -typedef int32_t lvec32[8]; -typedef int8_t lvec8[32]; -typedef uint16_t ulvec16[16]; -typedef uint32_t ulvec32[8]; -typedef uint8_t ulvec8[32]; -#endif - -#if defined(__aarch64__) -// This struct is for Arm64 color conversion. -struct YuvConstants { - uvec16 kUVToRB; - uvec16 kUVToRB2; - uvec16 kUVToG; - uvec16 kUVToG2; - vec16 kUVBiasBGR; - vec32 kYToRgb; -}; -#elif defined(__arm__) -// This struct is for ArmV7 color conversion. -struct YuvConstants { - uvec8 kUVToRB; - uvec8 kUVToG; - vec16 kUVBiasBGR; - vec32 kYToRgb; -}; -#else -// This struct is for Intel color conversion. 
-struct YuvConstants { - int8_t kUVToB[32]; - int8_t kUVToG[32]; - int8_t kUVToR[32]; - int16_t kUVBiasB[16]; - int16_t kUVBiasG[16]; - int16_t kUVBiasR[16]; - int16_t kYToRgb[16]; -}; - -// Offsets into YuvConstants structure -#define KUVTOB 0 -#define KUVTOG 32 -#define KUVTOR 64 -#define KUVBIASB 96 -#define KUVBIASG 128 -#define KUVBIASR 160 -#define KYTORGB 192 -#endif - -// Conversion matrix for YUV to RGB -extern const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants); // BT.601 -extern const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants); // JPeg -extern const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants); // BT.709 - -// Conversion matrix for YVU to BGR -extern const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants); // BT.601 -extern const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants); // JPeg -extern const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants); // BT.709 - -#define IS_ALIGNED(p, a) (!((uintptr_t)(p) & ((a)-1))) - -#define align_buffer_64(var, size) \ - uint8_t* var##_mem = (uint8_t*)(malloc((size) + 63)); /* NOLINT */ \ - uint8_t* var = (uint8_t*)(((intptr_t)(var##_mem) + 63) & ~63) /* NOLINT */ - -#define free_aligned_buffer_64(var) \ - free(var##_mem); \ - var = 0 - -#if defined(__APPLE__) || defined(__x86_64__) || defined(__llvm__) -#define OMITFP -#else -#define OMITFP __attribute__((optimize("omit-frame-pointer"))) -#endif - -// NaCL macros for GCC x86 and x64. -#if defined(__native_client__) -#define LABELALIGN ".p2align 5\n" -#else -#define LABELALIGN -#endif - -// Intel Code Analizer markers. Insert IACA_START IACA_END around code to be -// measured and then run with iaca -64 libyuv_unittest. -// IACA_ASM_START amd IACA_ASM_END are equivalents that can be used within -// inline assembly blocks. 
-// example of iaca: -// ~/iaca-lin64/bin/iaca.sh -64 -analysis LATENCY out/Release/libyuv_unittest - -#if defined(__x86_64__) || defined(__i386__) - -#define IACA_ASM_START \ - ".byte 0x0F, 0x0B\n" \ - " movl $111, %%ebx\n" \ - ".byte 0x64, 0x67, 0x90\n" - -#define IACA_ASM_END \ - " movl $222, %%ebx\n" \ - ".byte 0x64, 0x67, 0x90\n" \ - ".byte 0x0F, 0x0B\n" - -#define IACA_SSC_MARK(MARK_ID) \ - __asm__ __volatile__("\n\t movl $" #MARK_ID \ - ", %%ebx" \ - "\n\t .byte 0x64, 0x67, 0x90" \ - : \ - : \ - : "memory"); - -#define IACA_UD_BYTES __asm__ __volatile__("\n\t .byte 0x0F, 0x0B"); - -#else /* Visual C */ -#define IACA_UD_BYTES \ - { __asm _emit 0x0F __asm _emit 0x0B } - -#define IACA_SSC_MARK(x) \ - { __asm mov ebx, x __asm _emit 0x64 __asm _emit 0x67 __asm _emit 0x90 } - -#define IACA_VC64_START __writegsbyte(111, 111); -#define IACA_VC64_END __writegsbyte(222, 222); -#endif - -#define IACA_START \ - { \ - IACA_UD_BYTES \ - IACA_SSC_MARK(111) \ - } -#define IACA_END \ - { \ - IACA_SSC_MARK(222) \ - IACA_UD_BYTES \ - } - -void I444ToARGBRow_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - const uint8_t* src_a, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgba, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_NEON(const uint8_t* src_y, - const 
uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb1555, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb4444, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_NEON(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_NEON(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_NEON(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_NEON(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_NEON(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_NEON(const uint8_t* src_yuy2, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_NEON(const uint8_t* src_uyvy, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); - -void I422ToARGBRow_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* 
yuvconstants, - int width); -void I422ToRGBARow_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - const uint8_t* src_a, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb4444, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb1555, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_MSA(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_MSA(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_MSA(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_MSA(const uint8_t* src_yuy2, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_MSA(const uint8_t* src_uyvy, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); - -void ARGBToYRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ARGBToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void 
ARGBToYRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ARGBToYJRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ARGBToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYJRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width); -void BGRAToYRow_SSSE3(const uint8_t* src_bgra, uint8_t* dst_y, int width); -void ABGRToYRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width); -void RGBAToYRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width); -void RGB24ToYRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width); -void RAWToYRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width); -void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ARGBToYJRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ARGBToYJRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ARGBToUV444Row_NEON(const uint8_t* src_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_NEON(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUV444Row_MSA(const uint8_t* src_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_MSA(const uint8_t* src_argb0, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUV444Row_MMI(const uint8_t* src_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_MMI(const uint8_t* src_argb0, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_NEON(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_NEON(const uint8_t* src_bgra, - int src_stride_bgra, - 
uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_NEON(const uint8_t* src_abgr, - int src_stride_abgr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_NEON(const uint8_t* src_rgba, - int src_stride_rgba, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB24ToUVRow_NEON(const uint8_t* src_rgb24, - int src_stride_rgb24, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RAWToUVRow_NEON(const uint8_t* src_raw, - int src_stride_raw, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_NEON(const uint8_t* src_rgb565, - int src_stride_rgb565, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB1555ToUVRow_NEON(const uint8_t* src_argb1555, - int src_stride_argb1555, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444, - int src_stride_argb4444, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_MSA(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_MSA(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_MSA(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_MSA(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB24ToUVRow_MSA(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RAWToUVRow_MSA(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_MSA(const uint8_t* src_rgb565, - int src_stride_rgb565, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB1555ToUVRow_MSA(const uint8_t* src_argb1555, - int src_stride_argb1555, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* 
dst_v, - int width); -void BGRAToUVRow_MMI(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_MMI(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_MMI(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RAWToUVRow_MMI(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_MMI(const uint8_t* src_rgb565, - int src_stride_rgb565, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB1555ToUVRow_MMI(const uint8_t* src_argb1555, - int src_stride_argb1555, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB4444ToUVRow_MMI(const uint8_t* src_argb4444, - int src_stride_argb4444, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width); -void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width); -void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width); -void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width); -void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width); -void RGB565ToYRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_y, int width); -void ARGB1555ToYRow_NEON(const uint8_t* src_argb1555, - uint8_t* dst_y, - int width); -void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, - uint8_t* dst_y, - int width); -void BGRAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ABGRToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGBAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGB24ToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RAWToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, 
int width); -void RGB565ToYRow_MSA(const uint8_t* src_rgb565, uint8_t* dst_y, int width); -void ARGB1555ToYRow_MSA(const uint8_t* src_argb1555, uint8_t* dst_y, int width); -void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGB565ToYRow_MMI(const uint8_t* src_rgb565, uint8_t* dst_y, int width); -void ARGB1555ToYRow_MMI(const uint8_t* src_argb1555, uint8_t* dst_y, int width); -void ARGB4444ToYRow_MMI(const uint8_t* src_argb4444, - uint8_t* dst_y, - int width); - -void ARGBToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ARGBToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void BGRAToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ABGRToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGBAToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGB24ToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RAWToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void RGB565ToYRow_C(const uint8_t* src_rgb565, uint8_t* dst_y, int width); -void ARGB1555ToYRow_C(const uint8_t* src_argb1555, uint8_t* dst_y, int width); -void ARGB4444ToYRow_C(const uint8_t* src_argb4444, uint8_t* dst_y, int width); -void ARGBToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYJRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void BGRAToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ABGRToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGBAToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB24ToYRow_Any_SSSE3(const uint8_t* 
src_rgb24, uint8_t* dst_y, int width); -void RAWToYRow_Any_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width); -void ARGBToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void BGRAToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ABGRToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGBAToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB24ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB565ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGB1555ToYRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB4444ToYRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void BGRAToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ABGRToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGBAToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYJRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB24ToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB565ToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGB1555ToYRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void BGRAToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ABGRToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGBAToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYJRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void 
ARGBToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB24ToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB565ToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGB1555ToYRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB4444ToYRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void ARGBToUVRow_AVX2(const uint8_t* src_argb0, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_SSSE3(const uint8_t* src_bgra0, - int src_stride_bgra, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0, - int src_stride_abgr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, - int src_stride_rgba, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void 
ABGRToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUV444Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUV444Row_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUV444Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB24ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RAWToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB1555ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB4444ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* 
dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB24ToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RAWToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB1555ToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB24ToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RAWToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB1555ToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int 
width); -void ARGB4444ToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVJRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ABGRToUVRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGBAToUVRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB24ToUVRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RAWToUVRow_C(const uint8_t* src_rgb0, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_C(const uint8_t* src_rgb565, - int src_stride_rgb565, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB1555ToUVRow_C(const uint8_t* src_argb1555, - int src_stride_argb1555, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB4444ToUVRow_C(const uint8_t* src_argb4444, - int src_stride_argb4444, - uint8_t* dst_u, - uint8_t* dst_v, - int width); - -void ARGBToUV444Row_SSSE3(const uint8_t* src_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUV444Row_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); - -void ARGBToUV444Row_C(const uint8_t* src_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); - -void MirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width); -void MirrorRow_SSSE3(const uint8_t* src, 
uint8_t* dst, int width); -void MirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width); -void MirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width); -void MirrorRow_MMI(const uint8_t* src, uint8_t* dst, int width); -void MirrorRow_C(const uint8_t* src, uint8_t* dst, int width); -void MirrorRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void MirrorRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void MirrorRow_Any_SSE2(const uint8_t* src, uint8_t* dst, int width); -void MirrorRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void MirrorRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void MirrorRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); - -void MirrorUVRow_SSSE3(const uint8_t* src, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_NEON(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_MSA(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_MMI(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_C(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); - -void ARGBMirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_MMI(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_C(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBMirrorRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBMirrorRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBMirrorRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); 
-void ARGBMirrorRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); - -void SplitUVRow_C(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_SSE2(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_AVX2(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_NEON(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_MSA(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_MMI(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void SplitUVRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); - -void MergeUVRow_C(const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uv, - int width); -void MergeUVRow_SSE2(const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uv, - int width); -void MergeUVRow_AVX2(const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uv, - int width); -void MergeUVRow_NEON(const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uv, - int width); -void MergeUVRow_MSA(const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uv, - int width); -void MergeUVRow_MMI(const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uv, - int width); -void MergeUVRow_Any_SSE2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void MergeUVRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void MergeUVRow_Any_NEON(const 
uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void MergeUVRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void MergeUVRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); - -void SplitRGBRow_C(const uint8_t* src_rgb, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); -void SplitRGBRow_SSSE3(const uint8_t* src_rgb, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); -void SplitRGBRow_NEON(const uint8_t* src_rgb, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); -void SplitRGBRow_MMI(const uint8_t* src_rgb, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); -void SplitRGBRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); -void SplitRGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); -void SplitRGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); - -void MergeRGBRow_C(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int width); -void MergeRGBRow_SSSE3(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int width); -void MergeRGBRow_NEON(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int width); -void MergeRGBRow_MMI(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int width); -void MergeRGBRow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void MergeRGBRow_Any_NEON(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int width); -void MergeRGBRow_Any_MMI(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int 
width); - -void MergeUVRow_16_C(const uint16_t* src_u, - const uint16_t* src_v, - uint16_t* dst_uv, - int scale, /* 64 for 10 bit */ - int width); -void MergeUVRow_16_AVX2(const uint16_t* src_u, - const uint16_t* src_v, - uint16_t* dst_uv, - int scale, - int width); - -void MultiplyRow_16_AVX2(const uint16_t* src_y, - uint16_t* dst_y, - int scale, - int width); -void MultiplyRow_16_C(const uint16_t* src_y, - uint16_t* dst_y, - int scale, - int width); - -void Convert8To16Row_C(const uint8_t* src_y, - uint16_t* dst_y, - int scale, - int width); -void Convert8To16Row_SSE2(const uint8_t* src_y, - uint16_t* dst_y, - int scale, - int width); -void Convert8To16Row_AVX2(const uint8_t* src_y, - uint16_t* dst_y, - int scale, - int width); -void Convert8To16Row_Any_SSE2(const uint8_t* src_ptr, - uint16_t* dst_ptr, - int scale, - int width); -void Convert8To16Row_Any_AVX2(const uint8_t* src_ptr, - uint16_t* dst_ptr, - int scale, - int width); - -void Convert16To8Row_C(const uint16_t* src_y, - uint8_t* dst_y, - int scale, - int width); -void Convert16To8Row_SSSE3(const uint16_t* src_y, - uint8_t* dst_y, - int scale, - int width); -void Convert16To8Row_AVX2(const uint16_t* src_y, - uint8_t* dst_y, - int scale, - int width); -void Convert16To8Row_Any_SSSE3(const uint16_t* src_ptr, - uint8_t* dst_ptr, - int scale, - int width); -void Convert16To8Row_Any_AVX2(const uint16_t* src_ptr, - uint8_t* dst_ptr, - int scale, - int width); - -void CopyRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void CopyRow_AVX(const uint8_t* src, uint8_t* dst, int width); -void CopyRow_ERMS(const uint8_t* src, uint8_t* dst, int width); -void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width); -void CopyRow_MIPS(const uint8_t* src, uint8_t* dst, int count); -void CopyRow_C(const uint8_t* src, uint8_t* dst, int count); -void CopyRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void CopyRow_Any_AVX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void 
CopyRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); - -void CopyRow_16_C(const uint16_t* src, uint16_t* dst, int count); - -void ARGBCopyAlphaRow_C(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyAlphaRow_AVX2(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyAlphaRow_MMI(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyAlphaRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBCopyAlphaRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBCopyAlphaRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void ARGBExtractAlphaRow_C(const uint8_t* src_argb, uint8_t* dst_a, int width); -void ARGBExtractAlphaRow_SSE2(const uint8_t* src_argb, - uint8_t* dst_a, - int width); -void ARGBExtractAlphaRow_AVX2(const uint8_t* src_argb, - uint8_t* dst_a, - int width); -void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, - uint8_t* dst_a, - int width); -void ARGBExtractAlphaRow_MSA(const uint8_t* src_argb, - uint8_t* dst_a, - int width); -void ARGBExtractAlphaRow_MMI(const uint8_t* src_argb, - uint8_t* dst_a, - int width); -void ARGBExtractAlphaRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBExtractAlphaRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBExtractAlphaRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBExtractAlphaRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBExtractAlphaRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void ARGBCopyYToAlphaRow_C(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyYToAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyYToAlphaRow_AVX2(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyYToAlphaRow_MMI(const uint8_t* src, uint8_t* dst, int width); -void 
ARGBCopyYToAlphaRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBCopyYToAlphaRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBCopyYToAlphaRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void SetRow_C(uint8_t* dst, uint8_t v8, int width); -void SetRow_MSA(uint8_t* dst, uint8_t v8, int width); -void SetRow_X86(uint8_t* dst, uint8_t v8, int width); -void SetRow_ERMS(uint8_t* dst, uint8_t v8, int width); -void SetRow_NEON(uint8_t* dst, uint8_t v8, int width); -void SetRow_Any_X86(uint8_t* dst_ptr, uint8_t v32, int width); -void SetRow_Any_NEON(uint8_t* dst_ptr, uint8_t v32, int width); - -void ARGBSetRow_C(uint8_t* dst_argb, uint32_t v32, int width); -void ARGBSetRow_X86(uint8_t* dst_argb, uint32_t v32, int width); -void ARGBSetRow_NEON(uint8_t* dst, uint32_t v32, int width); -void ARGBSetRow_Any_NEON(uint8_t* dst_ptr, uint32_t v32, int width); -void ARGBSetRow_MSA(uint8_t* dst_argb, uint32_t v32, int width); -void ARGBSetRow_Any_MSA(uint8_t* dst_ptr, uint32_t v32, int width); - -// ARGBShufflers for BGRAToARGB etc. 
-void ARGBShuffleRow_C(const uint8_t* src_argb, - uint8_t* dst_argb, - const uint8_t* shuffler, - int width); -void ARGBShuffleRow_SSSE3(const uint8_t* src_argb, - uint8_t* dst_argb, - const uint8_t* shuffler, - int width); -void ARGBShuffleRow_AVX2(const uint8_t* src_argb, - uint8_t* dst_argb, - const uint8_t* shuffler, - int width); -void ARGBShuffleRow_NEON(const uint8_t* src_argb, - uint8_t* dst_argb, - const uint8_t* shuffler, - int width); -void ARGBShuffleRow_MSA(const uint8_t* src_argb, - uint8_t* dst_argb, - const uint8_t* shuffler, - int width); -void ARGBShuffleRow_MMI(const uint8_t* src_argb, - uint8_t* dst_argb, - const uint8_t* shuffler, - int width); -void ARGBShuffleRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint8_t* param, - int width); -void ARGBShuffleRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint8_t* param, - int width); -void ARGBShuffleRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint8_t* param, - int width); -void ARGBShuffleRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint8_t* param, - int width); -void ARGBShuffleRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint8_t* param, - int width); - -void RGB24ToARGBRow_SSSE3(const uint8_t* src_rgb24, - uint8_t* dst_argb, - int width); -void RAWToARGBRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_argb, int width); -void RAWToRGB24Row_SSSE3(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); -void RGB565ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGB1555ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGB4444ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void RGB565ToARGBRow_AVX2(const uint8_t* src_rgb565, - uint8_t* dst_argb, - int width); -void ARGB1555ToARGBRow_AVX2(const uint8_t* src_argb1555, - uint8_t* dst_argb, - int width); -void ARGB4444ToARGBRow_AVX2(const uint8_t* src_argb4444, - uint8_t* dst_argb, - int width); - -void 
RGB24ToARGBRow_NEON(const uint8_t* src_rgb24, - uint8_t* dst_argb, - int width); -void RGB24ToARGBRow_MSA(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); -void RGB24ToARGBRow_MMI(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); -void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width); -void RAWToARGBRow_MSA(const uint8_t* src_raw, uint8_t* dst_argb, int width); -void RAWToARGBRow_MMI(const uint8_t* src_raw, uint8_t* dst_argb, int width); -void RAWToRGB24Row_NEON(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); -void RAWToRGB24Row_MSA(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); -void RAWToRGB24Row_MMI(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); -void RGB565ToARGBRow_NEON(const uint8_t* src_rgb565, - uint8_t* dst_argb, - int width); -void RGB565ToARGBRow_MSA(const uint8_t* src_rgb565, - uint8_t* dst_argb, - int width); -void RGB565ToARGBRow_MMI(const uint8_t* src_rgb565, - uint8_t* dst_argb, - int width); -void ARGB1555ToARGBRow_NEON(const uint8_t* src_argb1555, - uint8_t* dst_argb, - int width); -void ARGB1555ToARGBRow_MSA(const uint8_t* src_argb1555, - uint8_t* dst_argb, - int width); -void ARGB1555ToARGBRow_MMI(const uint8_t* src_argb1555, - uint8_t* dst_argb, - int width); -void ARGB4444ToARGBRow_NEON(const uint8_t* src_argb4444, - uint8_t* dst_argb, - int width); -void ARGB4444ToARGBRow_MSA(const uint8_t* src_argb4444, - uint8_t* dst_argb, - int width); -void ARGB4444ToARGBRow_MMI(const uint8_t* src_argb4444, - uint8_t* dst_argb, - int width); -void RGB24ToARGBRow_C(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); -void RAWToARGBRow_C(const uint8_t* src_raw, uint8_t* dst_argb, int width); -void RAWToRGB24Row_C(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); -void RGB565ToARGBRow_C(const uint8_t* src_rgb565, uint8_t* dst_argb, int width); -void ARGB1555ToARGBRow_C(const uint8_t* src_argb1555, - uint8_t* dst_argb, - int width); -void ARGB4444ToARGBRow_C(const uint8_t* 
src_argb4444, - uint8_t* dst_argb, - int width); -void AR30ToARGBRow_C(const uint8_t* src_ar30, uint8_t* dst_argb, int width); -void AR30ToABGRRow_C(const uint8_t* src_ar30, uint8_t* dst_abgr, int width); -void ARGBToAR30Row_C(const uint8_t* src_argb, uint8_t* dst_ar30, int width); -void AR30ToAB30Row_C(const uint8_t* src_ar30, uint8_t* dst_ab30, int width); - -void RGB24ToARGBRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RAWToARGBRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RAWToRGB24Row_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void RGB565ToARGBRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB1555ToARGBRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB4444ToARGBRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RGB565ToARGBRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB1555ToARGBRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB4444ToARGBRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void RGB24ToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RGB24ToARGBRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RGB24ToARGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RAWToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToARGBRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToRGB24Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RAWToRGB24Row_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToRGB24Row_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB565ToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* 
dst_ptr, - int width); -void RGB565ToARGBRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void RGB565ToARGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB1555ToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB1555ToARGBRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB1555ToARGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB4444ToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void ARGB4444ToARGBRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB4444ToARGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void ARGBToRGB24Row_SSSE3(const uint8_t* src, uint8_t* dst, int width); -void ARGBToRAWRow_SSSE3(const uint8_t* src, uint8_t* dst, int width); -void ARGBToRGB565Row_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGBToARGB1555Row_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGBToARGB4444Row_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ABGRToAR30Row_SSSE3(const uint8_t* src, uint8_t* dst, int width); -void ARGBToAR30Row_SSSE3(const uint8_t* src, uint8_t* dst, int width); - -void ARGBToRAWRow_AVX2(const uint8_t* src, uint8_t* dst, int width); -void ARGBToRGB24Row_AVX2(const uint8_t* src, uint8_t* dst, int width); - -void ARGBToRGB24Row_AVX512VBMI(const uint8_t* src, uint8_t* dst, int width); - -void ARGBToRGB565DitherRow_C(const uint8_t* src_argb, - uint8_t* dst_rgb, - const uint32_t dither4, - int width); -void ARGBToRGB565DitherRow_SSE2(const uint8_t* src, - uint8_t* dst, - const uint32_t dither4, - int width); -void ARGBToRGB565DitherRow_AVX2(const uint8_t* src, - uint8_t* dst, - const uint32_t dither4, - int width); - -void ARGBToRGB565Row_AVX2(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToARGB1555Row_AVX2(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void 
ARGBToARGB4444Row_AVX2(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width); -void ARGBToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width); - -void ARGBToRGB24Row_NEON(const uint8_t* src_argb, - uint8_t* dst_rgb24, - int width); -void ARGBToRAWRow_NEON(const uint8_t* src_argb, uint8_t* dst_raw, int width); -void ARGBToRGB565Row_NEON(const uint8_t* src_argb, - uint8_t* dst_rgb565, - int width); -void ARGBToARGB1555Row_NEON(const uint8_t* src_argb, - uint8_t* dst_argb1555, - int width); -void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, - uint8_t* dst_argb4444, - int width); -void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb, - uint8_t* dst_rgb, - const uint32_t dither4, - int width); -void ARGBToRGB24Row_MSA(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRAWRow_MSA(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRGB565Row_MSA(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToARGB1555Row_MSA(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void ARGBToARGB4444Row_MSA(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void ARGBToRGB565DitherRow_MSA(const uint8_t* src_argb, - uint8_t* dst_rgb, - const uint32_t dither4, - int width); - -void ARGBToRGB24Row_MMI(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRAWRow_MMI(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRGB565Row_MMI(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToARGB1555Row_MMI(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void ARGBToARGB4444Row_MMI(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void ARGBToRGB565DitherRow_MMI(const uint8_t* src_argb, - uint8_t* dst_rgb, - const uint32_t dither4, - int width); - -void ARGBToRGBARow_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRGB24Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int 
width); -void ARGBToRAWRow_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRGB565Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToARGB1555Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToARGB4444Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ABGRToAR30Row_C(const uint8_t* src_abgr, uint8_t* dst_ar30, int width); -void ARGBToAR30Row_C(const uint8_t* src_argb, uint8_t* dst_ar30, int width); - -void J400ToARGBRow_SSE2(const uint8_t* src_y, uint8_t* dst_argb, int width); -void J400ToARGBRow_AVX2(const uint8_t* src_y, uint8_t* dst_argb, int width); -void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width); -void J400ToARGBRow_MSA(const uint8_t* src_y, uint8_t* dst_argb, int width); -void J400ToARGBRow_MMI(const uint8_t* src_y, uint8_t* dst_argb, int width); -void J400ToARGBRow_C(const uint8_t* src_y, uint8_t* dst_argb, int width); -void J400ToARGBRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void J400ToARGBRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void J400ToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void J400ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void J400ToARGBRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); - -void I444ToARGBRow_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I422ToAR30Row_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I210ToAR30Row_C(const uint16_t* src_y, - const uint16_t* src_u, - const uint16_t* src_v, - uint8_t* rgb_buf, - 
const struct YuvConstants* yuvconstants, - int width); -void I210ToARGBRow_C(const uint16_t* src_y, - const uint16_t* src_u, - const uint16_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - const uint8_t* src_a, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_C(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_C(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_C(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_C(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_C(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_C(const uint8_t* src_yuy2, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_C(const uint8_t* src_uyvy, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb4444, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_C(const uint8_t* src_y, - const uint8_t* src_u, 
- const uint8_t* src_v, - uint8_t* dst_argb1555, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); - -void I422ToAR30Row_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ar30, - const struct YuvConstants* yuvconstants, - int width); -void I210ToAR30Row_SSSE3(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_ar30, - const struct YuvConstants* yuvconstants, - int width); -void I210ToARGBRow_SSSE3(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* 
yuvconstants, - int width); -void I422ToAR30Row_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ar30, - const struct YuvConstants* yuvconstants, - int width); -void I210ToARGBRow_AVX2(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I210ToAR30Row_AVX2(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_ar30, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_SSSE3(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_SSSE3(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_SSSE3(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_SSSE3(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_AVX2(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_AVX2(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_rgb24, - const struct YuvConstants* 
yuvconstants, - int width); -void NV12ToRGB565Row_AVX2(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_SSSE3(const uint8_t* y_buf, - const uint8_t* vu_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_AVX2(const uint8_t* y_buf, - const uint8_t* vu_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_SSSE3(const uint8_t* yuy2_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_SSSE3(const uint8_t* uyvy_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_AVX2(const uint8_t* yuy2_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_AVX2(const uint8_t* uyvy_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_rgba, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_SSSE3(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb4444, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_AVX2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb4444, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_SSSE3(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb1555, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_AVX2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb1555, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_SSSE3(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - 
uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_AVX2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_AVX2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToAR30Row_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I210ToAR30Row_Any_SSSE3(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I210ToARGBRow_Any_SSSE3(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* 
yuvconstants, - int width); -void I422ToAR30Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I210ToARGBRow_Any_AVX2(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I210ToAR30Row_Any_AVX2(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - 
const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_Any_SSSE3(const uint8_t* y_buf, - 
const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); - -void I400ToARGBRow_C(const uint8_t* src_y, uint8_t* rgb_buf, int width); -void I400ToARGBRow_SSE2(const uint8_t* y_buf, uint8_t* dst_argb, int width); -void I400ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* dst_argb, int width); -void I400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width); -void I400ToARGBRow_MSA(const uint8_t* src_y, uint8_t* dst_argb, int width); -void I400ToARGBRow_MMI(const uint8_t* src_y, uint8_t* dst_argb, int width); -void I400ToARGBRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void I400ToARGBRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void I400ToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void I400ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void I400ToARGBRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); - -// ARGB preattenuated alpha blend. 
-void ARGBBlendRow_SSSE3(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBBlendRow_NEON(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBBlendRow_MSA(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBBlendRow_MMI(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBBlendRow_C(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); - -// Unattenuated planar alpha blend. -void BlendPlaneRow_SSSE3(const uint8_t* src0, - const uint8_t* src1, - const uint8_t* alpha, - uint8_t* dst, - int width); -void BlendPlaneRow_Any_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void BlendPlaneRow_AVX2(const uint8_t* src0, - const uint8_t* src1, - const uint8_t* alpha, - uint8_t* dst, - int width); -void BlendPlaneRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void BlendPlaneRow_MMI(const uint8_t* src0, - const uint8_t* src1, - const uint8_t* alpha, - uint8_t* dst, - int width); -void BlendPlaneRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void BlendPlaneRow_C(const uint8_t* src0, - const uint8_t* src1, - const uint8_t* alpha, - uint8_t* dst, - int width); - -// ARGB multiply images. Same API as Blend, but these require -// pointer and width alignment for SSE2. 
-void ARGBMultiplyRow_C(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBMultiplyRow_Any_SSE2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBMultiplyRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBMultiplyRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBMultiplyRow_MSA(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBMultiplyRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBMultiplyRow_MMI(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBMultiplyRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); - -// ARGB add images. 
-void ARGBAddRow_C(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBAddRow_SSE2(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBAddRow_Any_SSE2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBAddRow_AVX2(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBAddRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBAddRow_NEON(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBAddRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBAddRow_MSA(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBAddRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBAddRow_MMI(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBAddRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); - -// ARGB subtract images. Same API as Blend, but these require -// pointer and width alignment for SSE2. 
-void ARGBSubtractRow_C(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBSubtractRow_Any_SSE2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBSubtractRow_AVX2(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBSubtractRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBSubtractRow_NEON(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBSubtractRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBSubtractRow_MSA(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBSubtractRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void ARGBSubtractRow_MMI(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBSubtractRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); - -void ARGBToRGB24Row_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRAWRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRGB565Row_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB1555Row_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB4444Row_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ABGRToAR30Row_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToAR30Row_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRAWRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToRGB24Row_Any_AVX2(const 
uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRGB24Row_Any_AVX512VBMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRGB565DitherRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint32_t param, - int width); -void ARGBToRGB565DitherRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint32_t param, - int width); - -void ARGBToRGB565Row_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB1555Row_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB4444Row_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ABGRToAR30Row_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToAR30Row_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void ARGBToRGB24Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRAWRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToRGB565Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB1555Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB4444Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRGB565DitherRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint32_t param, - int width); -void ARGBToRGB24Row_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRAWRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToRGB565Row_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB1555Row_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB4444Row_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRGB565DitherRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint32_t param, - int width); - -void ARGBToRGB24Row_Any_MMI(const uint8_t* 
src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRAWRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToRGB565Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB1555Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB4444Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRGB565DitherRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint32_t param, - int width); - -void I444ToARGBRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_Any_NEON(const 
uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I444ToARGBRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGBRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, 
- uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); - -void YUY2ToYRow_AVX2(const uint8_t* src_yuy2, uint8_t* dst_y, int width); -void YUY2ToUVRow_AVX2(const uint8_t* src_yuy2, - int stride_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_AVX2(const uint8_t* src_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToYRow_SSE2(const uint8_t* src_yuy2, uint8_t* dst_y, int width); -void YUY2ToUVRow_SSE2(const uint8_t* src_yuy2, - int stride_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_SSE2(const uint8_t* src_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToYRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_y, int width); -void YUY2ToUVRow_NEON(const uint8_t* src_yuy2, - int stride_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_NEON(const uint8_t* src_yuy2, - uint8_t* dst_u, 
- uint8_t* dst_v, - int width); -void YUY2ToYRow_MSA(const uint8_t* src_yuy2, uint8_t* dst_y, int width); -void YUY2ToYRow_MMI(const uint8_t* src_yuy2, uint8_t* dst_y, int width); -void YUY2ToUVRow_MSA(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUVRow_MMI(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_MSA(const uint8_t* src_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_MMI(const uint8_t* src_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToYRow_C(const uint8_t* src_yuy2, uint8_t* dst_y, int width); -void YUY2ToUVRow_C(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_C(const uint8_t* src_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void YUY2ToUVRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToYRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void YUY2ToUVRow_Any_SSE2(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void YUY2ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void YUY2ToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void 
YUY2ToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void YUY2ToUV422Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_AVX2(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToUVRow_AVX2(const uint8_t* src_uyvy, - int stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_AVX2(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_SSE2(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToUVRow_SSE2(const uint8_t* src_uyvy, - int stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_SSE2(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_AVX2(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToUVRow_AVX2(const uint8_t* src_uyvy, - int stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_AVX2(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_NEON(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToUVRow_NEON(const uint8_t* src_uyvy, - int stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_NEON(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_MSA(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToYRow_MMI(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToUVRow_MSA(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUVRow_MMI(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int 
width); -void UYVYToUV422Row_MSA(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_MMI(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); - -void UYVYToYRow_C(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToUVRow_C(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_C(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void UYVYToUVRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void UYVYToUVRow_Any_SSE2(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void UYVYToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void UYVYToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void UYVYToUVRow_Any_MSA(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void UYVYToUV422Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_u, - 
uint8_t* dst_v, - int width); - -void I422ToYUY2Row_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_frame, - int width); -void I422ToUYVYRow_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_frame, - int width); -void I422ToYUY2Row_SSE2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_yuy2, - int width); -void I422ToUYVYRow_SSE2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uyvy, - int width); -void I422ToYUY2Row_Any_SSE2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToUYVYRow_Any_SSE2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToYUY2Row_AVX2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_yuy2, - int width); -void I422ToUYVYRow_AVX2(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uyvy, - int width); -void I422ToYUY2Row_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToUYVYRow_Any_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToYUY2Row_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_yuy2, - int width); -void I422ToUYVYRow_NEON(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uyvy, - int width); -void I422ToYUY2Row_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToUYVYRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToYUY2Row_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_yuy2, - int width); 
-void I422ToYUY2Row_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_yuy2, - int width); -void I422ToUYVYRow_MSA(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uyvy, - int width); -void I422ToUYVYRow_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uyvy, - int width); -void I422ToYUY2Row_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToYUY2Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToUYVYRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); -void I422ToUYVYRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); - -// Effects related row functions. -void ARGBAttenuateRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width); -void ARGBAttenuateRow_SSSE3(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBAttenuateRow_AVX2(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBAttenuateRow_NEON(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBAttenuateRow_MSA(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBAttenuateRow_MMI(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBAttenuateRow_Any_SSSE3(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBAttenuateRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBAttenuateRow_Any_NEON(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBAttenuateRow_Any_MSA(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBAttenuateRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -// Inverse table for unattenuate, shared by C and SSE2. 
-extern const uint32_t fixed_invtbl8[256]; -void ARGBUnattenuateRow_C(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBUnattenuateRow_SSE2(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBUnattenuateRow_AVX2(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); -void ARGBUnattenuateRow_Any_SSE2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBUnattenuateRow_Any_AVX2(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); - -void ARGBGrayRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width); -void ARGBGrayRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_argb, int width); -void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width); -void ARGBGrayRow_MSA(const uint8_t* src_argb, uint8_t* dst_argb, int width); -void ARGBGrayRow_MMI(const uint8_t* src_argb, uint8_t* dst_argb, int width); - -void ARGBSepiaRow_C(uint8_t* dst_argb, int width); -void ARGBSepiaRow_SSSE3(uint8_t* dst_argb, int width); -void ARGBSepiaRow_NEON(uint8_t* dst_argb, int width); -void ARGBSepiaRow_MSA(uint8_t* dst_argb, int width); -void ARGBSepiaRow_MMI(uint8_t* dst_argb, int width); - -void ARGBColorMatrixRow_C(const uint8_t* src_argb, - uint8_t* dst_argb, - const int8_t* matrix_argb, - int width); -void ARGBColorMatrixRow_SSSE3(const uint8_t* src_argb, - uint8_t* dst_argb, - const int8_t* matrix_argb, - int width); -void ARGBColorMatrixRow_NEON(const uint8_t* src_argb, - uint8_t* dst_argb, - const int8_t* matrix_argb, - int width); -void ARGBColorMatrixRow_MSA(const uint8_t* src_argb, - uint8_t* dst_argb, - const int8_t* matrix_argb, - int width); -void ARGBColorMatrixRow_MMI(const uint8_t* src_argb, - uint8_t* dst_argb, - const int8_t* matrix_argb, - int width); - -void ARGBColorTableRow_C(uint8_t* dst_argb, - const uint8_t* table_argb, - int width); -void ARGBColorTableRow_X86(uint8_t* dst_argb, - const uint8_t* table_argb, - int width); - -void RGBColorTableRow_C(uint8_t* dst_argb, - const uint8_t* 
table_argb, - int width); -void RGBColorTableRow_X86(uint8_t* dst_argb, - const uint8_t* table_argb, - int width); - -void ARGBQuantizeRow_C(uint8_t* dst_argb, - int scale, - int interval_size, - int interval_offset, - int width); -void ARGBQuantizeRow_SSE2(uint8_t* dst_argb, - int scale, - int interval_size, - int interval_offset, - int width); -void ARGBQuantizeRow_NEON(uint8_t* dst_argb, - int scale, - int interval_size, - int interval_offset, - int width); -void ARGBQuantizeRow_MSA(uint8_t* dst_argb, - int scale, - int interval_size, - int interval_offset, - int width); - -void ARGBShadeRow_C(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - uint32_t value); -void ARGBShadeRow_SSE2(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - uint32_t value); -void ARGBShadeRow_NEON(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - uint32_t value); -void ARGBShadeRow_MSA(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - uint32_t value); -void ARGBShadeRow_MMI(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - uint32_t value); - -// Used for blur. 
-void CumulativeSumToAverageRow_SSE2(const int32_t* topleft, - const int32_t* botleft, - int width, - int area, - uint8_t* dst, - int count); -void ComputeCumulativeSumRow_SSE2(const uint8_t* row, - int32_t* cumsum, - const int32_t* previous_cumsum, - int width); - -void ComputeCumulativeSumRow_MMI(const uint8_t* row, - int32_t* cumsum, - const int32_t* previous_cumsum, - int width); - -void CumulativeSumToAverageRow_C(const int32_t* tl, - const int32_t* bl, - int w, - int area, - uint8_t* dst, - int count); -void ComputeCumulativeSumRow_C(const uint8_t* row, - int32_t* cumsum, - const int32_t* previous_cumsum, - int width); - -LIBYUV_API -void ARGBAffineRow_C(const uint8_t* src_argb, - int src_argb_stride, - uint8_t* dst_argb, - const float* uv_dudv, - int width); -LIBYUV_API -void ARGBAffineRow_SSE2(const uint8_t* src_argb, - int src_argb_stride, - uint8_t* dst_argb, - const float* src_dudv, - int width); - -// Used for I420Scale, ARGBScale, and ARGBInterpolate. -void InterpolateRow_C(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride, - int width, - int source_y_fraction); -void InterpolateRow_SSSE3(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride, - int dst_width, - int source_y_fraction); -void InterpolateRow_AVX2(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride, - int dst_width, - int source_y_fraction); -void InterpolateRow_NEON(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride, - int dst_width, - int source_y_fraction); -void InterpolateRow_MSA(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride, - int width, - int source_y_fraction); -void InterpolateRow_MMI(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride, - int width, - int source_y_fraction); -void InterpolateRow_Any_NEON(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride_ptr, - int width, - int source_y_fraction); -void InterpolateRow_Any_SSSE3(uint8_t* dst_ptr, - const uint8_t* 
src_ptr, - ptrdiff_t src_stride_ptr, - int width, - int source_y_fraction); -void InterpolateRow_Any_AVX2(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride_ptr, - int width, - int source_y_fraction); -void InterpolateRow_Any_MSA(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride_ptr, - int width, - int source_y_fraction); -void InterpolateRow_Any_MMI(uint8_t* dst_ptr, - const uint8_t* src_ptr, - ptrdiff_t src_stride_ptr, - int width, - int source_y_fraction); - -void InterpolateRow_16_C(uint16_t* dst_ptr, - const uint16_t* src_ptr, - ptrdiff_t src_stride, - int width, - int source_y_fraction); - -// Sobel images. -void SobelXRow_C(const uint8_t* src_y0, - const uint8_t* src_y1, - const uint8_t* src_y2, - uint8_t* dst_sobelx, - int width); -void SobelXRow_SSE2(const uint8_t* src_y0, - const uint8_t* src_y1, - const uint8_t* src_y2, - uint8_t* dst_sobelx, - int width); -void SobelXRow_NEON(const uint8_t* src_y0, - const uint8_t* src_y1, - const uint8_t* src_y2, - uint8_t* dst_sobelx, - int width); -void SobelXRow_MSA(const uint8_t* src_y0, - const uint8_t* src_y1, - const uint8_t* src_y2, - uint8_t* dst_sobelx, - int width); -void SobelXRow_MMI(const uint8_t* src_y0, - const uint8_t* src_y1, - const uint8_t* src_y2, - uint8_t* dst_sobelx, - int width); -void SobelYRow_C(const uint8_t* src_y0, - const uint8_t* src_y1, - uint8_t* dst_sobely, - int width); -void SobelYRow_SSE2(const uint8_t* src_y0, - const uint8_t* src_y1, - uint8_t* dst_sobely, - int width); -void SobelYRow_NEON(const uint8_t* src_y0, - const uint8_t* src_y1, - uint8_t* dst_sobely, - int width); -void SobelYRow_MSA(const uint8_t* src_y0, - const uint8_t* src_y1, - uint8_t* dst_sobely, - int width); -void SobelYRow_MMI(const uint8_t* src_y0, - const uint8_t* src_y1, - uint8_t* dst_sobely, - int width); -void SobelRow_C(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelRow_SSE2(const uint8_t* src_sobelx, - const 
uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelRow_NEON(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelRow_MSA(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelRow_MMI(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelToPlaneRow_C(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_y, - int width); -void SobelToPlaneRow_SSE2(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_y, - int width); -void SobelToPlaneRow_NEON(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_y, - int width); -void SobelToPlaneRow_MSA(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_y, - int width); -void SobelToPlaneRow_MMI(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_y, - int width); -void SobelXYRow_C(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelXYRow_SSE2(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelXYRow_NEON(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelXYRow_MSA(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelXYRow_MMI(const uint8_t* src_sobelx, - const uint8_t* src_sobely, - uint8_t* dst_argb, - int width); -void SobelRow_Any_SSE2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelToPlaneRow_Any_SSE2(const uint8_t* y_buf, - const 
uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelToPlaneRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelToPlaneRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelToPlaneRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelXYRow_Any_SSE2(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelXYRow_Any_NEON(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelXYRow_Any_MSA(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); -void SobelXYRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); - -void ARGBPolynomialRow_C(const uint8_t* src_argb, - uint8_t* dst_argb, - const float* poly, - int width); -void ARGBPolynomialRow_SSE2(const uint8_t* src_argb, - uint8_t* dst_argb, - const float* poly, - int width); -void ARGBPolynomialRow_AVX2(const uint8_t* src_argb, - uint8_t* dst_argb, - const float* poly, - int width); - -// Scale and convert to half float. 
-void HalfFloatRow_C(const uint16_t* src, uint16_t* dst, float scale, int width); -void HalfFloatRow_SSE2(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloatRow_Any_SSE2(const uint16_t* src_ptr, - uint16_t* dst_ptr, - float param, - int width); -void HalfFloatRow_AVX2(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloatRow_Any_AVX2(const uint16_t* src_ptr, - uint16_t* dst_ptr, - float param, - int width); -void HalfFloatRow_F16C(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloatRow_Any_F16C(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloat1Row_F16C(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloat1Row_Any_F16C(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloatRow_NEON(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloatRow_Any_NEON(const uint16_t* src_ptr, - uint16_t* dst_ptr, - float param, - int width); -void HalfFloat1Row_NEON(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloat1Row_Any_NEON(const uint16_t* src_ptr, - uint16_t* dst_ptr, - float param, - int width); -void HalfFloatRow_MSA(const uint16_t* src, - uint16_t* dst, - float scale, - int width); -void HalfFloatRow_Any_MSA(const uint16_t* src_ptr, - uint16_t* dst_ptr, - float param, - int width); -void ByteToFloatRow_C(const uint8_t* src, float* dst, float scale, int width); -void ByteToFloatRow_NEON(const uint8_t* src, - float* dst, - float scale, - int width); -void ByteToFloatRow_Any_NEON(const uint8_t* src_ptr, - float* dst_ptr, - float param, - int width); - -void ARGBLumaColorTableRow_C(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - const uint8_t* luma, - uint32_t lumacoeff); -void ARGBLumaColorTableRow_SSSE3(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - const uint8_t* luma, - uint32_t lumacoeff); - -float 
ScaleMaxSamples_C(const float* src, float* dst, float scale, int width); -float ScaleMaxSamples_NEON(const float* src, - float* dst, - float scale, - int width); -float ScaleSumSamples_C(const float* src, float* dst, float scale, int width); -float ScaleSumSamples_NEON(const float* src, - float* dst, - float scale, - int width); -void ScaleSamples_C(const float* src, float* dst, float scale, int width); -void ScaleSamples_NEON(const float* src, float* dst, float scale, int width); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_ROW_H_ diff --git a/macos/third_party/include/libyuv/scale.h b/macos/third_party/include/libyuv/scale.h deleted file mode 100644 index b937d348ca..0000000000 --- a/macos/third_party/include/libyuv/scale.h +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_SCALE_H_ -#define INCLUDE_LIBYUV_SCALE_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -// Supported filtering. -typedef enum FilterMode { - kFilterNone = 0, // Point sample; Fastest. - kFilterLinear = 1, // Filter horizontally only. - kFilterBilinear = 2, // Faster than box, but lower quality scaling down. - kFilterBox = 3 // Highest quality. -} FilterModeEnum; - -// Scale a YUV plane. 
-LIBYUV_API -void ScalePlane(const uint8_t* src, - int src_stride, - int src_width, - int src_height, - uint8_t* dst, - int dst_stride, - int dst_width, - int dst_height, - enum FilterMode filtering); - -LIBYUV_API -void ScalePlane_16(const uint16_t* src, - int src_stride, - int src_width, - int src_height, - uint16_t* dst, - int dst_stride, - int dst_width, - int dst_height, - enum FilterMode filtering); - -// Scales a YUV 4:2:0 image from the src width and height to the -// dst width and height. -// If filtering is kFilterNone, a simple nearest-neighbor algorithm is -// used. This produces basic (blocky) quality at the fastest speed. -// If filtering is kFilterBilinear, interpolation is used to produce a better -// quality image, at the expense of speed. -// If filtering is kFilterBox, averaging is used to produce ever better -// quality image, at further expense of speed. -// Returns 0 if successful. - -LIBYUV_API -int I420Scale(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - int src_width, - int src_height, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int dst_width, - int dst_height, - enum FilterMode filtering); - -LIBYUV_API -int I420Scale_16(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - int src_width, - int src_height, - uint16_t* dst_y, - int dst_stride_y, - uint16_t* dst_u, - int dst_stride_u, - uint16_t* dst_v, - int dst_stride_v, - int dst_width, - int dst_height, - enum FilterMode filtering); - -#ifdef __cplusplus -// Legacy API. Deprecated. 
-LIBYUV_API -int Scale(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - int src_stride_y, - int src_stride_u, - int src_stride_v, - int src_width, - int src_height, - uint8_t* dst_y, - uint8_t* dst_u, - uint8_t* dst_v, - int dst_stride_y, - int dst_stride_u, - int dst_stride_v, - int dst_width, - int dst_height, - LIBYUV_BOOL interpolate); - -// For testing, allow disabling of specialized scalers. -LIBYUV_API -void SetUseReferenceImpl(LIBYUV_BOOL use); -#endif // __cplusplus - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_SCALE_H_ diff --git a/macos/third_party/include/libyuv/scale_argb.h b/macos/third_party/include/libyuv/scale_argb.h deleted file mode 100644 index 7641f18e34..0000000000 --- a/macos/third_party/include/libyuv/scale_argb.h +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2012 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef INCLUDE_LIBYUV_SCALE_ARGB_H_ -#define INCLUDE_LIBYUV_SCALE_ARGB_H_ - -#include "libyuv/basic_types.h" -#include "libyuv/scale.h" // For FilterMode - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -LIBYUV_API -int ARGBScale(const uint8_t* src_argb, - int src_stride_argb, - int src_width, - int src_height, - uint8_t* dst_argb, - int dst_stride_argb, - int dst_width, - int dst_height, - enum FilterMode filtering); - -// Clipped scale takes destination rectangle coordinates for clip values. 
-LIBYUV_API -int ARGBScaleClip(const uint8_t* src_argb, - int src_stride_argb, - int src_width, - int src_height, - uint8_t* dst_argb, - int dst_stride_argb, - int dst_width, - int dst_height, - int clip_x, - int clip_y, - int clip_width, - int clip_height, - enum FilterMode filtering); - -// Scale with YUV conversion to ARGB and clipping. -LIBYUV_API -int YUVToARGBScaleClip(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint32_t src_fourcc, - int src_width, - int src_height, - uint8_t* dst_argb, - int dst_stride_argb, - uint32_t dst_fourcc, - int dst_width, - int dst_height, - int clip_x, - int clip_y, - int clip_width, - int clip_height, - enum FilterMode filtering); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_SCALE_ARGB_H_ diff --git a/macos/third_party/include/libyuv/scale_row.h b/macos/third_party/include/libyuv/scale_row.h deleted file mode 100644 index 282d5216ff..0000000000 --- a/macos/third_party/include/libyuv/scale_row.h +++ /dev/null @@ -1,1106 +0,0 @@ -/* - * Copyright 2013 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef INCLUDE_LIBYUV_SCALE_ROW_H_ -#define INCLUDE_LIBYUV_SCALE_ROW_H_ - -#include "libyuv/basic_types.h" -#include "libyuv/scale.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -#if defined(__pnacl__) || defined(__CLR_VER) || \ - (defined(__native_client__) && defined(__x86_64__)) || \ - (defined(__i386__) && !defined(__SSE__) && !defined(__clang__)) -#define LIBYUV_DISABLE_X86 -#endif -#if defined(__native_client__) -#define LIBYUV_DISABLE_NEON -#endif -// MemorySanitizer does not support assembly code yet. http://crbug.com/344505 -#if defined(__has_feature) -#if __has_feature(memory_sanitizer) -#define LIBYUV_DISABLE_X86 -#endif -#endif -// GCC >= 4.7.0 required for AVX2. -#if defined(__GNUC__) && (defined(__x86_64__) || defined(__i386__)) -#if (__GNUC__ > 4) || (__GNUC__ == 4 && (__GNUC_MINOR__ >= 7)) -#define GCC_HAS_AVX2 1 -#endif // GNUC >= 4.7 -#endif // __GNUC__ - -// clang >= 3.4.0 required for AVX2. -#if defined(__clang__) && (defined(__x86_64__) || defined(__i386__)) -#if (__clang_major__ > 3) || (__clang_major__ == 3 && (__clang_minor__ >= 4)) -#define CLANG_HAS_AVX2 1 -#endif // clang >= 3.4 -#endif // __clang__ - -// Visual C 2012 required for AVX2. 
-#if defined(_M_IX86) && !defined(__clang__) && defined(_MSC_VER) && \ - _MSC_VER >= 1700 -#define VISUALC_HAS_AVX2 1 -#endif // VisualStudio >= 2012 - -// The following are available on all x86 platforms: -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(_M_IX86) || defined(__x86_64__) || defined(__i386__)) -#define HAS_FIXEDDIV1_X86 -#define HAS_FIXEDDIV_X86 -#define HAS_SCALEARGBCOLS_SSE2 -#define HAS_SCALEARGBCOLSUP2_SSE2 -#define HAS_SCALEARGBFILTERCOLS_SSSE3 -#define HAS_SCALEARGBROWDOWN2_SSE2 -#define HAS_SCALEARGBROWDOWNEVEN_SSE2 -#define HAS_SCALECOLSUP2_SSE2 -#define HAS_SCALEFILTERCOLS_SSSE3 -#define HAS_SCALEROWDOWN2_SSSE3 -#define HAS_SCALEROWDOWN34_SSSE3 -#define HAS_SCALEROWDOWN38_SSSE3 -#define HAS_SCALEROWDOWN4_SSSE3 -#define HAS_SCALEADDROW_SSE2 -#endif - -// The following are available on all x86 platforms, but -// require VS2012, clang 3.4 or gcc 4.7. -// The code supports NaCL but requires a new compiler and validator. -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2) || \ - defined(GCC_HAS_AVX2)) -#define HAS_SCALEADDROW_AVX2 -#define HAS_SCALEROWDOWN2_AVX2 -#define HAS_SCALEROWDOWN4_AVX2 -#endif - -// The following are available on Neon platforms: -#if !defined(LIBYUV_DISABLE_NEON) && \ - (defined(__ARM_NEON__) || defined(LIBYUV_NEON) || defined(__aarch64__)) -#define HAS_SCALEARGBCOLS_NEON -#define HAS_SCALEARGBROWDOWN2_NEON -#define HAS_SCALEARGBROWDOWNEVEN_NEON -#define HAS_SCALEFILTERCOLS_NEON -#define HAS_SCALEROWDOWN2_NEON -#define HAS_SCALEROWDOWN34_NEON -#define HAS_SCALEROWDOWN38_NEON -#define HAS_SCALEROWDOWN4_NEON -#define HAS_SCALEARGBFILTERCOLS_NEON -#endif - -#if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) -#define HAS_SCALEADDROW_MSA -#define HAS_SCALEARGBCOLS_MSA -#define HAS_SCALEARGBFILTERCOLS_MSA -#define HAS_SCALEARGBROWDOWN2_MSA -#define HAS_SCALEARGBROWDOWNEVEN_MSA -#define HAS_SCALEFILTERCOLS_MSA -#define HAS_SCALEROWDOWN2_MSA -#define HAS_SCALEROWDOWN34_MSA -#define 
HAS_SCALEROWDOWN38_MSA -#define HAS_SCALEROWDOWN4_MSA -#endif - -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_FIXEDDIV1_MIPS -#define HAS_FIXEDDIV_MIPS -#define HAS_SCALEARGBCOLS_MMI -#define HAS_SCALEARGBCOLSUP2_MMI -#define HAS_SCALEARGBROWDOWN2_MMI -#define HAS_SCALEARGBROWDOWNEVEN_MMI -#define HAS_SCALEROWDOWN2_MMI -#define HAS_SCALEROWDOWN4_MMI -#define HAS_SCALEADDROW_MMI -#define HAS_SCALEADDROW_16_MMI -#define HAS_SCALEROWDOWN2_16_MMI -#define HAS_SCALEROWDOWN4_16_MMI -#define HAS_SCALECOLS_MMI -#define HAS_SCALECOLS_16_MMI -#endif - -// Scale ARGB vertically with bilinear interpolation. -void ScalePlaneVertical(int src_height, - int dst_width, - int dst_height, - int src_stride, - int dst_stride, - const uint8_t* src_argb, - uint8_t* dst_argb, - int x, - int y, - int dy, - int bpp, - enum FilterMode filtering); - -void ScalePlaneVertical_16(int src_height, - int dst_width, - int dst_height, - int src_stride, - int dst_stride, - const uint16_t* src_argb, - uint16_t* dst_argb, - int x, - int y, - int dy, - int wpp, - enum FilterMode filtering); - -// Simplify the filtering based on scale factors. -enum FilterMode ScaleFilterReduce(int src_width, - int src_height, - int dst_width, - int dst_height, - enum FilterMode filtering); - -// Divide num by div and return as 16.16 fixed point result. -int FixedDiv_C(int num, int div); -int FixedDiv_X86(int num, int div); -int FixedDiv_MIPS(int num, int div); -// Divide num - 1 by div - 1 and return as 16.16 fixed point result. -int FixedDiv1_C(int num, int div); -int FixedDiv1_X86(int num, int div); -int FixedDiv1_MIPS(int num, int div); -#ifdef HAS_FIXEDDIV_X86 -#define FixedDiv FixedDiv_X86 -#define FixedDiv1 FixedDiv1_X86 -#elif defined HAS_FIXEDDIV_MIPS -#define FixedDiv FixedDiv_MIPS -#define FixedDiv1 FixedDiv1_MIPS -#else -#define FixedDiv FixedDiv_C -#define FixedDiv1 FixedDiv1_C -#endif - -// Compute slope values for stepping. 
-void ScaleSlope(int src_width, - int src_height, - int dst_width, - int dst_height, - enum FilterMode filtering, - int* x, - int* y, - int* dx, - int* dy); - -void ScaleRowDown2_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Linear_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Linear_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Box_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Box_Odd_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Box_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown4_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown4Box_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4Box_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown34_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown34_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown34_0_Box_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* d, - int dst_width); -void ScaleRowDown34_0_Box_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* d, - int dst_width); -void ScaleRowDown34_1_Box_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* d, - int dst_width); -void ScaleRowDown34_1_Box_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* 
d, - int dst_width); -void ScaleCols_C(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleCols_16_C(uint16_t* dst_ptr, - const uint16_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleColsUp2_C(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int, - int); -void ScaleColsUp2_16_C(uint16_t* dst_ptr, - const uint16_t* src_ptr, - int dst_width, - int, - int); -void ScaleFilterCols_C(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleFilterCols_16_C(uint16_t* dst_ptr, - const uint16_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleFilterCols64_C(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x32, - int dx); -void ScaleFilterCols64_16_C(uint16_t* dst_ptr, - const uint16_t* src_ptr, - int dst_width, - int x32, - int dx); -void ScaleRowDown38_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown38_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown38_3_Box_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_3_Box_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst_ptr, - int dst_width); -void ScaleRowDown38_2_Box_C(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_2_Box_16_C(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst_ptr, - int dst_width); -void ScaleAddRow_C(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); -void ScaleAddRow_16_C(const uint16_t* src_ptr, - uint32_t* dst_ptr, - int src_width); -void ScaleARGBRowDown2_C(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Linear_C(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Box_C(const uint8_t* src_argb, 
- ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEven_C(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEvenBox_C(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBCols_C(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBCols64_C(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x32, - int dx); -void ScaleARGBColsUp2_C(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int, - int); -void ScaleARGBFilterCols_C(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBFilterCols64_C(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x32, - int dx); - -// Specialized scalers for x86. -void ScaleRowDown2_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Linear_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Linear_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_AVX2(const uint8_t* src_ptr, - 
ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -void ScaleRowDown34_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_1_Box_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_0_Box_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_3_Box_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_2_Box_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Linear_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_Odd_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2_Any_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Linear_Any_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_Any_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_Odd_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4_Any_AVX2(const uint8_t* src_ptr, 
- ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_Any_AVX2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -void ScaleRowDown34_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_1_Box_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_0_Box_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_3_Box_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_2_Box_Any_SSSE3(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -void ScaleAddRow_SSE2(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); -void ScaleAddRow_AVX2(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); -void ScaleAddRow_Any_SSE2(const uint8_t* src_ptr, - uint16_t* dst_ptr, - int src_width); -void ScaleAddRow_Any_AVX2(const uint8_t* src_ptr, - uint16_t* dst_ptr, - int src_width); - -void ScaleFilterCols_SSSE3(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleColsUp2_SSE2(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); - -// ARGB Column functions -void ScaleARGBCols_SSE2(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBFilterCols_SSSE3(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBColsUp2_SSE2(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBFilterCols_NEON(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBCols_NEON(uint8_t* dst_argb, - const 
uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBFilterCols_Any_NEON(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleARGBCols_Any_NEON(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleARGBFilterCols_MSA(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBCols_MSA(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBFilterCols_Any_MSA(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleARGBCols_Any_MSA(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleARGBCols_MMI(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBCols_Any_MMI(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); - -// ARGB Row functions -void ScaleARGBRowDown2_SSE2(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Linear_SSE2(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Box_SSE2(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleARGBRowDown2Linear_NEON(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Box_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleARGBRowDown2_MSA(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Linear_MSA(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Box_MSA(const uint8_t* src_argb, - ptrdiff_t src_stride, - 
uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2_MMI(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Linear_MMI(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2Box_MMI(const uint8_t* src_argb, - ptrdiff_t src_stride, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDown2_Any_SSE2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Linear_Any_SSE2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Box_Any_SSE2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Linear_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Box_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Linear_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Box_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Linear_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDown2Box_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEven_SSE2(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void 
ScaleARGBRowDownEvenBox_SSE2(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEven_NEON(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEvenBox_NEON(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEven_MSA(const uint8_t* src_argb, - ptrdiff_t src_stride, - int32_t src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEvenBox_MSA(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEven_MMI(const uint8_t* src_argb, - ptrdiff_t src_stride, - int32_t src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEvenBox_MMI(const uint8_t* src_argb, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_argb, - int dst_width); -void ScaleARGBRowDownEven_Any_SSE2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEvenBox_Any_SSE2(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEven_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEvenBox_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEven_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int32_t src_stepx, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEvenBox_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEven_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int32_t src_stepx, - uint8_t* dst_ptr, - int dst_width); -void ScaleARGBRowDownEvenBox_Any_MMI(const 
uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_ptr, - int dst_width); - -// ScaleRowDown2Box also used by planar functions -// NEON downscalers with interpolation. - -// Note - not static due to reuse in convert for 444 to 420. -void ScaleRowDown2_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Linear_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Box_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); - -void ScaleRowDown4_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -// Down scale from 4 to 3 pixels. Use the neon multilane read/write -// to load up the every 4th pixel into a 4 different registers. -// Point samples 32 pixels to 24 pixels. -void ScaleRowDown34_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_0_Box_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_1_Box_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -// 32 -> 12 -void ScaleRowDown38_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -// 32x3 -> 12x1 -void ScaleRowDown38_3_Box_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -// 32x2 -> 12x1 -void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -void ScaleRowDown2_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Linear_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void 
ScaleRowDown2Box_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_Odd_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_0_Box_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_1_Box_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -// 32 -> 12 -void ScaleRowDown38_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -// 32x3 -> 12x1 -void ScaleRowDown38_3_Box_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -// 32x2 -> 12x1 -void ScaleRowDown38_2_Box_Any_NEON(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -void ScaleAddRow_NEON(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); -void ScaleAddRow_Any_NEON(const uint8_t* src_ptr, - uint16_t* dst_ptr, - int src_width); - -void ScaleFilterCols_NEON(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); - -void ScaleFilterCols_Any_NEON(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); - -void ScaleRowDown2_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Linear_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Box_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4_MSA(const uint8_t* 
src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4Box_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown38_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown38_2_Box_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_3_Box_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleAddRow_MSA(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); -void ScaleFilterCols_MSA(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleRowDown34_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown34_0_Box_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* d, - int dst_width); -void ScaleRowDown34_1_Box_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* d, - int dst_width); - -void ScaleRowDown2_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Linear_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_2_Box_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown38_3_Box_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void 
ScaleAddRow_Any_MSA(const uint8_t* src_ptr, - uint16_t* dst_ptr, - int src_width); -void ScaleFilterCols_Any_MSA(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleRowDown34_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_0_Box_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown34_1_Box_Any_MSA(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); - -void ScaleRowDown2_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Linear_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Linear_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Box_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown2Box_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Box_Odd_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown4Box_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4Box_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleAddRow_MMI(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); -void ScaleAddRow_16_MMI(const uint16_t* src_ptr, - uint32_t* dst_ptr, - int src_width); -void ScaleColsUp2_MMI(uint8_t* dst_ptr, - const 
uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleColsUp2_16_MMI(uint16_t* dst_ptr, - const uint16_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleARGBColsUp2_MMI(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); - -void ScaleRowDown2_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Linear_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown2Box_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleRowDown4Box_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleAddRow_Any_MMI(const uint8_t* src_ptr, - uint16_t* dst_ptr, - int src_width); -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_SCALE_ROW_H_ diff --git a/macos/third_party/include/libyuv/version.h b/macos/third_party/include/libyuv/version.h deleted file mode 100644 index 517be841b4..0000000000 --- a/macos/third_party/include/libyuv/version.h +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright 2012 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef INCLUDE_LIBYUV_VERSION_H_ -#define INCLUDE_LIBYUV_VERSION_H_ - -#define LIBYUV_VERSION 1716 - -#endif // INCLUDE_LIBYUV_VERSION_H_ diff --git a/macos/third_party/include/libyuv/video_common.h b/macos/third_party/include/libyuv/video_common.h deleted file mode 100644 index ffcbdbf1b0..0000000000 --- a/macos/third_party/include/libyuv/video_common.h +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright 2011 The LibYuv Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Common definitions for video, including fourcc and VideoFormat. - -#ifndef INCLUDE_LIBYUV_VIDEO_COMMON_H_ -#define INCLUDE_LIBYUV_VIDEO_COMMON_H_ - -#include "libyuv/basic_types.h" - -#ifdef __cplusplus -namespace libyuv { -extern "C" { -#endif - -////////////////////////////////////////////////////////////////////////////// -// Definition of FourCC codes -////////////////////////////////////////////////////////////////////////////// - -// Convert four characters to a FourCC code. -// Needs to be a macro otherwise the OS X compiler complains when the kFormat* -// constants are used in a switch. 
-#ifdef __cplusplus -#define FOURCC(a, b, c, d) \ - ((static_cast(a)) | (static_cast(b) << 8) | \ - (static_cast(c) << 16) | /* NOLINT */ \ - (static_cast(d) << 24)) /* NOLINT */ -#else -#define FOURCC(a, b, c, d) \ - (((uint32_t)(a)) | ((uint32_t)(b) << 8) | /* NOLINT */ \ - ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24)) /* NOLINT */ -#endif - -// Some pages discussing FourCC codes: -// http://www.fourcc.org/yuv.php -// http://v4l2spec.bytesex.org/spec/book1.htm -// http://developer.apple.com/quicktime/icefloe/dispatch020.html -// http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12 -// http://people.xiph.org/~xiphmont/containers/nut/nut4cc.txt - -// FourCC codes grouped according to implementation efficiency. -// Primary formats should convert in 1 efficient step. -// Secondary formats are converted in 2 steps. -// Auxilliary formats call primary converters. -enum FourCC { - // 9 Primary YUV formats: 5 planar, 2 biplanar, 2 packed. - FOURCC_I420 = FOURCC('I', '4', '2', '0'), - FOURCC_I422 = FOURCC('I', '4', '2', '2'), - FOURCC_I444 = FOURCC('I', '4', '4', '4'), - FOURCC_I400 = FOURCC('I', '4', '0', '0'), - FOURCC_NV21 = FOURCC('N', 'V', '2', '1'), - FOURCC_NV12 = FOURCC('N', 'V', '1', '2'), - FOURCC_YUY2 = FOURCC('Y', 'U', 'Y', '2'), - FOURCC_UYVY = FOURCC('U', 'Y', 'V', 'Y'), - FOURCC_H010 = FOURCC('H', '0', '1', '0'), // unofficial fourcc. 10 bit lsb - - // 1 Secondary YUV format: row biplanar. - FOURCC_M420 = FOURCC('M', '4', '2', '0'), - - // 11 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp, 1 10 bpc - FOURCC_ARGB = FOURCC('A', 'R', 'G', 'B'), - FOURCC_BGRA = FOURCC('B', 'G', 'R', 'A'), - FOURCC_ABGR = FOURCC('A', 'B', 'G', 'R'), - FOURCC_AR30 = FOURCC('A', 'R', '3', '0'), // 10 bit per channel. 2101010. 
- FOURCC_AB30 = FOURCC('A', 'B', '3', '0'), // ABGR version of 10 bit - FOURCC_24BG = FOURCC('2', '4', 'B', 'G'), - FOURCC_RAW = FOURCC('r', 'a', 'w', ' '), - FOURCC_RGBA = FOURCC('R', 'G', 'B', 'A'), - FOURCC_RGBP = FOURCC('R', 'G', 'B', 'P'), // rgb565 LE. - FOURCC_RGBO = FOURCC('R', 'G', 'B', 'O'), // argb1555 LE. - FOURCC_R444 = FOURCC('R', '4', '4', '4'), // argb4444 LE. - - // 1 Primary Compressed YUV format. - FOURCC_MJPG = FOURCC('M', 'J', 'P', 'G'), - - // 8 Auxiliary YUV variations: 3 with U and V planes are swapped, 1 Alias. - FOURCC_YV12 = FOURCC('Y', 'V', '1', '2'), - FOURCC_YV16 = FOURCC('Y', 'V', '1', '6'), - FOURCC_YV24 = FOURCC('Y', 'V', '2', '4'), - FOURCC_YU12 = FOURCC('Y', 'U', '1', '2'), // Linux version of I420. - FOURCC_J420 = FOURCC('J', '4', '2', '0'), - FOURCC_J400 = FOURCC('J', '4', '0', '0'), // unofficial fourcc - FOURCC_H420 = FOURCC('H', '4', '2', '0'), // unofficial fourcc - FOURCC_H422 = FOURCC('H', '4', '2', '2'), // unofficial fourcc - - // 14 Auxiliary aliases. CanonicalFourCC() maps these to canonical fourcc. - FOURCC_IYUV = FOURCC('I', 'Y', 'U', 'V'), // Alias for I420. - FOURCC_YU16 = FOURCC('Y', 'U', '1', '6'), // Alias for I422. - FOURCC_YU24 = FOURCC('Y', 'U', '2', '4'), // Alias for I444. - FOURCC_YUYV = FOURCC('Y', 'U', 'Y', 'V'), // Alias for YUY2. - FOURCC_YUVS = FOURCC('y', 'u', 'v', 's'), // Alias for YUY2 on Mac. - FOURCC_HDYC = FOURCC('H', 'D', 'Y', 'C'), // Alias for UYVY. - FOURCC_2VUY = FOURCC('2', 'v', 'u', 'y'), // Alias for UYVY on Mac. - FOURCC_JPEG = FOURCC('J', 'P', 'E', 'G'), // Alias for MJPG. - FOURCC_DMB1 = FOURCC('d', 'm', 'b', '1'), // Alias for MJPG on Mac. - FOURCC_BA81 = FOURCC('B', 'A', '8', '1'), // Alias for BGGR. - FOURCC_RGB3 = FOURCC('R', 'G', 'B', '3'), // Alias for RAW. - FOURCC_BGR3 = FOURCC('B', 'G', 'R', '3'), // Alias for 24BG. 
- FOURCC_CM32 = FOURCC(0, 0, 0, 32), // Alias for BGRA kCMPixelFormat_32ARGB - FOURCC_CM24 = FOURCC(0, 0, 0, 24), // Alias for RAW kCMPixelFormat_24RGB - FOURCC_L555 = FOURCC('L', '5', '5', '5'), // Alias for RGBO. - FOURCC_L565 = FOURCC('L', '5', '6', '5'), // Alias for RGBP. - FOURCC_5551 = FOURCC('5', '5', '5', '1'), // Alias for RGBO. - - // deprecated formats. Not supported, but defined for backward compatibility. - FOURCC_I411 = FOURCC('I', '4', '1', '1'), - FOURCC_Q420 = FOURCC('Q', '4', '2', '0'), - FOURCC_RGGB = FOURCC('R', 'G', 'G', 'B'), - FOURCC_BGGR = FOURCC('B', 'G', 'G', 'R'), - FOURCC_GRBG = FOURCC('G', 'R', 'B', 'G'), - FOURCC_GBRG = FOURCC('G', 'B', 'R', 'G'), - FOURCC_H264 = FOURCC('H', '2', '6', '4'), - - // Match any fourcc. - FOURCC_ANY = -1, -}; - -enum FourCCBpp { - // Canonical fourcc codes used in our code. - FOURCC_BPP_I420 = 12, - FOURCC_BPP_I422 = 16, - FOURCC_BPP_I444 = 24, - FOURCC_BPP_I411 = 12, - FOURCC_BPP_I400 = 8, - FOURCC_BPP_NV21 = 12, - FOURCC_BPP_NV12 = 12, - FOURCC_BPP_YUY2 = 16, - FOURCC_BPP_UYVY = 16, - FOURCC_BPP_M420 = 12, - FOURCC_BPP_Q420 = 12, - FOURCC_BPP_ARGB = 32, - FOURCC_BPP_BGRA = 32, - FOURCC_BPP_ABGR = 32, - FOURCC_BPP_RGBA = 32, - FOURCC_BPP_AR30 = 32, - FOURCC_BPP_AB30 = 32, - FOURCC_BPP_24BG = 24, - FOURCC_BPP_RAW = 24, - FOURCC_BPP_RGBP = 16, - FOURCC_BPP_RGBO = 16, - FOURCC_BPP_R444 = 16, - FOURCC_BPP_RGGB = 8, - FOURCC_BPP_BGGR = 8, - FOURCC_BPP_GRBG = 8, - FOURCC_BPP_GBRG = 8, - FOURCC_BPP_YV12 = 12, - FOURCC_BPP_YV16 = 16, - FOURCC_BPP_YV24 = 24, - FOURCC_BPP_YU12 = 12, - FOURCC_BPP_J420 = 12, - FOURCC_BPP_J400 = 8, - FOURCC_BPP_H420 = 12, - FOURCC_BPP_H422 = 16, - FOURCC_BPP_H010 = 24, - FOURCC_BPP_MJPG = 0, // 0 means unknown. 
- FOURCC_BPP_H264 = 0, - FOURCC_BPP_IYUV = 12, - FOURCC_BPP_YU16 = 16, - FOURCC_BPP_YU24 = 24, - FOURCC_BPP_YUYV = 16, - FOURCC_BPP_YUVS = 16, - FOURCC_BPP_HDYC = 16, - FOURCC_BPP_2VUY = 16, - FOURCC_BPP_JPEG = 1, - FOURCC_BPP_DMB1 = 1, - FOURCC_BPP_BA81 = 8, - FOURCC_BPP_RGB3 = 24, - FOURCC_BPP_BGR3 = 24, - FOURCC_BPP_CM32 = 32, - FOURCC_BPP_CM24 = 24, - - // Match any fourcc. - FOURCC_BPP_ANY = 0, // 0 means unknown. -}; - -// Converts fourcc aliases into canonical ones. -LIBYUV_API uint32_t CanonicalFourCC(uint32_t fourcc); - -#ifdef __cplusplus -} // extern "C" -} // namespace libyuv -#endif - -#endif // INCLUDE_LIBYUV_VIDEO_COMMON_H_ diff --git a/macos/third_party/lib/libyuv_internal.a b/macos/third_party/lib/libyuv_internal.a deleted file mode 100644 index 1ab188bd57..0000000000 Binary files a/macos/third_party/lib/libyuv_internal.a and /dev/null differ diff --git a/pubspec.yaml b/pubspec.yaml index fad3df7235..719c1a1309 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,21 +1,28 @@ name: flutter_webrtc description: Flutter WebRTC plugin for iOS/Android/Destkop/Web, based on GoogleWebRTC. 
-version: 0.3.3 +version: 0.14.1 homepage: https://github.com/cloudwebrtc/flutter-webrtc environment: - sdk: '>=2.2.2 <3.0.0' - flutter: ^1.10.0 + sdk: ">=3.3.0 <4.0.0" + flutter: ">=1.22.0" dependencies: + collection: ^1.17.0 + dart_webrtc: ^1.5.3+hotfix.2 flutter: sdk: flutter + path_provider: ^2.0.2 + web: ^1.0.0 + webrtc_interface: ^1.2.2+hotfix.2 dev_dependencies: - pedantic: ^1.9.0 flutter_test: sdk: flutter - test: - + import_sorter: ^4.6.0 + lints: ^4.0.0 + pedantic: ^1.11.1 + test: any + flutter: plugin: platforms: @@ -26,3 +33,9 @@ flutter: pluginClass: FlutterWebRTCPlugin macos: pluginClass: FlutterWebRTCPlugin + windows: + pluginClass: FlutterWebRTCPlugin + linux: + pluginClass: FlutterWebRTCPlugin + elinux: + pluginClass: FlutterWebRTCPlugin diff --git a/test/unit/rct_peerconnection_test.dart b/test/unit/rtc_peerconnection_test.dart similarity index 62% rename from test/unit/rct_peerconnection_test.dart rename to test/unit/rtc_peerconnection_test.dart index d25cca95cb..d7f86d9302 100644 --- a/test/unit/rct_peerconnection_test.dart +++ b/test/unit/rtc_peerconnection_test.dart @@ -1,19 +1,21 @@ import 'package:flutter/services.dart'; + import 'package:flutter_test/flutter_test.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; -import 'package:flutter_webrtc/src/rtc_peerconnection.dart'; + +import 'package:flutter_webrtc/src/native/rtc_data_channel_impl.dart'; +import 'package:flutter_webrtc/src/native/rtc_peerconnection_impl.dart'; void main() { TestWidgetsFlutterBinding.ensureInitialized(); - final channel = WebRTC.methodChannel(); + final channel = MethodChannel('FlutterWebRTC.Method'); setUp(() { channel.setMockMethodCallHandler((MethodCall methodCall) async { await ServicesBinding.instance.defaultBinaryMessenger .handlePlatformMessage( - 'FlutterWebRTC/peerConnectoinEvent', null, (ByteData data) {}); + 'FlutterWebRTC/peerConnectionEvent', null, (ByteData? 
data) {}); await ServicesBinding.instance.defaultBinaryMessenger .handlePlatformMessage( - 'FlutterWebRTC/dataChannelEvent', null, (ByteData data) {}); + 'FlutterWebRTC/dataChannelEvent', null, (ByteData? data) {}); }); }); @@ -24,7 +26,7 @@ void main() { test( 'Validate that not setting any public delegate this will not break the implementation by throwing NPE', () { - final rct = RTCPeerConnection('', {}); + final pc = RTCPeerConnectionNative('', {}); final events = [ 'signalingState', 'iceGatheringState', @@ -38,8 +40,18 @@ void main() { 'onRenegotiationNeeded' ]; - events.forEach((event) { - rct.eventListener({ + pc.onDataChannel = (dc) { + final channel = dc as RTCDataChannelNative; + channel.eventListener({ + 'event': 'dataChannelStateChanged', + 'id': 0, + 'flutterId': '', + 'state': 'open' + }); + }; + + for (var event in events) { + pc.eventListener({ 'event': event, //Minimum values for signalingState, iceGatheringState, iceConnectionState @@ -53,14 +65,20 @@ void main() { 'audioTracks': [], 'videoTracks': [], + //Minimum values for onRemoveTrack + 'trackId': '', + //Minimum values for onAddTrack 'track': { - 'trackId': '', + 'id': '', 'label': '', 'kind': '', 'enabled': false, - } + }, + 'id': 0, + 'label': '', + 'flutterId': '', }); - }); + } }); } diff --git a/test/unit/web/rtc_videw_view_test.dart b/test/unit/web/rtc_videw_view_test.dart index caa4976271..9b451cf77c 100644 --- a/test/unit/web/rtc_videw_view_test.dart +++ b/test/unit/web/rtc_videw_view_test.dart @@ -1,7 +1,9 @@ @TestOn('browser') +library; + import 'package:flutter_test/flutter_test.dart'; -import 'package:flutter_webrtc/src/web/get_user_media.dart'; -import 'package:flutter_webrtc/src/web/rtc_video_view.dart'; + +import 'package:flutter_webrtc/flutter_webrtc.dart'; void main() { // TODO(wer-mathurin): should revisit after this bug is resolved, https://github.com/flutter/flutter/issues/66045. 
diff --git a/third_party/libwebrtc/include/base/atomicops.h b/third_party/libwebrtc/include/base/atomicops.h new file mode 100644 index 0000000000..4a3b79916a --- /dev/null +++ b/third_party/libwebrtc/include/base/atomicops.h @@ -0,0 +1,75 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef LIB_WEBRTC_ATOMICOPS_H_ +#define LIB_WEBRTC_ATOMICOPS_H_ + +#if defined(WIN32) || defined(_WINDOWS) +// Include winsock2.h before including to maintain consistency with +// win32.h. We can't include win32.h directly here since it pulls in +// headers such as basictypes.h which causes problems in Chromium where webrtc +// exists as two separate projects, webrtc and libjingle. +#include +#endif // defined(WIN32) + +namespace libwebrtc { +class AtomicOps { + public: +#if defined(WIN32) || defined(_WINDOWS) + // Assumes sizeof(int) == sizeof(LONG), which it is on Win32 and Win64. + static int Increment(volatile int* i) { + return ::InterlockedIncrement(reinterpret_cast(i)); + } + static int Decrement(volatile int* i) { + return ::InterlockedDecrement(reinterpret_cast(i)); + } + static int AcquireLoad(volatile const int* i) { return *i; } + static void ReleaseStore(volatile int* i, int value) { *i = value; } + static int CompareAndSwap(volatile int* i, int old_value, int new_value) { + return ::InterlockedCompareExchange(reinterpret_cast(i), + new_value, old_value); + } + // Pointer variants. 
+ template + static T* AcquireLoadPtr(T* volatile* ptr) { + return *ptr; + } + template + static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) { + return static_cast(::InterlockedCompareExchangePointer( + reinterpret_cast(ptr), new_value, old_value)); + } +#else + static int Increment(volatile int* i) { return __sync_add_and_fetch(i, 1); } + static int Decrement(volatile int* i) { return __sync_sub_and_fetch(i, 1); } + static int AcquireLoad(volatile const int* i) { + return __atomic_load_n(i, __ATOMIC_ACQUIRE); + } + static void ReleaseStore(volatile int* i, int value) { + __atomic_store_n(i, value, __ATOMIC_RELEASE); + } + static int CompareAndSwap(volatile int* i, int old_value, int new_value) { + return __sync_val_compare_and_swap(i, old_value, new_value); + } + // Pointer variants. + template + static T* AcquireLoadPtr(T* volatile* ptr) { + return __atomic_load_n(ptr, __ATOMIC_ACQUIRE); + } + template + static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) { + return __sync_val_compare_and_swap(ptr, old_value, new_value); + } +#endif +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_ATOMICOPS_H_ diff --git a/third_party/libwebrtc/include/base/fixed_size_function.h b/third_party/libwebrtc/include/base/fixed_size_function.h new file mode 100644 index 0000000000..1070b45232 --- /dev/null +++ b/third_party/libwebrtc/include/base/fixed_size_function.h @@ -0,0 +1,296 @@ +// +// Copyright (c) 2014-2016 Pavel Medvedev. All rights reserved. +// +// Distributed under the MIT software license, see the accompanying +// file LICENSE + +#ifndef FIXED_SIZE_FUNCTION_HPP_INCLUDED +#define FIXED_SIZE_FUNCTION_HPP_INCLUDED + +#include +#include +#include +#include + +enum class construct_type { + none, + copy, + move, + copy_and_move, +}; + +namespace details { + +// V-table implementation +template +struct fixed_function_vtable_base { + Ret (*call)(void*, Args&&...) 
= nullptr; + void (*destroy)(void*) = nullptr; +}; + +template +struct fixed_function_vtable; + +template +struct fixed_function_vtable + : fixed_function_vtable_base {}; + +template +struct fixed_function_vtable + : fixed_function_vtable_base { + void (*copy)(const void*, void*) = nullptr; +}; + +template +struct fixed_function_vtable + : fixed_function_vtable_base { + void (*move)(void*, void*) = nullptr; +}; + +template +struct fixed_function_vtable + : fixed_function_vtable_base { + void (*copy)(const void*, void*) = nullptr; + void (*move)(void*, void*) = nullptr; +}; + +} // namespace details + +template +class fixed_size_function; + +template +class fixed_size_function { + public: + // Compile-time information + + using is_copyable = + std::integral_constant; + using is_movable = + std::integral_constant; + + using result_type = Ret; + + static const std::size_t arity = sizeof...(Args); + + template + struct argument { + static_assert(N < arity, "invalid argument index"); + using type = typename std::tuple_element>::type; + }; + + public: + template + fixed_size_function(fixed_size_function const&) = delete; + template + fixed_size_function(fixed_size_function&) = delete; + template + fixed_size_function(fixed_size_function&&) = delete; + template + fixed_size_function& operator=(fixed_size_function const&) = delete; + template + fixed_size_function& operator=(fixed_size_function&) = delete; + template + fixed_size_function& operator=(fixed_size_function&&) = delete; + template + void assign(fixed_size_function const&) = delete; + template + void assign(fixed_size_function&) = delete; + template + void assign(fixed_size_function&&) = delete; + + fixed_size_function() {} + + ~fixed_size_function() { reset(); } + + fixed_size_function(std::nullptr_t) {} + + fixed_size_function& operator=(std::nullptr_t) { + reset(); + return *this; + } + + fixed_size_function(fixed_size_function const& src) { copy(src); } + + fixed_size_function& operator=(fixed_size_function 
const& src) { + assign(src); + return *this; + } + + fixed_size_function(fixed_size_function& src) { copy(src); } + + fixed_size_function& operator=(fixed_size_function& src) { + assign(src); + return *this; + } + + fixed_size_function(fixed_size_function&& src) { + move(std::move(src), is_movable()); + } + + fixed_size_function& operator=(fixed_size_function&& src) { + assign(std::move(src)); + return *this; + } + + template + fixed_size_function(Functor&& f) { + create(std::forward(f)); + } + + template + fixed_size_function& operator=(Functor&& f) { + assign(std::forward(f)); + return *this; + } + + void assign(fixed_size_function const& src) { + reset(); + copy(src); + } + + void assign(fixed_size_function& src) { + reset(); + copy(src); + } + + void assign(fixed_size_function&& src) { + reset(); + move(std::move(src), is_movable()); + } + + template + void assign(Functor&& f) { + reset(); + create(std::forward(f)); + } + + void reset() { + auto destroy = vtable_.destroy; + if (destroy) { + vtable_ = vtable(); + destroy(&storage_); + } + } + + explicit operator bool() const { return vtable_.call != nullptr; } + + Ret operator()(Args... args) { + return vtable_.call ? vtable_.call(&storage_, std::forward(args)...) 
+ : (Ret) nullptr; + } + + void swap(fixed_size_function& other) { + fixed_size_function tmp = std::move(other); + other = std::move(*this); + *this = std::move(tmp); + } + + friend void swap(fixed_size_function& lhs, fixed_size_function& rhs) { + lhs.swap(rhs); + } + + friend bool operator==(std::nullptr_t, fixed_size_function const& f) { + return !f; + } + + friend bool operator==(fixed_size_function const& f, std::nullptr_t) { + return !f; + } + + friend bool operator!=(std::nullptr_t, fixed_size_function const& f) { + return f; + } + + friend bool operator!=(fixed_size_function const& f, std::nullptr_t) { + return f; + } + + private: + template + void create(Functor&& f) { + using functor_type = typename std::decay::type; + static_assert(sizeof(functor_type) <= StorageSize, + "Functor must be smaller than storage buffer"); + + new (&storage_) functor_type(std::forward(f)); + + vtable_.call = &call_impl; + vtable_.destroy = &destroy_impl; + init_copy(is_copyable()); + init_move(is_movable()); + } + + void copy(fixed_size_function const& src) { + if (src.vtable_.copy) { + src.vtable_.copy(&src.storage_, &storage_); + vtable_ = src.vtable_; + } + } + + void move(fixed_size_function&& src, std::true_type movable) { + if (src.vtable_.move) { + src.vtable_.move(&src.storage_, &storage_); + vtable_ = src.vtable_; + src.reset(); + } + } + + void move(fixed_size_function const& src, std::false_type movable) { + copy(src); + } + + private: + template + static Ret call_impl(void* functor, Args&&... 
args) { + return (*static_cast(functor))(std::forward(args)...); + } + + template + static void destroy_impl(void* functor) { + static_cast(functor)->~Functor(); + } + + template + static void copy_impl(void const* functor, void* dest) { + new (dest) Functor(*static_cast(functor)); + } + + template + static void move_impl(void* functor, void* dest) { + new (dest) Functor(std::move(*static_cast(functor))); + } + + template + void init_copy(std::true_type /*copyable*/) { + vtable_.copy = ©_impl; + } + + template + void init_copy(std::false_type /*copyable*/) {} + + template + void init_move(std::true_type /*movable*/) { + vtable_.move = &move_impl; + } + + template + void init_move(std::false_type /*movable*/) {} + + private: + using vtable = + details::fixed_function_vtable; + static const size_t StorageSize = MaxSize - sizeof(vtable); + using storage = typename std::aligned_storage::type; + + vtable vtable_; + storage storage_; +}; + +#endif // FIXED_SIZE_FUNCTION_HPP_INCLUDED \ No newline at end of file diff --git a/third_party/libwebrtc/include/base/portable.h b/third_party/libwebrtc/include/base/portable.h new file mode 100644 index 0000000000..b403af9cd3 --- /dev/null +++ b/third_party/libwebrtc/include/base/portable.h @@ -0,0 +1,439 @@ +#ifndef INFINISPAN_HOTROD_PORTABLE_H +#define INFINISPAN_HOTROD_PORTABLE_H + +#ifdef LIB_WEBRTC_API_EXPORTS +#define LIB_PORTABLE_API __declspec(dllexport) +#elif defined(LIB_WEBRTC_API_DLL) +#define LIB_PORTABLE_API __declspec(dllimport) +#elif !defined(WIN32) +#define LIB_PORTABLE_API __attribute__((visibility("default"))) +#else +#define LIB_PORTABLE_API +#endif + +#include +#include +#include +#include +#include + +/** + * This file defines structures that can be passed across shared library/DLL + * boundary. + * + * Besides memory layout, the class must be destroyed in the same library as + * created. None of these classes is thread-safe. The classes are not optimized + * for performance. 
+ */ + +namespace portable { + +#ifdef _MSC_VER +#define strncpy_safe strncpy_s +#else +#ifndef _TRUNCATE +#define _TRUNCATE ((size_t)-1) +#endif // _TRUNCATE +#endif + +#define PORTABLE_STRING_BUF_SIZE 48 + +class string { + private: + char m_buf[PORTABLE_STRING_BUF_SIZE]; + char* m_dynamic; + size_t m_length; + + public: + LIB_PORTABLE_API string(); + LIB_PORTABLE_API void init(const char* str, size_t len); + LIB_PORTABLE_API void destroy(); + + inline string(const char* str) { init(str, strlen(str)); } + + inline string(const std::string& str) { init(str.c_str(), str.length()); } + + inline string(const string& o) { + init(o.m_dynamic == 0 ? o.m_buf : o.m_dynamic, o.m_length); + } + + inline string& operator=(const string& o) { + destroy(); + init(o.m_dynamic == 0 ? o.m_buf : o.m_dynamic, o.m_length); + return *this; + } + + LIB_PORTABLE_API ~string(); + + inline string& operator=(const std::string& str) { + destroy(); + init(str.c_str(), str.length()); + return *this; + } + + inline size_t size() { return m_length; } + + inline const char* c_string() const { + return m_dynamic == 0 ? m_buf : m_dynamic; + } + + inline std::string std_string() const { + return std::string(m_dynamic == 0 ? 
m_buf : m_dynamic, m_length); + } +}; + +inline std::string to_std_string(const string& str) { return str.std_string(); } + +template +class identity { + T operator()(const T& x) { return x; } +}; + +template +class vector { + protected: + using raw_type = typename std::aligned_storage::type; + + private: + T* m_array; + size_t m_size; + + public: + class move_ref { + friend class vector; + + private: + vector& m_ref; + move_ref(vector& ref) : m_ref(ref) {} + }; + + vector() : m_array(0), m_size(0) {} + vector(T* array, size_t s) : m_array(array), m_size(s) {} + + template + vector(const Iterable& v) { + m_size = v.size(); + if (v.size() == 0) { + m_array = 0; + } else { + m_array = new T[v.size()]; + size_t i = 0; + for (typename Iterable::const_iterator it = v.begin(); it != v.end(); + ++it) { + m_array[i++] = *it; + } + } + } + + template + vector(const Iterable& v, Converter convert) { + m_size = v.size(); + if (v.size() == 0) { + m_array = 0; + } else { + m_array = new T[v.size()]; + size_t i = 0; + for (typename Iterable::const_iterator it = v.begin(); it != v.end(); + ++it) { + m_array[i++] = convert(*it); + } + } + } + + vector(const vector& o) { + m_size = o.m_size; + if (m_size != 0) { + m_array = new T[o.m_size]; + for (size_t i = 0; i < o.m_size; ++i) { + m_array[i] = o.m_array[i]; + } + } + } + + ~vector() { destroy_all(); } + + vector& operator=(const vector& o) { + if (m_size < o.m_size) { + destroy_all(); + m_array = new T[o.m_size]; + } else if (o.m_size == 0 && m_size != 0) { + destroy_all(); + } + m_size = o.m_size; + for (size_t i = 0; i < o.m_size; ++i) { + m_array[i] = o.m_array[i]; + } + return *this; + } + + vector(move_ref mr) : m_array(mr.m_ref.m_array), m_size(mr.m_ref.m_size) {} + vector& operator=(move_ref mr) { + if (m_size != 0) { + destroy_all(); + } + m_size = mr.m_ref.m_size; + m_array = mr.m_ref.m_array; + mr.m_ref.m_size = 0; + mr.m_ref.m_array = 0; + return *this; + } + /** + * Not really safe - can't be used as 
vector(something).move(), + * but vector tmp(something); other = tmp.move(); + */ + move_ref move() { return move_ref(*this); } + + std::vector std_vector() const { + std::vector v; + v.reserve(m_size); + for (size_t i = 0; i < m_size; ++i) { + v.push_back(m_array[i]); + } + return v; + } + + const T* data() const { return m_array; } + + size_t size() const { return m_size; } + + T& operator[](size_t i) { return m_array[i]; } + + const T& operator[](size_t i) const { return m_array[i]; } + + void clear() { destroy_all(); } + + protected: + void destroy(T* rt) { reinterpret_cast(rt)->~T(); } + + void destroy_all() { + for (size_t i = 0; i < m_size; ++i) { + destroy(&m_array[i]); + } + m_size = 0; + } +}; + +template +class pair { + public: + K key; + V value; +}; + +template +class map { + private: + typedef pair my_pair; + + vector m_vec; + + /*template + static pair *to_array(const std::map &m, + K (*convertKey)(const K2 &), + V (*convertValue)(const V2 &)) + { + my_pair *data = new my_pair[m.size()]; + my_pair *dp = data; + for (std::map::const_iterator it = m.begin(); it != m.end(); ++it) + { dp->key = convertKey(it->first); dp->value = convertValue(it->second); + ++dp; + } + return data; + }*/ + + template + static my_pair* to_array(const std::map& m, KC convertKey, + VC convertValue) { + my_pair* data = new my_pair[m.size()]; + my_pair* dp = data; + for (typename std::map::const_iterator it = m.begin(); + it != m.end(); ++it) { + dp->key = convertKey(it->first); + dp->value = convertValue(it->second); + ++dp; + } + return data; + } + + public: + class move_ref { + friend class map; + + private: + map& m_ref; + move_ref(map& ref) : m_ref(ref) {} + }; + + map() {} + + /* template map(const std::map &m, + K (*convertKey)(const K2 &) = identity, + V (*convertValue)(const V2 &) = identity): + m_vec(to_array(m, convertKey, convertValue), m.size()) {}*/ + + map(const std::map& m) + : m_vec(to_array(m, identity(), identity()), m.size()) {} + + template + map(const 
std::map& m, KC convertKey = identity(), + VC convertValue = identity()) + : m_vec(to_array(m, convertKey, convertValue), m.size()) {} + + map(const map& o) { m_vec = o.m_vec; } + + map& operator=(const map& o) { + m_vec = o.m_vec; + return *this; + } + + map(move_ref mr) : m_vec(mr.m_ref.m_vec.move()) {} + map& operator=(move_ref mr) { + m_vec = mr.m_ref.m_vec.move(); + return *this; + } + move_ref move() { return move_ref(*this); } + + std::map std_map() const { + std::map m; + for (size_t i = 0; i < m_vec.size(); ++i) { + const my_pair* dp = m_vec.data() + i; + m[dp->key] = dp->value; + } + return m; + } + + template + std::map std_map(KC convertKey, VC convertValue) const { + std::map m; + for (size_t i = 0; i < m_vec.size(); ++i) { + const my_pair* dp = m_vec.data() + i; + m[convertKey(dp->key)] = convertValue(dp->value); + } + return m; + } + + template + const my_pair* get(K2 key, int (*cmp)(K2, const K&)) const { + for (size_t i = 0; i < m_vec.size(); ++i) { + const my_pair* dp = m_vec.data() + i; + if (!cmp(key, dp->key)) return dp; + } + return 0; + } + + const my_pair* data() const { return m_vec.data(); } + + size_t size() const { return m_vec.size(); } +}; + +/* Invasive reference counting */ +template +class counting_ptr; + +class counted_object { + template + friend class counting_ptr; + + private: + int m_counter; + + public: + counted_object() : m_counter(0) {} + virtual ~counted_object() {} +}; + +template +class counted_wrapper : public counted_object { + private: + T m_object; + + public: + counted_wrapper(const T& o) : m_object(o) {} + T& operator()() { return m_object; } +}; + +template +class counting_ptr { + public: + typedef void (*destroy)(T*); + + private: + counted_object* m_ptr; + destroy m_destroy; + + inline void dec_and_destroy() { + if (m_ptr != 0 && --(m_ptr->m_counter) == 0) { + if (m_destroy == 0) { + delete m_ptr; + } else { + m_destroy((T*)m_ptr); + } + } + } + + public: + counting_ptr() : m_ptr(0), m_destroy(0) {} + 
counting_ptr(T* obj, destroy d = 0) : m_ptr(obj), m_destroy(d) { + counted_object* rc = obj; // no cast required + if (rc != 0) { + rc->m_counter++; + } + } + ~counting_ptr() { dec_and_destroy(); } + counting_ptr(const counting_ptr& o) : m_ptr(o.m_ptr), m_destroy(o.m_destroy) { + if (m_ptr != 0) { + m_ptr->m_counter++; + } + } + counting_ptr& operator=(const counting_ptr& o) { + dec_and_destroy(); + m_ptr = o.m_ptr; + m_destroy = o.m_destroy; + if (m_ptr != 0) { + m_ptr->m_counter++; + } + return *this; + } + counting_ptr& operator=(T* rc) { return reset(rc, 0); } + counting_ptr& reset(T* rc, destroy d) { + dec_and_destroy(); + m_ptr = rc; + m_destroy = d; + if (rc != 0) { + rc->m_counter++; + } + return *this; + } + T* get() { return (T*)m_ptr; } + const T* get() const { return (T*)m_ptr; } + T* operator->() { return (T*)m_ptr; } + const T* operator->() const { return (const T*)m_ptr; } +}; + +template +class local_ptr { + private: + typedef void (*destroy)(T*); + T* m_ptr; + destroy m_destroy; + + public: + local_ptr() : m_ptr(0), m_destroy(0) {} + local_ptr(const local_ptr&) + : m_ptr(0), m_destroy(0) {} // copying does not persist value + local_ptr& operator=(const local_ptr&) { return *this; } + ~local_ptr() { + if (m_ptr) m_destroy(m_ptr); + } + const T* get() const { return m_ptr; } + T* get() { return m_ptr; } + void set(T* ptr, void (*dtor)(T*)) { + if (m_ptr) m_destroy(m_ptr); + m_ptr = ptr; + m_destroy = dtor; + } +}; + +} // namespace portable + +#endif // INFINISPAN_HOTROD_PORTABLE_H \ No newline at end of file diff --git a/third_party/libwebrtc/include/base/refcount.h b/third_party/libwebrtc/include/base/refcount.h new file mode 100644 index 0000000000..6e3360e87b --- /dev/null +++ b/third_party/libwebrtc/include/base/refcount.h @@ -0,0 +1,29 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef LIB_WEBRTC_REFCOUNT_H_ +#define LIB_WEBRTC_REFCOUNT_H_ + +#include "refcountedobject.h" + +namespace libwebrtc { + +// Reference count interface. +class RefCountInterface { + public: + virtual int AddRef() const = 0; + virtual int Release() const = 0; + + protected: + virtual ~RefCountInterface() {} +}; + +} // namespace libwebrtc + +#endif // WEBRTC_BASE_REFCOUNT_H_ diff --git a/third_party/libwebrtc/include/base/refcountedobject.h b/third_party/libwebrtc/include/base/refcountedobject.h new file mode 100644 index 0000000000..9beea333f2 --- /dev/null +++ b/third_party/libwebrtc/include/base/refcountedobject.h @@ -0,0 +1,60 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef LIB_WEBRTC_REFCOUNTEDOBJECT_H_ +#define LIB_WEBRTC_REFCOUNTEDOBJECT_H_ + +#include + +#include "atomicops.h" + +namespace libwebrtc { + +template +class RefCountedObject : public T { + public: + RefCountedObject() {} + + template + explicit RefCountedObject(P0&& p0) : T(std::forward(p0)) {} + + template + RefCountedObject(P0&& p0, P1&& p1, Args&&... args) + : T(std::forward(p0), std::forward(p1), + std::forward(args)...) 
{} + + virtual int AddRef() const { return AtomicOps::Increment(&ref_count_); } + + virtual int Release() const { + int count = AtomicOps::Decrement(&ref_count_); + if (!count) { + delete this; + } + return count; + } + + // Return whether the reference count is one. If the reference count is used + // in the conventional way, a reference count of 1 implies that the current + // thread owns the reference and no other thread shares it. This call + // performs the test for a reference count of one, and performs the memory + // barrier needed for the owning thread to act on the object, knowing that it + // has exclusive access to the object. + virtual bool HasOneRef() const { + return AtomicOps::AcquireLoad(&ref_count_) == 1; + } + + protected: + virtual ~RefCountedObject() {} + + mutable volatile int ref_count_ = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_REFCOUNTEDOBJECT_H_ diff --git a/third_party/libwebrtc/include/base/scoped_ref_ptr.h b/third_party/libwebrtc/include/base/scoped_ref_ptr.h new file mode 100644 index 0000000000..9f00839cbe --- /dev/null +++ b/third_party/libwebrtc/include/base/scoped_ref_ptr.h @@ -0,0 +1,155 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Originally these classes are from Chromium. +// http://src.chromium.org/viewvc/chrome/trunk/src/base/memory/ref_counted.h?view=markup + +// +// A smart pointer class for reference counted objects. Use this class instead +// of calling AddRef and Release manually on a reference counted object to +// avoid common memory leaks caused by forgetting to Release an object +// reference. 
Sample usage: +// +// class MyFoo : public RefCounted { +// ... +// }; +// +// void some_function() { +// scoped_refptr foo = new MyFoo(); +// foo->Method(param); +// // |foo| is released when this function returns +// } +// +// void some_other_function() { +// scoped_refptr foo = new MyFoo(); +// ... +// foo = NULL; // explicitly releases |foo| +// ... +// if (foo) +// foo->Method(param); +// } +// +// The above examples show how scoped_refptr acts like a pointer to T. +// Given two scoped_refptr classes, it is also possible to exchange +// references between the two objects, like so: +// +// { +// scoped_refptr a = new MyFoo(); +// scoped_refptr b; +// +// b.swap(a); +// // now, |b| references the MyFoo object, and |a| references NULL. +// } +// +// To make both |a| and |b| in the above example reference the same MyFoo +// object, simply use the assignment operator: +// +// { +// scoped_refptr a = new MyFoo(); +// scoped_refptr b; +// +// b = a; +// // now, |a| and |b| each own a reference to the same MyFoo object. +// } +// + +#ifndef LIB_WEBRTC_SCOPED_REF_PTR_H_ +#define LIB_WEBRTC_SCOPED_REF_PTR_H_ + +#include + +namespace libwebrtc { + +template +class scoped_refptr { + public: + scoped_refptr() : ptr_(NULL) {} + + scoped_refptr(T* p) : ptr_(p) { + if (ptr_) ptr_->AddRef(); + } + + scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) { + if (ptr_) ptr_->AddRef(); + } + + template + scoped_refptr(const scoped_refptr& r) : ptr_(r.get()) { + if (ptr_) ptr_->AddRef(); + } + + // Move constructors. + scoped_refptr(scoped_refptr&& r) : ptr_(r.release()) {} + + template + scoped_refptr(scoped_refptr&& r) : ptr_(r.release()) {} + + ~scoped_refptr() { + if (ptr_) ptr_->Release(); + } + + T* get() const { return ptr_; } + operator T*() const { return ptr_; } + T* operator->() const { return ptr_; } + + // Release a pointer. + // The return value is the current pointer held by this object. + // If this object holds a NULL pointer, the return value is NULL. 
+ // After this operation, this object will hold a NULL pointer, + // and will not own the object any more. + T* release() { + T* retVal = ptr_; + ptr_ = NULL; + return retVal; + } + + scoped_refptr& operator=(T* p) { + // AddRef first so that self assignment should work + if (p) p->AddRef(); + if (ptr_) ptr_->Release(); + ptr_ = p; + return *this; + } + + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.ptr_; + } + + template + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.get(); + } + + scoped_refptr& operator=(scoped_refptr&& r) { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + + template + scoped_refptr& operator=(scoped_refptr&& r) { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + + void swap(T** pp) { + T* p = ptr_; + ptr_ = *pp; + *pp = p; + } + + void swap(scoped_refptr& r) { swap(&r.ptr_); } + + protected: + T* ptr_; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_SCOPED_REF_PTR_H_ diff --git a/third_party/libwebrtc/include/helper.h b/third_party/libwebrtc/include/helper.h new file mode 100644 index 0000000000..aa467afa91 --- /dev/null +++ b/third_party/libwebrtc/include/helper.h @@ -0,0 +1,22 @@ +#ifndef HELPER_HXX +#define HELPER_HXX + +#include "rtc_types.h" + +namespace libwebrtc { +/** + * @brief A helper class with static methods for generating random UUIDs. + * + */ +class Helper { + public: + /** + * @brief Generates a random UUID string. + * + * @return The generated UUID string. 
+ */ + LIB_WEBRTC_API static string CreateRandomUuid(); +}; +} // namespace libwebrtc + +#endif // HELPER_HXX diff --git a/third_party/libwebrtc/include/libwebrtc.h b/third_party/libwebrtc/include/libwebrtc.h new file mode 100644 index 0000000000..f17af394c0 --- /dev/null +++ b/third_party/libwebrtc/include/libwebrtc.h @@ -0,0 +1,60 @@ +#ifndef LIB_WEBRTC_HXX +#define LIB_WEBRTC_HXX + +#include "rtc_peerconnection_factory.h" +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * @class LibWebRTC + * @brief Provides static methods for initializing, creating and terminating + * the WebRTC PeerConnectionFactory and threads. + * + * This class provides static methods for initializing, creating and terminating + * the WebRTC PeerConnectionFactory and threads. These methods are thread-safe + * and can be called from any thread. This class is not meant to be + * instantiated. + * + */ +class LibWebRTC { + public: + /** + * @brief Initializes the WebRTC PeerConnectionFactory and threads. + * + * Initializes the WebRTC PeerConnectionFactory and threads. This method is + * thread-safe and can be called from any thread. It initializes SSL and + * creates three threads: worker_thread, signaling_thread and network_thread. + * + * @return true if initialization is successful, false otherwise. + */ + LIB_WEBRTC_API static bool Initialize(); + + /** + * @brief Creates a new WebRTC PeerConnectionFactory. + * + * Creates a new WebRTC PeerConnectionFactory. This method is thread-safe and + * can be called from any thread. It creates a new instance of the + * RTCPeerConnectionFactoryImpl class and initializes it. + * + * @return A scoped_refptr object that points to the newly created + * RTCPeerConnectionFactory. + */ + LIB_WEBRTC_API static scoped_refptr + CreateRTCPeerConnectionFactory(); + + /** + * @brief Terminates the WebRTC PeerConnectionFactory and threads. + * + * Terminates the WebRTC PeerConnectionFactory and threads. 
This method is + * thread-safe and can be called from any thread. It cleans up SSL and stops + * and destroys the three threads: worker_thread, signaling_thread and + * network_thread. + * + */ + LIB_WEBRTC_API static void Terminate(); +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_HXX diff --git a/third_party/libwebrtc/include/rtc_audio_device.h b/third_party/libwebrtc/include/rtc_audio_device.h new file mode 100644 index 0000000000..cff7b4a6ac --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_device.h @@ -0,0 +1,99 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_DEVICE_HXX +#define LIB_WEBRTC_RTC_AUDIO_DEVICE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioDevice class is an abstract class used for managing the audio + * devices used by WebRTC. It provides methods for device enumeration and + * selection. + */ +class RTCAudioDevice : public RefCountInterface { + public: + typedef fixed_size_function OnDeviceChangeCallback; + + public: + static const int kAdmMaxDeviceNameSize = 128; + static const int kAdmMaxFileNameSize = 512; + static const int kAdmMaxGuidSize = 128; + + public: + /** + * Returns the number of playout devices available. + * + * @return int16_t - The number of playout devices available. + */ + virtual int16_t PlayoutDevices() = 0; + + /** + * Returns the number of recording devices available. + * + * @return int16_t - The number of recording devices available. + */ + virtual int16_t RecordingDevices() = 0; + + /** + * Retrieves the name and GUID of the specified playout device. + * + * @param index - The index of the device. + * @param name - The device name. + * @param guid - The device GUID. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) = 0; + + /** + * Retrieves the name and GUID of the specified recording device. + * + * @param index - The index of the device. 
+ * @param name - The device name. + * @param guid - The device GUID. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t RecordingDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) = 0; + + /** + * Sets the playout device to use. + * + * @param index - The index of the device. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t SetPlayoutDevice(uint16_t index) = 0; + + /** + * Sets the recording device to use. + * + * @param index - The index of the device. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t SetRecordingDevice(uint16_t index) = 0; + + /** + * Registers a listener to be called when audio devices are added or removed. + * + * @param listener - The callback function to register. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t OnDeviceChange(OnDeviceChangeCallback listener) = 0; + + virtual int32_t SetMicrophoneVolume(uint32_t volume) = 0; + + virtual int32_t MicrophoneVolume(uint32_t& volume) = 0; + + virtual int32_t SetSpeakerVolume(uint32_t volume) = 0; + + virtual int32_t SpeakerVolume(uint32_t& volume) = 0; + + protected: + virtual ~RTCAudioDevice() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_DEVICE_HXX diff --git a/third_party/libwebrtc/include/rtc_audio_frame.h b/third_party/libwebrtc/include/rtc_audio_frame.h new file mode 100644 index 0000000000..3f276a1676 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_frame.h @@ -0,0 +1,108 @@ +#ifndef AUDIO_FRAME_HXX +#define AUDIO_FRAME_HXX + +#include "media_manager_types.h" + +namespace b2bua { + +class AudioFrame { + public: + /** + * @brief Creates a new instance of AudioFrame. + * @return AudioFrame*: a pointer to the newly created AudioFrame. + */ + MEDIA_MANAGER_API static AudioFrame* Create(); + + /** + * @brief Creates a new instance of AudioFrame with specified parameters. 
+ * @param id: the unique identifier of the frame. + * @param timestamp: the timestamp of the frame. + * @param data: a pointer to the audio data buffer. + * @param samples_per_channel: the number of samples per channel. + * @param sample_rate_hz: the sample rate in Hz. + * @param num_channels: the number of audio channels. + * @return AudioFrame*: a pointer to the newly created AudioFrame. + */ + MEDIA_MANAGER_API static AudioFrame* Create(int id, uint32_t timestamp, + const int16_t* data, + size_t samples_per_channel, + int sample_rate_hz, + size_t num_channels = 1); + + /** + * @brief Releases the memory of this AudioFrame. + */ + virtual void Release() = 0; + + public: + /** + * @brief Updates the audio frame with specified parameters. + * @param id: the unique identifier of the frame. + * @param timestamp: the timestamp of the frame. + * @param data: a pointer to the audio data buffer. + * @param samples_per_channel: the number of samples per channel. + * @param sample_rate_hz: the sample rate in Hz. + * @param num_channels: the number of audio channels. + */ + virtual void UpdateFrame(int id, uint32_t timestamp, const int16_t* data, + size_t samples_per_channel, int sample_rate_hz, + size_t num_channels = 1) = 0; + + /** + * @brief Copies the contents of another AudioFrame. + * @param src: the source AudioFrame to copy from. + */ + virtual void CopyFrom(const AudioFrame& src) = 0; + + /** + * @brief Adds another AudioFrame to this one. + * @param frame_to_add: the AudioFrame to add. + */ + virtual void Add(const AudioFrame& frame_to_add) = 0; + + /** + * @brief Mutes the audio data in this AudioFrame. + */ + virtual void Mute() = 0; + + /** + * @brief Returns a pointer to the audio data buffer. + * @return const int16_t*: a pointer to the audio data buffer. + */ + virtual const int16_t* data() = 0; + + /** + * @brief Returns the number of samples per channel. + * @return size_t: the number of samples per channel. 
+ */ + virtual size_t samples_per_channel() = 0; + + /** + * @brief Returns the sample rate in Hz. + * @return int: the sample rate in Hz. + */ + virtual int sample_rate_hz() = 0; + + /** + * @brief Returns the number of audio channels. + * @return size_t: the number of audio channels. + */ + virtual size_t num_channels() = 0; + + /** + * @brief Returns the timestamp of the AudioFrame. + * @return uint32_t: the timestamp of the AudioFrame. + */ + virtual uint32_t timestamp() = 0; + + /** + * @brief Returns the unique identifier of the AudioFrame. + * @return int: the unique identifier of the AudioFrame. + */ + + virtual int id() = 0; +}; + +}; // namespace b2bua + +#endif diff --git a/third_party/libwebrtc/include/rtc_audio_source.h b/third_party/libwebrtc/include/rtc_audio_source.h new file mode 100644 index 0000000000..43e39fd801 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_source.h @@ -0,0 +1,25 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX +#define LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioSource class is a base class for audio sources in WebRTC. + * Audio sources represent the source of audio data in WebRTC, such as a + * microphone or a file. This class provides a base interface for audio + * sources to implement, allowing them to be used with WebRTC's audio + * processing and transmission mechanisms. + */ +class RTCAudioSource : public RefCountInterface { + protected: + /** + * The destructor for the RTCAudioSource class. 
+ */ + virtual ~RTCAudioSource() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX diff --git a/third_party/libwebrtc/include/rtc_audio_track.h b/third_party/libwebrtc/include/rtc_audio_track.h new file mode 100644 index 0000000000..c64e4bc4a5 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_track.h @@ -0,0 +1,28 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_TRACK_HXX +#define LIB_WEBRTC_RTC_AUDIO_TRACK_HXX + +#include "rtc_media_track.h" +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioTrack class represents an audio track in WebRTC. + * Audio tracks are used to transmit audio data over a WebRTC peer connection. + * This class is a subclass of the RTCMediaTrack class, which provides a base + * interface for all media tracks in WebRTC. + */ +class RTCAudioTrack : public RTCMediaTrack { + public: + // volume in [0-10] + virtual void SetVolume(double volume) = 0; + + protected: + /** + * The destructor for the RTCAudioTrack class. + */ + virtual ~RTCAudioTrack() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX diff --git a/third_party/libwebrtc/include/rtc_data_channel.h b/third_party/libwebrtc/include/rtc_data_channel.h new file mode 100644 index 0000000000..e1351959f2 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_data_channel.h @@ -0,0 +1,124 @@ +#ifndef LIB_WEBRTC_RTC_DATA_CHANNEL_HXX +#define LIB_WEBRTC_RTC_DATA_CHANNEL_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCDataChannelState enum represents the possible states of a WebRTC data + * channel. Data channels are used to transmit non-audio/video data over a + * WebRTC peer connection. The possible states are: connecting, open, closing, + * and closed. + */ +enum RTCDataChannelState { + RTCDataChannelConnecting, + RTCDataChannelOpen, + RTCDataChannelClosing, + RTCDataChannelClosed, +}; + +/** + * The RTCDataChannelInit struct represents the configuration options for a + * WebRTC data channel. 
These options include whether the channel is ordered and + * reliable, the maximum retransmit time and number of retransmits, the protocol + * to use (sctp or quic), whether the channel is negotiated, and the channel ID. + */ +struct RTCDataChannelInit { + bool ordered = true; + bool reliable = true; + int maxRetransmitTime = -1; + int maxRetransmits = -1; + string protocol = {"sctp"}; // sctp | quic + bool negotiated = false; + int id = 0; +}; + +/** + * The RTCDataChannelObserver class is an interface for receiving events related + * to a WebRTC data channel. These events include changes in the channel's state + * and incoming messages. + */ +class RTCDataChannelObserver { + public: + /** + * Called when the state of the data channel changes. + * The new state is passed as a parameter. + */ + virtual void OnStateChange(RTCDataChannelState state) = 0; + + /** + * Called when a message is received on the data channel. + * The message buffer, its length, and a boolean indicating whether the + * message is binary are passed as parameters. + */ + virtual void OnMessage(const char* buffer, int length, bool binary) = 0; + + protected: + /** + * The destructor for the RTCDataChannelObserver class. + */ + virtual ~RTCDataChannelObserver() = default; +}; + +/** + * The RTCDataChannel class represents a data channel in WebRTC. + * Data channels are used to transmit non-audio/video data over a WebRTC peer + * connection. This class provides a base interface for data channels to + * implement, allowing them to be used with WebRTC's data channel mechanisms. + */ +class RTCDataChannel : public RefCountInterface { + public: + /** + * Sends data over the data channel. + * The data buffer, its size, and a boolean indicating whether the data is + * binary are passed as parameters. + */ + virtual void Send(const uint8_t* data, uint32_t size, + bool binary = false) = 0; + + /** + * Closes the data channel. 
+ */ + virtual void Close() = 0; + + /** + * Registers an observer for events related to the data channel. + * The observer object is passed as a parameter. + */ + virtual void RegisterObserver(RTCDataChannelObserver* observer) = 0; + + /** + * Unregisters the current observer for the data channel. + */ + virtual void UnregisterObserver() = 0; + + /** + * Returns the label of the data channel. + */ + virtual const string label() const = 0; + + /** + * Returns the ID of the data channel. + */ + virtual int id() const = 0; + + /** + * Returns the amount of data buffered in the data channel. + * + * @return uint64_t + */ + virtual uint64_t buffered_amount() const = 0; + + /** + * Returns the state of the data channel. + */ + virtual RTCDataChannelState state() = 0; + + protected: + virtual ~RTCDataChannel() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_DATA_CHANNEL_HXX diff --git a/third_party/libwebrtc/include/rtc_desktop_capturer.h b/third_party/libwebrtc/include/rtc_desktop_capturer.h new file mode 100644 index 0000000000..9d2e955ab3 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_desktop_capturer.h @@ -0,0 +1,138 @@ +/** + * @file rtc_desktop_capturer.h + * This header file defines the interface for capturing desktop media. + */ + +#ifndef LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX +#define LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX + +#include "rtc_desktop_media_list.h" +#include "rtc_types.h" +#include "rtc_video_device.h" + +namespace libwebrtc { + +class DesktopCapturerObserver; + +/** + * @brief The interface for capturing desktop media. + * + * This interface defines methods for registering and deregistering observer + * for desktop capture events, starting and stopping desktop capture, and + * retrieving the current capture state and media source. + */ +class RTCDesktopCapturer : public RefCountInterface { + public: + /** + * @brief Enumeration for the possible states of desktop capture. 
+ */ + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED }; + + public: + /** + * @brief Registers the given observer for desktop capture events. + * + * @param observer Pointer to the observer to be registered. + */ + virtual void RegisterDesktopCapturerObserver( + DesktopCapturerObserver* observer) = 0; + + /** + * @brief Deregisters the currently registered desktop capture observer. + */ + virtual void DeRegisterDesktopCapturerObserver() = 0; + + /** + * @brief Starts desktop capture with the given frame rate. + * + * @param fps The desired frame rate. + * + * @return The current capture state after attempting to start capture. + */ + virtual CaptureState Start(uint32_t fps) = 0; + + /** + * @brief Starts desktop capture with the given frame rate and capture + * dimensions. + * + * @param fps The desired frame rate. + * @param x The left-most pixel coordinate of the capture region. + * @param y The top-most pixel coordinate of the capture region. + * @param w The width of the capture region. + * @param h The height of the capture region. + * + * @return The current capture state after attempting to start capture. + */ + virtual CaptureState Start(uint32_t fps, uint32_t x, uint32_t y, uint32_t w, + uint32_t h) = 0; + + /** + * @brief Stops desktop capture. + */ + virtual void Stop() = 0; + + /** + * @brief Checks if desktop capture is currently running. + * + * @return True if capture is running, false otherwise. + */ + virtual bool IsRunning() = 0; + + /** + * @brief Retrieves the media source for the current desktop capture. + * + * @return A scoped_refptr representing the current capture + * media source. + */ + virtual scoped_refptr source() = 0; + + /** + * @brief Destroys the RTCDesktopCapturer object. + */ + virtual ~RTCDesktopCapturer() {} +}; + +/** + * @brief Observer interface for desktop capturer events. 
+ * + * This class defines the interface for an observer of the DesktopCapturer + * class, allowing clients to be notified of events such as when capturing + * begins or ends, and when an error occurs. + */ +class DesktopCapturerObserver { + public: + /** + * @brief Called when desktop capture starts. + * + * @param capturer A reference to the capturer that started capturing. + */ + virtual void OnStart(scoped_refptr capturer) = 0; + + /** + * @brief Called when desktop capture is paused. + * + * @param capturer A reference to the capturer that paused capturing. + */ + virtual void OnPaused(scoped_refptr capturer) = 0; + + /** + * @brief Called when desktop capture stops. + * + * @param capturer A reference to the capturer that stopped capturing. + */ + virtual void OnStop(scoped_refptr capturer) = 0; + + /** + * @brief Called when an error occurs during desktop capture. + * + * @param capturer A reference to the capturer that encountered an error. + */ + virtual void OnError(scoped_refptr capturer) = 0; + + protected: + ~DesktopCapturerObserver() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX diff --git a/third_party/libwebrtc/include/rtc_desktop_device.h b/third_party/libwebrtc/include/rtc_desktop_device.h new file mode 100644 index 0000000000..e3e4c6fab1 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_desktop_device.h @@ -0,0 +1,25 @@ +#ifndef LIB_WEBRTC_RTC_DESKTOP_DEVICE_HXX +#define LIB_WEBRTC_RTC_DESKTOP_DEVICE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class MediaSource; +class RTCDesktopCapturer; +class RTCDesktopMediaList; + +class RTCDesktopDevice : public RefCountInterface { + public: + virtual scoped_refptr CreateDesktopCapturer( + scoped_refptr source) = 0; + virtual scoped_refptr GetDesktopMediaList( + DesktopType type) = 0; + + protected: + virtual ~RTCDesktopDevice() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX \ No newline at end of file diff --git 
a/third_party/libwebrtc/include/rtc_desktop_media_list.h b/third_party/libwebrtc/include/rtc_desktop_media_list.h new file mode 100644 index 0000000000..48faf2a959 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_desktop_media_list.h @@ -0,0 +1,66 @@ +#ifndef LIB_WEBRTC_RTC_DESKTOP_MEDIA_LIST_HXX +#define LIB_WEBRTC_RTC_DESKTOP_MEDIA_LIST_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class MediaSource : public RefCountInterface { + public: + // source id + virtual string id() const = 0; + + // source name + virtual string name() const = 0; + + // Returns the thumbnail of the source, jpeg format. + virtual portable::vector thumbnail() const = 0; + + virtual DesktopType type() const = 0; + + virtual bool UpdateThumbnail() = 0; + + protected: + virtual ~MediaSource() {} +}; + +class MediaListObserver { + public: + virtual void OnMediaSourceAdded(scoped_refptr source) = 0; + + virtual void OnMediaSourceRemoved(scoped_refptr source) = 0; + + virtual void OnMediaSourceNameChanged(scoped_refptr source) = 0; + + virtual void OnMediaSourceThumbnailChanged( + scoped_refptr source) = 0; + + protected: + virtual ~MediaListObserver() {} +}; + +class RTCDesktopMediaList : public RefCountInterface { + public: + virtual void RegisterMediaListObserver(MediaListObserver* observer) = 0; + + virtual void DeRegisterMediaListObserver() = 0; + + virtual DesktopType type() const = 0; + + virtual int32_t UpdateSourceList(bool force_reload = false, + bool get_thumbnail = true) = 0; + + virtual int GetSourceCount() const = 0; + + virtual scoped_refptr GetSource(int index) = 0; + + virtual bool GetThumbnail(scoped_refptr source, + bool notify = false) = 0; + + protected: + ~RTCDesktopMediaList() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_DESKTOP_MEDIA_LIST_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_dtls_transport.h b/third_party/libwebrtc/include/rtc_dtls_transport.h new file mode 100644 index 0000000000..fe7adad7f0 --- 
/dev/null +++ b/third_party/libwebrtc/include/rtc_dtls_transport.h @@ -0,0 +1,51 @@ +#ifndef LIB_WEBRTC_DTLS_TRANSPORT_H_ +#define LIB_WEBRTC_DTLS_TRANSPORT_H_ + +#include "base/refcount.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCDtlsTransportInformation : public RefCountInterface { + public: + enum class RTCDtlsTransportState { + kNew, // Has not started negotiating yet. + kConnecting, // In the process of negotiating a secure connection. + kConnected, // Completed negotiation and verified fingerprints. + kClosed, // Intentionally closed. + kFailed, // Failure due to an error or failing to verify a remote + // fingerprint. + kNumValues + }; + virtual RTCDtlsTransportInformation& operator=( + scoped_refptr c) = 0; + + virtual RTCDtlsTransportState state() const = 0; + virtual int ssl_cipher_suite() const = 0; + virtual int srtp_cipher_suite() const = 0; +}; + +class RTCDtlsTransportObserver { + public: + virtual void OnStateChange(RTCDtlsTransportInformation info) = 0; + + virtual void OnError(const int type, const char* message) = 0; + + protected: + virtual ~RTCDtlsTransportObserver() = default; +}; + +class RTCDtlsTransport : public RefCountInterface { + LIB_WEBRTC_API static scoped_refptr Create(); + + public: + virtual scoped_refptr GetInformation() = 0; + + virtual void RegisterObserver(RTCDtlsTransportObserver* observer) = 0; + + virtual void UnregisterObserver() = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_DTLS_TRANSPORT_H_ diff --git a/third_party/libwebrtc/include/rtc_dtmf_sender.h b/third_party/libwebrtc/include/rtc_dtmf_sender.h new file mode 100644 index 0000000000..7f42cc56b5 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_dtmf_sender.h @@ -0,0 +1,47 @@ + +#ifndef LIB_WEBRTC_DTMF_SENDER__H_ +#define LIB_WEBRTC_DTMF_SENDER__H_ + +#include "base/refcount.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCDtmfSenderObserver { + public: + virtual void OnToneChange(const string tone, const string 
tone_buffer) = 0; + + virtual void OnToneChange(const string tone) = 0; + + protected: + virtual ~RTCDtmfSenderObserver() = default; +}; + +class RTCDtmfSender : public RefCountInterface { + public: + static const int kDtmfDefaultCommaDelayMs = 2000; + + virtual void RegisterObserver(RTCDtmfSenderObserver* observer) = 0; + + virtual void UnregisterObserver() = 0; + + virtual bool InsertDtmf(const string tones, int duration, + int inter_tone_gap) = 0; + + virtual bool InsertDtmf(const string tones, int duration, int inter_tone_gap, + int comma_delay) = 0; + + virtual bool CanInsertDtmf() = 0; + + virtual const string tones() const = 0; + + virtual int duration() const = 0; + + virtual int inter_tone_gap() const = 0; + + virtual int comma_delay() const = 0; +}; + +} // namespace libwebrtc + +#endif // API_DTMF_SENDER__H_ diff --git a/third_party/libwebrtc/include/rtc_frame_cryptor.h b/third_party/libwebrtc/include/rtc_frame_cryptor.h new file mode 100644 index 0000000000..47f3a409b7 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_frame_cryptor.h @@ -0,0 +1,139 @@ +#ifndef LIB_RTC_FRAME_CYRPTOR_H_ +#define LIB_RTC_FRAME_CYRPTOR_H_ + +#include "base/refcount.h" +#include "rtc_peerconnection_factory.h" +#include "rtc_rtp_receiver.h" +#include "rtc_rtp_sender.h" +#include "rtc_types.h" + +namespace libwebrtc { + +enum class Algorithm { + kAesGcm = 0, + kAesCbc, +}; + +#define DEFAULT_KEYRING_SIZE 16 +#define MAX_KEYRING_SIZE 255 + +struct KeyProviderOptions { + bool shared_key; + vector ratchet_salt; + vector uncrypted_magic_bytes; + int ratchet_window_size; + int failure_tolerance; + // The size of the key ring. between 1 and 255. 
+ int key_ring_size; + bool discard_frame_when_cryptor_not_ready; + KeyProviderOptions() + : shared_key(false), + ratchet_salt(vector()), + ratchet_window_size(0), + failure_tolerance(-1), + key_ring_size(DEFAULT_KEYRING_SIZE), + discard_frame_when_cryptor_not_ready(false) {} + KeyProviderOptions(KeyProviderOptions& copy) + : shared_key(copy.shared_key), + ratchet_salt(copy.ratchet_salt), + ratchet_window_size(copy.ratchet_window_size), + failure_tolerance(copy.failure_tolerance), + key_ring_size(copy.key_ring_size) {} +}; + +/// Shared secret key for frame encryption. +class KeyProvider : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create(KeyProviderOptions*); + + virtual bool SetSharedKey(int index, vector key) = 0; + + virtual vector RatchetSharedKey(int key_index) = 0; + + virtual vector ExportSharedKey(int key_index) = 0; + + /// Set the key at the given index. + virtual bool SetKey(const string participant_id, int index, + vector key) = 0; + + virtual vector RatchetKey(const string participant_id, + int key_index) = 0; + + virtual vector ExportKey(const string participant_id, + int key_index) = 0; + + virtual void SetSifTrailer(vector trailer) = 0; + + protected: + virtual ~KeyProvider() {} +}; + +enum RTCFrameCryptionState { + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +}; + +class RTCFrameCryptorObserver : public RefCountInterface { + public: + virtual void OnFrameCryptionStateChanged(const string participant_id, + RTCFrameCryptionState state) = 0; + + protected: + virtual ~RTCFrameCryptorObserver() {} +}; + +/// Frame encryption/decryption. +/// +class RTCFrameCryptor : public RefCountInterface { + public: + /// Enable/Disable frame crypto for the sender or receiver. + virtual bool SetEnabled(bool enabled) = 0; + + /// Get the enabled state for the sender or receiver. + virtual bool enabled() const = 0; + + /// Set the key index for the sender or receiver. 
+ /// If the key index is not set, the key index will be set to 0. + virtual bool SetKeyIndex(int index) = 0; + + /// Get the key index for the sender or receiver. + virtual int key_index() const = 0; + + virtual const string participant_id() const = 0; + + virtual void RegisterRTCFrameCryptorObserver( + scoped_refptr observer) = 0; + + virtual void DeRegisterRTCFrameCryptorObserver() = 0; + + protected: + virtual ~RTCFrameCryptor() {} +}; + +class FrameCryptorFactory { + public: + /// Create a frame cryptor for [RTCRtpSender]. + LIB_WEBRTC_API static scoped_refptr + frameCryptorFromRtpSender(scoped_refptr factory, + const string participant_id, + scoped_refptr sender, + Algorithm algorithm, + scoped_refptr key_provider); + + /// Create a frame cryptor for [RTCRtpReceiver]. + LIB_WEBRTC_API static scoped_refptr + frameCryptorFromRtpReceiver(scoped_refptr factory, + const string participant_id, + scoped_refptr receiver, + Algorithm algorithm, + scoped_refptr key_provider); +}; + +} // namespace libwebrtc + +#endif // LIB_RTC_FRAME_CYRPTOR_H_ \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_ice_candidate.h b/third_party/libwebrtc/include/rtc_ice_candidate.h new file mode 100644 index 0000000000..82da46edb4 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_ice_candidate.h @@ -0,0 +1,29 @@ +#ifndef LIB_WEBRTC_RTC_ICE_CANDIDATE_HXX +#define LIB_WEBRTC_RTC_ICE_CANDIDATE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCIceCandidate : public RefCountInterface { + public: + static LIB_WEBRTC_API scoped_refptr Create( + const string sdp, const string sdp_mid, int sdp_mline_index, + SdpParseError* error); + + public: + virtual const string candidate() const = 0; + + virtual const string sdp_mid() const = 0; + + virtual int sdp_mline_index() const = 0; + + virtual bool ToString(string& out) = 0; + + protected: + virtual ~RTCIceCandidate() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_ICE_CANDIDATE_HXX diff --git 
a/third_party/libwebrtc/include/rtc_ice_transport.h b/third_party/libwebrtc/include/rtc_ice_transport.h new file mode 100644 index 0000000000..645f305b2d --- /dev/null +++ b/third_party/libwebrtc/include/rtc_ice_transport.h @@ -0,0 +1,75 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef LIB_WEBRTC_RTC_ICE_TRANSPORT_H_ +#define LIB_WEBRTC_RTC_ICE_TRANSPORT_H_ + +#include + +#include "api/async_dns_resolver.h" +#include "api/async_resolver_factory.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" + +namespace libwebrtc { + +class IceTransport : public RefCountInterface { + public: + virtual IceTransport* internal() = 0; +}; + +class IceTransportInit final { + public: + IceTransportInit() = default; + IceTransportInit(const IceTransportInit&) = delete; + IceTransportInit(IceTransportInit&&) = default; + IceTransportInit& operator=(const IceTransportInit&) = delete; + IceTransportInit& operator=(IceTransportInit&&) = default; + + cricket::PortAllocator* port_allocator() { return port_allocator_; } + void set_port_allocator(cricket::PortAllocator* port_allocator) { + port_allocator_ = port_allocator; + } + + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory() { + return async_dns_resolver_factory_; + } + void set_async_dns_resolver_factory( + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory) { + RTC_DCHECK(!async_resolver_factory_); + async_dns_resolver_factory_ = async_dns_resolver_factory; + } + AsyncResolverFactory* async_resolver_factory() { + return async_resolver_factory_; + } + 
ABSL_DEPRECATED("bugs.webrtc.org/12598") + void set_async_resolver_factory( + AsyncResolverFactory* async_resolver_factory) { + RTC_DCHECK(!async_dns_resolver_factory_); + async_resolver_factory_ = async_resolver_factory; + } + + RtcEventLog* event_log() { return event_log_; } + void set_event_log(RtcEventLog* event_log) { event_log_ = event_log; } +}; + +class IceTransportFactory { + public: + virtual ~IceTransportFactory() = default; + + virtual scoped_refptr CreateIceTransport( + const std::string& transport_name, int component, + IceTransportInit init) = 0; +}; + +} // namespace libwebrtc +#endif // API_ICE_TRANSPORT_INTERFACE_H_ diff --git a/third_party/libwebrtc/include/rtc_media_stream.h b/third_party/libwebrtc/include/rtc_media_stream.h new file mode 100644 index 0000000000..17c04d1ba4 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_media_stream.h @@ -0,0 +1,42 @@ +#ifndef LIB_WEBRTC_RTC_MEDIA_STREAM_HXX +#define LIB_WEBRTC_RTC_MEDIA_STREAM_HXX + +#include "rtc_audio_track.h" +#include "rtc_types.h" +#include "rtc_video_track.h" + +namespace libwebrtc { + +class RTCMediaStream : public RefCountInterface { + public: + virtual bool AddTrack(scoped_refptr track) = 0; + + virtual bool AddTrack(scoped_refptr track) = 0; + + virtual bool RemoveTrack(scoped_refptr track) = 0; + + virtual bool RemoveTrack(scoped_refptr track) = 0; + + virtual vector> audio_tracks() = 0; + + virtual vector> video_tracks() = 0; + + virtual vector> tracks() = 0; + + virtual scoped_refptr FindAudioTrack( + const string track_id) = 0; + + virtual scoped_refptr FindVideoTrack( + const string track_id) = 0; + + virtual const string label() = 0; + + virtual const string id() = 0; + + protected: + ~RTCMediaStream() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_STREAM_HXX diff --git a/third_party/libwebrtc/include/rtc_media_track.h b/third_party/libwebrtc/include/rtc_media_track.h new file mode 100644 index 0000000000..ff5a2f743e --- /dev/null +++ 
b/third_party/libwebrtc/include/rtc_media_track.h @@ -0,0 +1,34 @@ +#ifndef LIB_WEBRTC_RTC_MEDIA_TRACK_HXX +#define LIB_WEBRTC_RTC_MEDIA_TRACK_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/*Media Track interface*/ +class RTCMediaTrack : public RefCountInterface { + public: + enum RTCTrackState { + kLive, + kEnded, + }; + virtual RTCTrackState state() const = 0; + + /*track type: audio/video*/ + virtual const string kind() const = 0; + + /*track id*/ + virtual const string id() const = 0; + + virtual bool enabled() const = 0; + + /*mute track*/ + virtual bool set_enabled(bool enable) = 0; + + protected: + ~RTCMediaTrack() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_TRACK_HXX diff --git a/third_party/libwebrtc/include/rtc_mediaconstraints.h b/third_party/libwebrtc/include/rtc_mediaconstraints.h new file mode 100644 index 0000000000..2596e6153f --- /dev/null +++ b/third_party/libwebrtc/include/rtc_mediaconstraints.h @@ -0,0 +1,79 @@ +#ifndef LIB_WEBRTC_RTC_MEDIA_CONSTRAINTS_HXX +#define LIB_WEBRTC_RTC_MEDIA_CONSTRAINTS_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCMediaConstraints : public RefCountInterface { + public: + // These keys are google specific. 
+ LIB_WEBRTC_API static const char* + kGoogEchoCancellation; // googEchoCancellation + + LIB_WEBRTC_API static const char* + kExtendedFilterEchoCancellation; // googEchoCancellation2 + LIB_WEBRTC_API static const char* + kDAEchoCancellation; // googDAEchoCancellation + LIB_WEBRTC_API static const char* kAutoGainControl; // googAutoGainControl + LIB_WEBRTC_API static const char* kNoiseSuppression; // googNoiseSuppression + LIB_WEBRTC_API static const char* kHighpassFilter; // googHighpassFilter + LIB_WEBRTC_API static const char* kAudioMirroring; // googAudioMirroring + LIB_WEBRTC_API static const char* + kAudioNetworkAdaptorConfig; // googAudioNetworkAdaptorConfig + + // Constraint keys for CreateOffer / CreateAnswer + // Specified by the W3C PeerConnection spec + LIB_WEBRTC_API static const char* + kOfferToReceiveVideo; // OfferToReceiveVideo + LIB_WEBRTC_API static const char* + kOfferToReceiveAudio; // OfferToReceiveAudio + LIB_WEBRTC_API static const char* + kVoiceActivityDetection; // VoiceActivityDetection + LIB_WEBRTC_API static const char* kIceRestart; // IceRestart + // These keys are google specific. + LIB_WEBRTC_API static const char* kUseRtpMux; // googUseRtpMUX + + // Constraints values. + LIB_WEBRTC_API static const char* kValueTrue; // true + LIB_WEBRTC_API static const char* kValueFalse; // false + + // PeerConnection constraint keys. + // Temporary pseudo-constraints used to enable DataChannels + LIB_WEBRTC_API static const char* + kEnableRtpDataChannels; // Enable RTP DataChannels + // Google-specific constraint keys. + // Temporary pseudo-constraint for enabling DSCP through JS. + LIB_WEBRTC_API static const char* kEnableDscp; // googDscp + // Constraint to enable IPv6 through JS. + LIB_WEBRTC_API static const char* kEnableIPv6; // googIPv6 + // Temporary constraint to enable suspend below min bitrate feature. 
+ LIB_WEBRTC_API static const char* kEnableVideoSuspendBelowMinBitrate; + // googSuspendBelowMinBitrate + // Constraint to enable combined audio+video bandwidth estimation. + //LIB_WEBRTC_API static const char* + // kCombinedAudioVideoBwe; // googCombinedAudioVideoBwe + LIB_WEBRTC_API static const char* + kScreencastMinBitrate; // googScreencastMinBitrate + LIB_WEBRTC_API static const char* + kCpuOveruseDetection; // googCpuOveruseDetection + + // Specifies number of simulcast layers for all video tracks + // with a Plan B offer/answer + // (see RTCOfferAnswerOptions::num_simulcast_layers). + LIB_WEBRTC_API static const char* kNumSimulcastLayers; + + public: + LIB_WEBRTC_API static scoped_refptr Create(); + + virtual void AddMandatoryConstraint(const string key, const string value) = 0; + + virtual void AddOptionalConstraint(const string key, const string value) = 0; + + protected: + virtual ~RTCMediaConstraints() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_CONSTRAINTS_HXX diff --git a/third_party/libwebrtc/include/rtc_peerconnection.h b/third_party/libwebrtc/include/rtc_peerconnection.h new file mode 100644 index 0000000000..35ea2ad3ea --- /dev/null +++ b/third_party/libwebrtc/include/rtc_peerconnection.h @@ -0,0 +1,273 @@ +#ifndef LIB_WEBRTC_RTC_PEERCONNECTION_HXX +#define LIB_WEBRTC_RTC_PEERCONNECTION_HXX + +#include "rtc_audio_track.h" +#include "rtc_data_channel.h" +#include "rtc_ice_candidate.h" +#include "rtc_media_stream.h" +#include "rtc_mediaconstraints.h" +#include "rtc_rtp_sender.h" +#include "rtc_rtp_transceiver.h" +#include "rtc_session_description.h" +#include "rtc_video_source.h" +#include "rtc_video_track.h" + +namespace libwebrtc { + +enum SessionDescriptionErrorType { + kPeerConnectionInitFailed = 0, + kCreatePeerConnectionFailed, + kSDPParseFailed, +}; + +enum RTCPeerConnectionState { + RTCPeerConnectionStateNew, + RTCPeerConnectionStateConnecting, + RTCPeerConnectionStateConnected, + RTCPeerConnectionStateDisconnected, 
+ RTCPeerConnectionStateFailed, + RTCPeerConnectionStateClosed, +}; + +enum RTCSignalingState { + RTCSignalingStateStable, + RTCSignalingStateHaveLocalOffer, + RTCSignalingStateHaveRemoteOffer, + RTCSignalingStateHaveLocalPrAnswer, + RTCSignalingStateHaveRemotePrAnswer, + RTCSignalingStateClosed +}; + +enum RTCIceGatheringState { + RTCIceGatheringStateNew, + RTCIceGatheringStateGathering, + RTCIceGatheringStateComplete +}; + +enum RTCIceConnectionState { + RTCIceConnectionStateNew, + RTCIceConnectionStateChecking, + RTCIceConnectionStateCompleted, + RTCIceConnectionStateConnected, + RTCIceConnectionStateFailed, + RTCIceConnectionStateDisconnected, + RTCIceConnectionStateClosed, + RTCIceConnectionStateMax, +}; + +class RTCStatsMember : public RefCountInterface { + public: + // Member value types. + enum Type { + kBool, // bool + kInt32, // int32_t + kUint32, // uint32_t + kInt64, // int64_t + kUint64, // uint64_t + kDouble, // double + kString, // std::string + + kSequenceBool, // std::vector + kSequenceInt32, // std::vector + kSequenceUint32, // std::vector + kSequenceInt64, // std::vector + kSequenceUint64, // std::vector + kSequenceDouble, // std::vector + kSequenceString, // std::vector + + kMapStringUint64, // std::map + kMapStringDouble, // std::map + }; + + public: + virtual string GetName() const = 0; + virtual Type GetType() const = 0; + virtual bool IsDefined() const = 0; + + virtual bool ValueBool() const = 0; + virtual int32_t ValueInt32() const = 0; + virtual uint32_t ValueUint32() const = 0; + virtual int64_t ValueInt64() const = 0; + virtual uint64_t ValueUint64() const = 0; + virtual double ValueDouble() const = 0; + virtual string ValueString() const = 0; + virtual vector ValueSequenceBool() const = 0; + virtual vector ValueSequenceInt32() const = 0; + virtual vector ValueSequenceUint32() const = 0; + virtual vector ValueSequenceInt64() const = 0; + virtual vector ValueSequenceUint64() const = 0; + virtual vector ValueSequenceDouble() const = 0; + 
virtual vector ValueSequenceString() const = 0; + virtual map ValueMapStringUint64() const = 0; + virtual map ValueMapStringDouble() const = 0; + + protected: + virtual ~RTCStatsMember() {} +}; + +class MediaRTCStats : public RefCountInterface { + public: + virtual const string id() = 0; + + virtual const string type() = 0; + + virtual int64_t timestamp_us() = 0; + + virtual const string ToJson() = 0; + + virtual const vector> Members() = 0; +}; + +typedef fixed_size_function> reports)> + OnStatsCollectorSuccess; + +typedef fixed_size_function OnStatsCollectorFailure; + +typedef fixed_size_function + OnSdpCreateSuccess; + +typedef fixed_size_function OnSdpCreateFailure; + +typedef fixed_size_function OnSetSdpSuccess; + +typedef fixed_size_function OnSetSdpFailure; + +typedef fixed_size_function + OnGetSdpSuccess; + +typedef fixed_size_function OnGetSdpFailure; + +class RTCPeerConnectionObserver { + public: + virtual void OnSignalingState(RTCSignalingState state) = 0; + + virtual void OnPeerConnectionState(RTCPeerConnectionState state) = 0; + + virtual void OnIceGatheringState(RTCIceGatheringState state) = 0; + + virtual void OnIceConnectionState(RTCIceConnectionState state) = 0; + + virtual void OnIceCandidate(scoped_refptr candidate) = 0; + + virtual void OnAddStream(scoped_refptr stream) = 0; + + virtual void OnRemoveStream(scoped_refptr stream) = 0; + + virtual void OnDataChannel(scoped_refptr data_channel) = 0; + + virtual void OnRenegotiationNeeded() = 0; + + virtual void OnTrack(scoped_refptr transceiver) = 0; + + virtual void OnAddTrack(vector> streams, + scoped_refptr receiver) = 0; + + virtual void OnRemoveTrack(scoped_refptr receiver) = 0; + + protected: + virtual ~RTCPeerConnectionObserver() {} +}; + +class RTCPeerConnection : public RefCountInterface { + public: + virtual int AddStream(scoped_refptr stream) = 0; + + virtual int RemoveStream(scoped_refptr stream) = 0; + + virtual scoped_refptr CreateLocalMediaStream( + const string stream_id) = 0; + + 
virtual scoped_refptr CreateDataChannel( + const string label, RTCDataChannelInit* dataChannelDict) = 0; + + virtual void CreateOffer(OnSdpCreateSuccess success, + OnSdpCreateFailure failure, + scoped_refptr constraints) = 0; + + virtual void CreateAnswer(OnSdpCreateSuccess success, + OnSdpCreateFailure failure, + scoped_refptr constraints) = 0; + + virtual void RestartIce() = 0; + + virtual void Close() = 0; + + virtual void SetLocalDescription(const string sdp, const string type, + OnSetSdpSuccess success, + OnSetSdpFailure failure) = 0; + + virtual void SetRemoteDescription(const string sdp, const string type, + OnSetSdpSuccess success, + OnSetSdpFailure failure) = 0; + + virtual void GetLocalDescription(OnGetSdpSuccess success, + OnGetSdpFailure failure) = 0; + + virtual void GetRemoteDescription(OnGetSdpSuccess success, + OnGetSdpFailure failure) = 0; + + virtual void AddCandidate(const string mid, int mid_mline_index, + const string candiate) = 0; + + virtual void RegisterRTCPeerConnectionObserver( + RTCPeerConnectionObserver* observer) = 0; + + virtual void DeRegisterRTCPeerConnectionObserver() = 0; + + virtual vector> local_streams() = 0; + + virtual vector> remote_streams() = 0; + + virtual bool GetStats(scoped_refptr sender, + OnStatsCollectorSuccess success, + OnStatsCollectorFailure failure) = 0; + + virtual bool GetStats(scoped_refptr receiver, + OnStatsCollectorSuccess success, + OnStatsCollectorFailure failure) = 0; + + virtual void GetStats(OnStatsCollectorSuccess success, + OnStatsCollectorFailure failure) = 0; + + virtual scoped_refptr AddTransceiver( + scoped_refptr track, + scoped_refptr init) = 0; + + virtual scoped_refptr AddTransceiver( + scoped_refptr track) = 0; + + virtual scoped_refptr AddTrack( + scoped_refptr track, const vector streamIds) = 0; + + virtual scoped_refptr AddTransceiver( + RTCMediaType media_type) = 0; + + virtual scoped_refptr AddTransceiver( + RTCMediaType media_type, scoped_refptr init) = 0; + + virtual bool 
RemoveTrack(scoped_refptr render) = 0; + + virtual vector> senders() = 0; + + virtual vector> transceivers() = 0; + + virtual vector> receivers() = 0; + + virtual RTCSignalingState signaling_state() = 0; + + virtual RTCIceConnectionState ice_connection_state() = 0; + + virtual RTCIceConnectionState standardized_ice_connection_state() = 0; + + virtual RTCPeerConnectionState peer_connection_state() = 0; + + virtual RTCIceGatheringState ice_gathering_state() = 0; + + protected: + virtual ~RTCPeerConnection() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_PEERCONNECTION_HXX diff --git a/third_party/libwebrtc/include/rtc_peerconnection_factory.h b/third_party/libwebrtc/include/rtc_peerconnection_factory.h new file mode 100644 index 0000000000..cb024672c2 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_peerconnection_factory.h @@ -0,0 +1,70 @@ +#ifndef LIB_WEBRTC_RTC_PEERCONNECTION_FACTORY_HXX +#define LIB_WEBRTC_RTC_PEERCONNECTION_FACTORY_HXX + +#include "rtc_audio_source.h" +#include "rtc_audio_track.h" +#include "rtc_types.h" +#ifdef RTC_DESKTOP_DEVICE +#include "rtc_desktop_device.h" +#endif +#include "rtc_media_stream.h" +#include "rtc_mediaconstraints.h" +#include "rtc_video_device.h" +#include "rtc_video_source.h" + +namespace libwebrtc { + +class RTCPeerConnection; +class RTCAudioDevice; +class RTCVideoDevice; +class RTCRtpCapabilities; + +class RTCPeerConnectionFactory : public RefCountInterface { + public: + virtual bool Initialize() = 0; + + virtual bool Terminate() = 0; + + virtual scoped_refptr Create( + const RTCConfiguration& configuration, + scoped_refptr constraints) = 0; + + virtual void Delete(scoped_refptr peerconnection) = 0; + + virtual scoped_refptr GetAudioDevice() = 0; + + virtual scoped_refptr GetVideoDevice() = 0; +#ifdef RTC_DESKTOP_DEVICE + virtual scoped_refptr GetDesktopDevice() = 0; +#endif + virtual scoped_refptr CreateAudioSource( + const string audio_source_label) = 0; + + virtual scoped_refptr CreateVideoSource( + 
scoped_refptr capturer, const string video_source_label, + scoped_refptr constraints) = 0; +#ifdef RTC_DESKTOP_DEVICE + virtual scoped_refptr CreateDesktopSource( + scoped_refptr capturer, + const string video_source_label, + scoped_refptr constraints) = 0; +#endif + virtual scoped_refptr CreateAudioTrack( + scoped_refptr source, const string track_id) = 0; + + virtual scoped_refptr CreateVideoTrack( + scoped_refptr source, const string track_id) = 0; + + virtual scoped_refptr CreateStream( + const string stream_id) = 0; + + virtual scoped_refptr GetRtpSenderCapabilities( + RTCMediaType media_type) = 0; + + virtual scoped_refptr GetRtpReceiverCapabilities( + RTCMediaType media_type) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_PEERCONNECTION_FACTORY_HXX diff --git a/third_party/libwebrtc/include/rtc_rtp_capabilities.h b/third_party/libwebrtc/include/rtc_rtp_capabilities.h new file mode 100644 index 0000000000..39b49765fc --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_capabilities.h @@ -0,0 +1,60 @@ +#ifndef LIB_WBBRTC_RTC_RTP_CAPABILITIES_HXX +#define LIB_WBBRTC_RTC_RTP_CAPABILITIES_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCRtpCodecCapability : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create(); + + virtual void set_mime_type(const string& mime_type) = 0; + virtual void set_clock_rate(int clock_rate) = 0; + virtual void set_channels(int channels) = 0; + virtual void set_sdp_fmtp_line(const string& sdp_fmtp_line) = 0; + + virtual string mime_type() const = 0; + virtual int clock_rate() const = 0; + virtual int channels() const = 0; + virtual string sdp_fmtp_line() const = 0; + + protected: + virtual ~RTCRtpCodecCapability() {} +}; + +class RTCRtpHeaderExtensionCapability : public RefCountInterface { + public: + virtual const string uri() = 0; + virtual void set_uri(const string uri) = 
0; + + virtual int preferred_id() = 0; + virtual void set_preferred_id(int value) = 0; + + virtual bool preferred_encrypt() = 0; + virtual void set_preferred_encrypt(bool value) = 0; +}; + +class RTCRtpCapabilities : public RefCountInterface { + public: + virtual const vector> codecs() = 0; + virtual void set_codecs( + const vector> codecs) = 0; + + virtual const vector> + header_extensions() = 0; + + virtual void set_header_extensions( + const vector> + header_extensions) = 0; + + // virtual const vector> fec() = 0; + // virtual void set_fec(const vector> fec) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WBBRTC_RTC_RTP_CAPABILITIES_HXX diff --git a/third_party/libwebrtc/include/rtc_rtp_parameters.h b/third_party/libwebrtc/include/rtc_rtp_parameters.h new file mode 100644 index 0000000000..3ef87155d5 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_parameters.h @@ -0,0 +1,243 @@ +#ifndef LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX +#define LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_types.h" + +namespace libwebrtc { + +enum class RTCRtpTransceiverDirection { + kSendRecv, + kSendOnly, + kRecvOnly, + kInactive, + kStopped, +}; + +enum class RTCFecMechanism { + RED, + RED_AND_ULPFEC, + FLEXFEC, +}; + +enum class RTCRtcpFeedbackType { + CCM, + LNTF, + NACK, + REMB, + TRANSPORT_CC, +}; + +enum class RTCRtcpFeedbackMessageType { + GENERIC_NACK, + PLI, + FIR, +}; + +enum class RTCDtxStatus { + DISABLED, + ENABLED, +}; + +enum class RTCDegradationPreference { + DISABLED, + MAINTAIN_FRAMERATE, + MAINTAIN_RESOLUTION, + BALANCED, +}; + +class RTCRtcpFeedback : public RefCountInterface { + virtual RTCRtcpFeedbackType type() = 0; + virtual void set_type(RTCRtcpFeedbackType value) = 0; + + virtual RTCRtcpFeedbackMessageType message_type() = 0; + virtual void set_message_type(RTCRtcpFeedbackMessageType value) = 0; + + virtual bool operator==(scoped_refptr o) = 0; + virtual bool operator!=(scoped_refptr 
o) = 0; +}; + +class RTCRtpExtension : public RefCountInterface { + public: + enum RTCFilter { + kDiscardEncryptedExtension, + kPreferEncryptedExtension, + kRequireEncryptedExtension, + }; + + virtual const string ToString() const = 0; + virtual bool operator==(scoped_refptr o) const = 0; + + virtual const string uri() = 0; + virtual void set_uri(const string uri) = 0; + + virtual int id() = 0; + virtual void set_id(int value) = 0; + + virtual bool encrypt() = 0; + virtual void set_encrypt(bool value) = 0; +}; + +class RtpFecParameters : public RefCountInterface { + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual RTCFecMechanism mechanism() = 0; + virtual void set_mechanism(RTCFecMechanism value) = 0; + + virtual bool operator==(const RtpFecParameters& o) const = 0; + virtual bool operator!=(const RtpFecParameters& o) const = 0; +}; + +class RTCRtpRtxParameters : public RefCountInterface { + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual bool operator==(scoped_refptr o) const = 0; + + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +class RTCRtpCodecParameters : public RefCountInterface { + public: + virtual const string mime_type() const = 0; + + virtual const string name() = 0; + virtual void set_name(const string name) = 0; + + virtual RTCMediaType kind() = 0; + virtual void set_kind(RTCMediaType value) = 0; + + virtual int payload_type() = 0; + virtual void set_payload_type(int value) = 0; + + virtual int clock_rate() = 0; + virtual void set_clock_rate(int value) = 0; + + virtual int num_channels() = 0; + virtual void set_num_channels(int value) = 0; + + virtual int max_ptime() = 0; + virtual void set_max_ptime(int value) = 0; + + virtual int ptime() = 0; + virtual void set_ptime(int value) = 0; + + virtual const vector> rtcp_feedback() = 0; + virtual void set_rtcp_feedback( + const vector> feecbacks) = 0; + + virtual const vector> parameters() = 0; + virtual void 
set_parameters(const map parameters) = 0; + + virtual bool operator==(scoped_refptr o) = 0; + virtual bool operator!=(scoped_refptr o) = 0; + + protected: + virtual ~RTCRtpCodecParameters() {} +}; + +class RTCRtcpParameters : public RefCountInterface { + public: + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual const string cname() = 0; + virtual void set_cname(const string) = 0; + + virtual bool reduced_size() = 0; + virtual void set_reduced_size(bool value) = 0; + + virtual bool mux() = 0; + virtual void set_mux(bool value) = 0; + + virtual bool operator==(scoped_refptr o) const = 0; + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +enum class RTCPriority { + kVeryLow, + kLow, + kMedium, + kHigh, +}; + +class RTCRtpEncodingParameters : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create(); + + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual double bitrate_priority() = 0; + virtual void set_bitrate_priority(double value) = 0; + + virtual RTCPriority network_priority() = 0; + virtual void set_network_priority(RTCPriority value) = 0; + + virtual int max_bitrate_bps() = 0; + virtual void set_max_bitrate_bps(int value) = 0; + + virtual int min_bitrate_bps() = 0; + virtual void set_min_bitrate_bps(int value) = 0; + + virtual double max_framerate() = 0; + virtual void set_max_framerate(double value) = 0; + + virtual int num_temporal_layers() = 0; + virtual void set_num_temporal_layers(int value) = 0; + + virtual double scale_resolution_down_by() = 0; + virtual void set_scale_resolution_down_by(double value) = 0; + + virtual const string scalability_mode() = 0; + virtual void set_scalability_mode(const string mode) = 0; + + virtual bool active() = 0; + virtual void set_active(bool value) = 0; + + virtual const string rid() = 0; + virtual void set_rid(const string rid) = 0; + + virtual bool adaptive_ptime() = 0; + virtual void set_adaptive_ptime(bool value) 
= 0; + + virtual bool operator==(scoped_refptr o) const = 0; + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +struct RTCRtpParameters : public RefCountInterface { + public: + // static scoped_refptr Create(); + virtual const string transaction_id() = 0; + virtual void set_transaction_id(const string id) = 0; + + virtual const string mid() = 0; + virtual void set_mid(const string mid) = 0; + + virtual const vector> codecs() = 0; + virtual void set_codecs( + const vector> codecs) = 0; + + virtual const vector> header_extensions() = 0; + virtual void set_header_extensions( + const vector> header_extensions) = 0; + + virtual const vector> encodings() = 0; + virtual void set_encodings( + const vector> encodings) = 0; + + virtual scoped_refptr rtcp_parameters() = 0; + virtual void set_rtcp_parameters( + scoped_refptr rtcp_parameters) = 0; + + virtual RTCDegradationPreference GetDegradationPreference() = 0; + virtual void SetDegradationPreference(RTCDegradationPreference value) = 0; + + virtual bool operator==(scoped_refptr o) const = 0; + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +} // namespace libwebrtc +#endif // LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_rtp_receiver.h b/third_party/libwebrtc/include/rtc_rtp_receiver.h new file mode 100644 index 0000000000..a72f2b8a06 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_receiver.h @@ -0,0 +1,61 @@ +#ifndef LIB_WEBRTC_RTP_RECEIVER_HXX +#define LIB_WEBRTC_RTP_RECEIVER_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +// #include "rtc_frame_decryptor.h" +// #include "rtc_frame_encryptor.h" + +namespace libwebrtc { + +class RTCMediaTrack; +class RTCMediaStream; +class RTCDtlsTransport; + +class RTCRtpReceiverObserver { + public: + virtual void OnFirstPacketReceived(RTCMediaType media_type) = 0; + + protected: + virtual ~RTCRtpReceiverObserver() {} 
+}; + +class RTCRtpReceiver : public RefCountInterface { + public: + virtual scoped_refptr track() const = 0; + + virtual scoped_refptr dtls_transport() const = 0; + + virtual const vector stream_ids() const = 0; + + virtual vector> streams() const = 0; + + virtual RTCMediaType media_type() const = 0; + + virtual const string id() const = 0; + + virtual scoped_refptr parameters() const = 0; + + virtual bool set_parameters(scoped_refptr parameters) = 0; + + virtual void SetObserver(RTCRtpReceiverObserver* observer) = 0; + + virtual void SetJitterBufferMinimumDelay(double delay_seconds) = 0; + + // virtual Vector GetSources() const = 0; + + // virtual void SetFrameDecryptor( + // scoped_refptr frame_decryptor); + + // virtual scoped_refptr GetFrameDecryptor() const = 0; + + // virtual void SetDepacketizerToDecoderFrameTransformer( + // scoped_refptr frame_transformer) = 0; +}; + +} // namespace libwebrtc + +#endif // !LIB_WEBRTC_RTP_RECEIVER_H_ \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_rtp_sender.h b/third_party/libwebrtc/include/rtc_rtp_sender.h new file mode 100644 index 0000000000..9c2f73249b --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_sender.h @@ -0,0 +1,46 @@ +#ifndef LIB_WEBRTC_RTC_RTP_SENDER_HXX +#define LIB_WEBRTC_RTC_RTP_SENDER_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCMediaTrack; +class RTCDtlsTransport; +class RTCDtmfSender; + +class RTCRtpSender : public RefCountInterface { + public: + virtual bool set_track(scoped_refptr track) = 0; + + virtual scoped_refptr track() const = 0; + + virtual scoped_refptr dtls_transport() const = 0; + + virtual uint32_t ssrc() const = 0; + + virtual RTCMediaType media_type() const = 0; + + virtual const string id() const = 0; + + virtual const vector stream_ids() const = 0; + + virtual void set_stream_ids(const vector stream_ids) const = 0; + + virtual const 
vector> + init_send_encodings() const = 0; + + virtual scoped_refptr parameters() const = 0; + + virtual bool set_parameters( + const scoped_refptr parameters) = 0; + + virtual scoped_refptr dtmf_sender() const = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_TYPES_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_rtp_transceiver.h b/third_party/libwebrtc/include/rtc_rtp_transceiver.h new file mode 100644 index 0000000000..ecf24f45b6 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_transceiver.h @@ -0,0 +1,66 @@ +#ifndef LIB_WEBRTC_RTC_RTP_TRANSCEIVER_HXX +#define LIB_WEBRTC_RTC_RTP_TRANSCEIVER_HXX + +#include "base/refcount.h" +#include "rtc_rtp_capabilities.h" +#include "rtc_rtp_parameters.h" +#include "rtc_rtp_receiver.h" +#include "rtc_rtp_sender.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCRtpTransceiverInit : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create( + RTCRtpTransceiverDirection direction, const vector stream_ids, + const vector> encodings); + + virtual RTCRtpTransceiverDirection direction() = 0; + virtual void set_direction(RTCRtpTransceiverDirection value) = 0; + + virtual const vector stream_ids() = 0; + virtual void set_stream_ids(const vector ids) = 0; + + virtual const vector> + send_encodings() = 0; + virtual void set_send_encodings( + const vector> send_encodings) = 0; +}; + +class RTCRtpTransceiver : public RefCountInterface { + public: + virtual RTCMediaType media_type() const = 0; + + virtual const string mid() const = 0; + + virtual scoped_refptr sender() const = 0; + + virtual scoped_refptr receiver() const = 0; + + virtual bool Stopped() const = 0; + + virtual bool Stopping() const = 0; + + virtual RTCRtpTransceiverDirection direction() const = 0; + + virtual const string SetDirectionWithError( + RTCRtpTransceiverDirection new_direction) = 0; + + virtual RTCRtpTransceiverDirection current_direction() const = 0; + + virtual 
RTCRtpTransceiverDirection fired_direction() const = 0; + + virtual const string StopStandard() = 0; + + virtual void StopInternal() = 0; + + virtual void SetCodecPreferences( + vector> codecs) = 0; + + virtual const string transceiver_id() const = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_TYPES_HXX diff --git a/third_party/libwebrtc/include/rtc_session_description.h b/third_party/libwebrtc/include/rtc_session_description.h new file mode 100644 index 0000000000..84aa2abbc9 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_session_description.h @@ -0,0 +1,30 @@ +#ifndef LIB_WEBRTC_RTC_SESSION_DESCRIPTION_HXX +#define LIB_WEBRTC_RTC_SESSION_DESCRIPTION_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCSessionDescription : public RefCountInterface { + public: + enum SdpType { kOffer = 0, kPrAnswer, kAnswer }; + + static LIB_WEBRTC_API scoped_refptr Create( + const string type, const string sdp, SdpParseError* error); + + public: + virtual const string sdp() const = 0; + + virtual const string type() = 0; + + virtual SdpType GetType() = 0; + + virtual bool ToString(string& out) = 0; + + protected: + virtual ~RTCSessionDescription() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_SESSION_DESCRIPTION_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_types.h b/third_party/libwebrtc/include/rtc_types.h new file mode 100644 index 0000000000..017386d071 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_types.h @@ -0,0 +1,113 @@ +#ifndef LIB_WEBRTC_RTC_TYPES_HXX +#define LIB_WEBRTC_RTC_TYPES_HXX + +#ifdef LIB_WEBRTC_API_EXPORTS +#define LIB_WEBRTC_API __declspec(dllexport) +#elif defined(LIB_WEBRTC_API_DLL) +#define LIB_WEBRTC_API __declspec(dllimport) +#elif !defined(WIN32) +#define LIB_WEBRTC_API __attribute__((visibility("default"))) +#else +#define LIB_WEBRTC_API +#endif + +#include "base/fixed_size_function.h" +#include "base/portable.h" +#include "base/refcount.h" +#include 
"base/scoped_ref_ptr.h" + +namespace libwebrtc { + +enum { kMaxIceServerSize = 8 }; + +// template +// using vector = bsp::inlined_vector; + +template +using map = std::map; + +enum class MediaSecurityType { kSRTP_None = 0, kSDES_SRTP, kDTLS_SRTP }; + +enum class RTCMediaType { AUDIO, VIDEO, DATA, UNSUPPORTED }; + +using string = portable::string; + +// template +// using map = portable::map; + +template +using vector = portable::vector; + +struct IceServer { + string uri; + string username; + string password; +}; + +enum class IceTransportsType { kNone, kRelay, kNoHost, kAll }; + +enum class TcpCandidatePolicy { + kTcpCandidatePolicyEnabled, + kTcpCandidatePolicyDisabled +}; + +enum class CandidateNetworkPolicy { + kCandidateNetworkPolicyAll, + kCandidateNetworkPolicyLowCost +}; + +enum class RtcpMuxPolicy { + kRtcpMuxPolicyNegotiate, + kRtcpMuxPolicyRequire, +}; + +enum BundlePolicy { + kBundlePolicyBalanced, + kBundlePolicyMaxBundle, + kBundlePolicyMaxCompat +}; + +enum class SdpSemantics { kPlanB, kUnifiedPlan }; + +struct RTCConfiguration { + IceServer ice_servers[kMaxIceServerSize]; + IceTransportsType type = IceTransportsType::kAll; + BundlePolicy bundle_policy = BundlePolicy::kBundlePolicyBalanced; + RtcpMuxPolicy rtcp_mux_policy = RtcpMuxPolicy::kRtcpMuxPolicyRequire; + CandidateNetworkPolicy candidate_network_policy = + CandidateNetworkPolicy::kCandidateNetworkPolicyAll; + TcpCandidatePolicy tcp_candidate_policy = + TcpCandidatePolicy::kTcpCandidatePolicyEnabled; + + int ice_candidate_pool_size = 0; + + MediaSecurityType srtp_type = MediaSecurityType::kDTLS_SRTP; + SdpSemantics sdp_semantics = SdpSemantics::kUnifiedPlan; + bool offer_to_receive_audio = true; + bool offer_to_receive_video = true; + + bool disable_ipv6 = false; + bool disable_ipv6_on_wifi = false; + int max_ipv6_networks = 5; + bool disable_link_local_networks = false; + int screencast_min_bitrate = -1; + + // private + bool use_rtp_mux = true; + uint32_t local_audio_bandwidth = 128; + 
uint32_t local_video_bandwidth = 512; +}; + +struct SdpParseError { + public: + // The sdp line that causes the error. + string line; + // Explains the error. + string description; +}; + +enum DesktopType { kScreen, kWindow }; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_TYPES_HXX diff --git a/third_party/libwebrtc/include/rtc_video_device.h b/third_party/libwebrtc/include/rtc_video_device.h new file mode 100644 index 0000000000..130849767c --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_device.h @@ -0,0 +1,41 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX +#define LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCVideoCapturer : public RefCountInterface { + public: + virtual ~RTCVideoCapturer() {} + + virtual bool StartCapture() = 0; + + virtual bool CaptureStarted() = 0; + + virtual void StopCapture() = 0; +}; + +class RTCVideoDevice : public RefCountInterface { + public: + virtual uint32_t NumberOfDevices() = 0; + + virtual int32_t GetDeviceName(uint32_t deviceNumber, char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = 0, + uint32_t productUniqueIdUTF8Length = 0) = 0; + + virtual scoped_refptr Create(const char* name, + uint32_t index, size_t width, + size_t height, + size_t target_fps) = 0; + + protected: + virtual ~RTCVideoDevice() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX diff --git a/third_party/libwebrtc/include/rtc_video_frame.h b/third_party/libwebrtc/include/rtc_video_frame.h new file mode 100644 index 0000000000..a69023f052 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_frame.h @@ -0,0 +1,56 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_FRAME_HXX +#define LIB_WEBRTC_RTC_VIDEO_FRAME_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCVideoFrame : public RefCountInterface { + public: + enum class Type { kARGB, kBGRA, kABGR, kRGBA }; + + enum 
VideoRotation { + kVideoRotation_0 = 0, + kVideoRotation_90 = 90, + kVideoRotation_180 = 180, + kVideoRotation_270 = 270 + }; + + public: + LIB_WEBRTC_API static scoped_refptr Create( + int width, int height, const uint8_t* buffer, int length); + + LIB_WEBRTC_API static scoped_refptr Create( + int width, int height, const uint8_t* data_y, int stride_y, + const uint8_t* data_u, int stride_u, const uint8_t* data_v, int stride_v); + + virtual scoped_refptr Copy() = 0; + + // The resolution of the frame in pixels. For formats where some planes are + // subsampled, this is the highest-resolution plane. + virtual int width() const = 0; + virtual int height() const = 0; + + virtual VideoRotation rotation() = 0; + + // Returns pointer to the pixel data for a given plane. The memory is owned by + // the VideoFrameBuffer object and must not be freed by the caller. + virtual const uint8_t* DataY() const = 0; + virtual const uint8_t* DataU() const = 0; + virtual const uint8_t* DataV() const = 0; + + // Returns the number of bytes between successive rows for a given plane. 
+ virtual int StrideY() const = 0; + virtual int StrideU() const = 0; + virtual int StrideV() const = 0; + + virtual int ConvertToARGB(Type type, uint8_t* dst_argb, int dst_stride_argb, + int dest_width, int dest_height) = 0; + + protected: + virtual ~RTCVideoFrame() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_FRAME_HXX diff --git a/third_party/libwebrtc/include/rtc_video_renderer.h b/third_party/libwebrtc/include/rtc_video_renderer.h new file mode 100644 index 0000000000..7e81d463f6 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_renderer.h @@ -0,0 +1,18 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX +#define LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +template +class RTCVideoRenderer { + public: + virtual ~RTCVideoRenderer() {} + + virtual void OnFrame(VideoFrameT frame) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX diff --git a/third_party/libwebrtc/include/rtc_video_source.h b/third_party/libwebrtc/include/rtc_video_source.h new file mode 100644 index 0000000000..cb61abbb49 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_source.h @@ -0,0 +1,14 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX +#define LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCVideoSource : public RefCountInterface { + public: + ~RTCVideoSource() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX diff --git a/third_party/libwebrtc/include/rtc_video_track.h b/third_party/libwebrtc/include/rtc_video_track.h new file mode 100644 index 0000000000..46850c5861 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_track.h @@ -0,0 +1,24 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_TRACK_HXX +#define LIB_WEBRTC_RTC_VIDEO_TRACK_HXX + +#include "rtc_media_track.h" +#include "rtc_types.h" +#include "rtc_video_frame.h" +#include "rtc_video_renderer.h" + +namespace libwebrtc { + +class RTCVideoTrack : public 
RTCMediaTrack { + public: + virtual void AddRenderer( + RTCVideoRenderer>* renderer) = 0; + + virtual void RemoveRenderer( + RTCVideoRenderer>* renderer) = 0; + + protected: + ~RTCVideoTrack() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_TRACK_HXX diff --git a/third_party/libwebrtc/lib/elinux-arm64/libwebrtc.so b/third_party/libwebrtc/lib/elinux-arm64/libwebrtc.so new file mode 120000 index 0000000000..6f27e3aef9 --- /dev/null +++ b/third_party/libwebrtc/lib/elinux-arm64/libwebrtc.so @@ -0,0 +1 @@ +../linux-arm64/libwebrtc.so \ No newline at end of file diff --git a/third_party/libwebrtc/lib/elinux-x64/libwebrtc.so b/third_party/libwebrtc/lib/elinux-x64/libwebrtc.so new file mode 120000 index 0000000000..8d3fde68ca --- /dev/null +++ b/third_party/libwebrtc/lib/elinux-x64/libwebrtc.so @@ -0,0 +1 @@ +../linux-x64/libwebrtc.so \ No newline at end of file diff --git a/macos/WebRTC.framework/Versions/A/WebRTC b/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so similarity index 55% rename from macos/WebRTC.framework/Versions/A/WebRTC rename to third_party/libwebrtc/lib/linux-arm64/libwebrtc.so index 22ab0841d1..22c3bde89f 100755 Binary files a/macos/WebRTC.framework/Versions/A/WebRTC and b/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so differ diff --git a/third_party/libwebrtc/lib/linux-x64/libwebrtc.so b/third_party/libwebrtc/lib/linux-x64/libwebrtc.so new file mode 100755 index 0000000000..ae9c4eec39 Binary files /dev/null and b/third_party/libwebrtc/lib/linux-x64/libwebrtc.so differ diff --git a/third_party/libwebrtc/lib/win64/libwebrtc.dll b/third_party/libwebrtc/lib/win64/libwebrtc.dll new file mode 100644 index 0000000000..e62437621a Binary files /dev/null and b/third_party/libwebrtc/lib/win64/libwebrtc.dll differ diff --git a/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib b/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib new file mode 100644 index 0000000000..c982e9577b Binary files /dev/null and 
b/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib differ diff --git a/third_party/svpng/LICENSE b/third_party/svpng/LICENSE new file mode 100644 index 0000000000..c4063034b2 --- /dev/null +++ b/third_party/svpng/LICENSE @@ -0,0 +1,26 @@ +Copyright (C) 2017 Milo Yip. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of pngout nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/third_party/svpng/svpng.hpp b/third_party/svpng/svpng.hpp new file mode 100644 index 0000000000..aa2332429d --- /dev/null +++ b/third_party/svpng/svpng.hpp @@ -0,0 +1,110 @@ +/* +Copyright (C) 2017 Milo Yip. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of pngout nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +/*! \file + \brief svpng() is a minimalistic C function for saving RGB/RGBA image into uncompressed PNG. + \author Milo Yip + \version 0.1.1 + \copyright MIT license + \sa http://github.com/miloyip/svpng +*/ + +#ifndef SVPNG_INC_ +#define SVPNG_INC_ + +/*! \def SVPNG_LINKAGE + \brief User customizable linkage for svpng() function. + By default this macro is empty. + User may define this macro as static for static linkage, + and/or inline in C99/C++, etc. +*/ +#ifndef SVPNG_LINKAGE +#define SVPNG_LINKAGE +#endif + +/*! 
\def SVPNG_OUTPUT + \brief User customizable output stream. + By default, it uses C file descriptor and fputc() to output bytes. + In C++, for example, user may use std::ostream or std::vector instead. +*/ +#ifndef SVPNG_OUTPUT +#include +#define SVPNG_OUTPUT FILE* fp +#endif + +/*! \def SVPNG_PUT + \brief Write a byte +*/ +#ifndef SVPNG_PUT +#define SVPNG_PUT(u) fputc(u, fp) +#endif + + +/*! + \brief Save a RGB/RGBA image in PNG format. + \param SVPNG_OUTPUT Output stream (by default using file descriptor). + \param w Width of the image. (<16383) + \param h Height of the image. + \param img Image pixel data in 24-bit RGB or 32-bit RGBA format. + \param alpha Whether the image contains alpha channel. +*/ +SVPNG_LINKAGE void svpng(SVPNG_OUTPUT, unsigned w, unsigned h, const unsigned char* img, int alpha) { + static const unsigned t[] = { 0, 0x1db71064, 0x3b6e20c8, 0x26d930ac, 0x76dc4190, 0x6b6b51f4, 0x4db26158, 0x5005713c, + /* CRC32 Table */ 0xedb88320, 0xf00f9344, 0xd6d6a3e8, 0xcb61b38c, 0x9b64c2b0, 0x86d3d2d4, 0xa00ae278, 0xbdbdf21c }; + unsigned a = 1, b = 0, c, p = w * (alpha ? 
4 : 3) + 1, x, y, i; /* ADLER-a, ADLER-b, CRC, pitch */ +#define SVPNG_U8A(ua, l) for (i = 0; i < l; i++) SVPNG_PUT((ua)[i]); +#define SVPNG_U32(u) do { SVPNG_PUT((u) >> 24); SVPNG_PUT(((u) >> 16) & 255); SVPNG_PUT(((u) >> 8) & 255); SVPNG_PUT((u) & 255); } while(0) +#define SVPNG_U8C(u) do { SVPNG_PUT(u); c ^= (u); c = (c >> 4) ^ t[c & 15]; c = (c >> 4) ^ t[c & 15]; } while(0) +#define SVPNG_U8AC(ua, l) for (i = 0; i < l; i++) SVPNG_U8C((ua)[i]) +#define SVPNG_U16LC(u) do { SVPNG_U8C((u) & 255); SVPNG_U8C(((u) >> 8) & 255); } while(0) +#define SVPNG_U32C(u) do { SVPNG_U8C((u) >> 24); SVPNG_U8C(((u) >> 16) & 255); SVPNG_U8C(((u) >> 8) & 255); SVPNG_U8C((u) & 255); } while(0) +#define SVPNG_U8ADLER(u) do { SVPNG_U8C(u); a = (a + (u)) % 65521; b = (b + a) % 65521; } while(0) +#define SVPNG_BEGIN(s, l) do { SVPNG_U32(l); c = ~0U; SVPNG_U8AC(s, 4); } while(0) +#define SVPNG_END() SVPNG_U32(~c) + SVPNG_U8A("\x89PNG\r\n\32\n", 8); /* Magic */ + SVPNG_BEGIN("IHDR", 13); /* IHDR chunk { */ + SVPNG_U32C(w); SVPNG_U32C(h); /* Width & Height (8 bytes) */ + SVPNG_U8C(8); SVPNG_U8C(alpha ? 
6 : 2); /* Depth=8, Color=True color with/without alpha (2 bytes) */ + SVPNG_U8AC("\0\0\0", 3); /* Compression=Deflate, Filter=No, Interlace=No (3 bytes) */ + SVPNG_END(); /* } */ + SVPNG_BEGIN("IDAT", 2 + h * (5 + p) + 4); /* IDAT chunk { */ + SVPNG_U8AC("\x78\1", 2); /* Deflate block begin (2 bytes) */ + for (y = 0; y < h; y++) { /* Each horizontal line makes a block for simplicity */ + SVPNG_U8C(y == h - 1); /* 1 for the last block, 0 for others (1 byte) */ + SVPNG_U16LC(p); SVPNG_U16LC(~p); /* Size of block in little endian and its 1's complement (4 bytes) */ + SVPNG_U8ADLER(0); /* No filter prefix (1 byte) */ + for (x = 0; x < p - 1; x++, img++) + SVPNG_U8ADLER(*img); /* Image pixel data */ + } + SVPNG_U32C((b << 16) | a); /* Deflate block end with adler (4 bytes) */ + SVPNG_END(); /* } */ + SVPNG_BEGIN("IEND", 0); SVPNG_END(); /* IEND chunk {} */ +} + +#endif /* SVPNG_INC_ */ \ No newline at end of file diff --git a/third_party/uuidxx/LICENSE b/third_party/uuidxx/LICENSE new file mode 100644 index 0000000000..6eaf314417 --- /dev/null +++ b/third_party/uuidxx/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2016 by NeoSmart Technologies + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/third_party/uuidxx/uuidxx.cc b/third_party/uuidxx/uuidxx.cc new file mode 100644 index 0000000000..a504635c24 --- /dev/null +++ b/third_party/uuidxx/uuidxx.cc @@ -0,0 +1,104 @@ +#ifdef _WIN32 +#ifndef _CRT_SECURE_NO_WARNINGS +#define _CRT_SECURE_NO_WARNINGS +#endif +#endif + +#include "uuidxx.h" +#include +#include +#include +#include + +using namespace std; +using namespace uuidxx; + +bool uuid::operator==(const uuid& guid2) const { + return memcmp(&guid2, this, sizeof(uuid)) == 0; +} + +bool uuid::operator!=(const uuid& guid2) const { + return !(*this == guid2); +} + +bool uuid::operator<(const uuid& guid2) const { + return memcmp(this, &guid2, sizeof(uuid)) < 0; +} + +bool uuid::operator>(const uuid& guid2) const { + return memcmp(this, &guid2, sizeof(uuid)) > 0; +} + +uuid::uuid(const std::string& uuidString) : uuid(uuidString.c_str()) {} + +uuid::uuid(const char* uuidString) { + if (uuidString == nullptr) { + // special case, and prevents random bugs + memset(this, 0, sizeof(uuid)); + return; + } + + if (uuidString[0] == '{') { + sscanf(uuidString, + "{%08" SCNx32 "-%04" SCNx16 "-%04" SCNx16 "-%02" SCNx8 "%02" SCNx8 + "-%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 + "%02" SCNx8 "}", + &Uuid.Data1, &Uuid.Data2, &Uuid.Data3, &Uuid.Data4[0], + &Uuid.Data4[1], &Uuid.Data4[2], &Uuid.Data4[3], &Uuid.Data4[4], + &Uuid.Data4[5], &Uuid.Data4[6], &Uuid.Data4[7]); + } else { + sscanf(uuidString, + "%08" SCNx32 "-%04" SCNx16 "-%04" SCNx16 "-%02" SCNx8 "%02" SCNx8 + "-%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 + "%02" SCNx8 "", + &Uuid.Data1, &Uuid.Data2, &Uuid.Data3, &Uuid.Data4[0], + &Uuid.Data4[1], &Uuid.Data4[2], &Uuid.Data4[3], &Uuid.Data4[4], + &Uuid.Data4[5], 
&Uuid.Data4[6], &Uuid.Data4[7]); + } +} + +string uuid::ToString(bool withBraces) const { + char buffer[39]; + sprintf(buffer, "%s%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X%s", + withBraces ? "{" : "", Uuid.Data1, Uuid.Data2, Uuid.Data3, + Uuid.Data4[0], Uuid.Data4[1], Uuid.Data4[2], Uuid.Data4[3], + Uuid.Data4[4], Uuid.Data4[5], Uuid.Data4[6], Uuid.Data4[7], + withBraces ? "}" : ""); + return buffer; +} + +uuid uuid::FromString(const char* uuidString) { + uuid temp(uuidString); + return temp; +} + +uuid uuid::FromString(const std::string& uuidString) { + uuid temp(uuidString.c_str()); + return temp; +} + +uuid uuid::Generatev4() { + // mach-o does not support TLS and clang still has issues with thread_local +#if !defined(__APPLE__) && !defined(__clang__) + thread_local std::random_device rd; + thread_local auto gen = std::mt19937_64(rd()); +#else + std::random_device rd; + std::mt19937_64 gen(rd()); +#endif + std::uniform_int_distribution dis64; + + uuid newGuid; + newGuid.WideIntegers[0] = dis64(gen); + newGuid.WideIntegers[1] = dis64(gen); + + // RFC4122 defines (pseudo)random uuids (in big-endian notation): + // MSB of DATA4[0] specifies the variant and should be 0b10 to indicate + // standard uuid, and MSB of DATA3 should be 0b0100 to indicate version 4 + newGuid.Bytes.Data4[0] = + (newGuid.Bytes.Data4[0] & 0x3F) | static_cast(0x80); + newGuid.Bytes.Data3[1] = + (newGuid.Bytes.Data3[1] & 0x0F) | static_cast(0x40); + + return newGuid; +} diff --git a/third_party/uuidxx/uuidxx.h b/third_party/uuidxx/uuidxx.h new file mode 100644 index 0000000000..2d36271886 --- /dev/null +++ b/third_party/uuidxx/uuidxx.h @@ -0,0 +1,66 @@ +#pragma once + +#include +#include +#include + +namespace uuidxx { +enum class Variant { Nil, Version1, Version2, Version3, Version4, Version5 }; + +class NotImplemented : public std::logic_error { + public: + NotImplemented() : std::logic_error("Function not yet implemented"){}; +}; + +union uuid { + private: + static uuid Generatev4(); 
+ + public: + uint64_t WideIntegers[2]; + struct _internalData { + uint32_t Data1; + uint16_t Data2; + uint16_t Data3; + uint8_t Data4[8]; + } Uuid; + struct _byteRepresentation { + uint8_t Data1[4]; + uint8_t Data2[2]; + uint8_t Data3[2]; + uint8_t Data4[8]; + } Bytes; + + bool operator==(const uuid& guid2) const; + bool operator!=(const uuid& guid2) const; + bool operator<(const uuid& guid2) const; + bool operator>(const uuid& guid2) const; + + uuid() = default; + + uuid(const char* uuidString); + uuid(const std::string& uuidString); + static uuid FromString(const char* uuidString); + static uuid FromString(const std::string& uuidString); + + static inline uuid Generate(Variant v = Variant::Version4) { + switch (v) { + case Variant::Nil: + return uuid(nullptr); // special case; + case Variant::Version1: + case Variant::Version2: + case Variant::Version3: + case Variant::Version5: + throw new NotImplemented(); + case Variant::Version4: + return Generatev4(); + } + return uuid(nullptr); + } + + std::string ToString(bool withBraces = true) const; +}; + +static_assert(sizeof(uuid) == 2 * sizeof(int64_t), + "Check uuid type declaration/padding!"); +} // namespace uuidxx diff --git a/windows/.gitignore b/windows/.gitignore new file mode 100644 index 0000000000..c765fa7864 --- /dev/null +++ b/windows/.gitignore @@ -0,0 +1,4 @@ +flutter/ + +# Visual Studio files +*.user diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt new file mode 100644 index 0000000000..39ee5943ac --- /dev/null +++ b/windows/CMakeLists.txt @@ -0,0 +1,53 @@ +cmake_minimum_required(VERSION 3.15) +set(PROJECT_NAME "flutter_webrtc") +project(${PROJECT_NAME} LANGUAGES CXX) + +# This value is used when generating builds using this plugin, so it must +# not be changed +set(PLUGIN_NAME "flutter_webrtc_plugin") + +add_definitions(-DLIB_WEBRTC_API_DLL) +add_definitions(-DRTC_DESKTOP_DEVICE) + +add_library(${PLUGIN_NAME} SHARED + "../common/cpp/src/flutter_common.cc" + 
"../common/cpp/src/flutter_data_channel.cc" + "../common/cpp/src/flutter_frame_cryptor.cc" + "../common/cpp/src/flutter_media_stream.cc" + "../common/cpp/src/flutter_peerconnection.cc" + "../common/cpp/src/flutter_frame_capturer.cc" + "../common/cpp/src/flutter_video_renderer.cc" + "../common/cpp/src/flutter_screen_capture.cc" + "../common/cpp/src/flutter_webrtc.cc" + "../common/cpp/src/flutter_webrtc_base.cc" + "../third_party/uuidxx/uuidxx.cc" + "flutter_webrtc_plugin.cc" + "task_runner_windows.cc" +) + +include_directories( + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/uuidxx" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/svpng" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" +) + +apply_standard_settings(${PLUGIN_NAME}) +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) +target_include_directories(${PLUGIN_NAME} INTERFACE + "${CMAKE_CURRENT_SOURCE_DIR}" +) +target_link_libraries(${PLUGIN_NAME} PRIVATE + flutter + flutter_wrapper_plugin + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/win64/libwebrtc.dll.lib" +) + +# List of absolute paths to libraries that should be bundled with the plugin +set(flutter_webrtc_bundled_libraries + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/win64/libwebrtc.dll" + PARENT_SCOPE +) diff --git a/windows/flutter_webrtc/flutter_web_r_t_c_plugin.h b/windows/flutter_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..ed2d45208b --- /dev/null +++ b/windows/flutter_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,23 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __declspec(dllexport) +#else +#define FLUTTER_PLUGIN_EXPORT __declspec(dllimport) +#endif + +#if defined(__cplusplus) 
+extern "C" { +#endif + +FLUTTER_PLUGIN_EXPORT void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar); + +#if defined(__cplusplus) +} // extern "C" +#endif + +#endif // PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ diff --git a/windows/flutter_webrtc_plugin.cc b/windows/flutter_webrtc_plugin.cc new file mode 100644 index 0000000000..836989c00e --- /dev/null +++ b/windows/flutter_webrtc_plugin.cc @@ -0,0 +1,79 @@ +#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" + +#include "flutter_common.h" +#include "flutter_webrtc.h" +#include "task_runner_windows.h" + +#include + +const char* kChannelName = "FlutterWebRTC.Method"; + +namespace flutter_webrtc_plugin { + +// A webrtc plugin for windows/linux. +class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. + std::unique_ptr plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + TaskRunner* task_runner() { return task_runner_.get(); } + + private: + // Creates a plugin that communicates on the given channel. 
+ FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()), + task_runner_(std::make_unique()) { + webrtc_ = std::make_unique(this); + } + + // Called when a method is called on |channel_|. + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr result) { + // handle method call and forward to webrtc native sdk. + auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr channel_; + std::unique_ptr webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; + std::unique_ptr task_runner_; +}; + +} // namespace flutter_webrtc_plugin + + +void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar) { + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + flutter::PluginRegistrarManager::GetInstance() + ->GetRegistrar(registrar)); +} \ No newline at end of file diff --git a/windows/task_runner_windows.cc b/windows/task_runner_windows.cc new file mode 100644 index 0000000000..0c47ab519e --- /dev/null +++ b/windows/task_runner_windows.cc @@ -0,0 +1,106 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. + // Use of this source code is governed by a BSD-style license that can be + // found in the LICENSE file. 
+ + #include "task_runner_windows.h" + + #include + #include + + namespace flutter_webrtc_plugin { + + TaskRunnerWindows::TaskRunnerWindows() { + WNDCLASS window_class = RegisterWindowClass(); + window_handle_ = + CreateWindowEx(0, window_class.lpszClassName, L"", 0, 0, 0, 0, 0, + HWND_MESSAGE, nullptr, window_class.hInstance, nullptr); + + if (window_handle_) { + SetWindowLongPtr(window_handle_, GWLP_USERDATA, + reinterpret_cast(this)); + } else { + auto error = GetLastError(); + LPWSTR message = nullptr; + FormatMessageW(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | + FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, error, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + reinterpret_cast(&message), 0, NULL); + OutputDebugString(message); + LocalFree(message); + } + } + + TaskRunnerWindows::~TaskRunnerWindows() { + if (window_handle_) { + DestroyWindow(window_handle_); + window_handle_ = nullptr; + } + UnregisterClass(window_class_name_.c_str(), nullptr); + } + + void TaskRunnerWindows::EnqueueTask(TaskClosure task) { + { + std::lock_guard lock(tasks_mutex_); + tasks_.push(task); + } + if (!PostMessage(window_handle_, WM_NULL, 0, 0)) { + DWORD error_code = GetLastError(); + std::cerr << "Failed to post message to main thread; error_code: " + << error_code << std::endl; + } + } + + void TaskRunnerWindows::ProcessTasks() { + // Even though it would usually be sufficient to process only a single task + // whenever we receive the message, if the message queue happens to be full, + // we might not receive a message for each individual task. 
+ for (;;) { + std::lock_guard lock(tasks_mutex_); + if (tasks_.empty()) break; + TaskClosure task = tasks_.front(); + tasks_.pop(); + task(); + } + } + + WNDCLASS TaskRunnerWindows::RegisterWindowClass() { + window_class_name_ = L"FlutterWebRTCWindowsTaskRunnerWindow"; + + WNDCLASS window_class{}; + window_class.hCursor = nullptr; + window_class.lpszClassName = window_class_name_.c_str(); + window_class.style = 0; + window_class.cbClsExtra = 0; + window_class.cbWndExtra = 0; + window_class.hInstance = GetModuleHandle(nullptr); + window_class.hIcon = nullptr; + window_class.hbrBackground = 0; + window_class.lpszMenuName = nullptr; + window_class.lpfnWndProc = WndProc; + RegisterClass(&window_class); + return window_class; + } + + LRESULT + TaskRunnerWindows::HandleMessage(UINT const message, WPARAM const wparam, + LPARAM const lparam) noexcept { + switch (message) { + case WM_NULL: + ProcessTasks(); + return 0; + } + return DefWindowProcW(window_handle_, message, wparam, lparam); + } + + LRESULT TaskRunnerWindows::WndProc(HWND const window, UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + if (auto* that = reinterpret_cast( + GetWindowLongPtr(window, GWLP_USERDATA))) { + return that->HandleMessage(message, wparam, lparam); + } else { + return DefWindowProc(window, message, wparam, lparam); + } + } + + } // namespace flutter_webrtc_plugin \ No newline at end of file diff --git a/windows/task_runner_windows.h b/windows/task_runner_windows.h new file mode 100644 index 0000000000..f86c99d3f8 --- /dev/null +++ b/windows/task_runner_windows.h @@ -0,0 +1,55 @@ +// Copyright 2024 The Flutter Authors. All rights reserved. + // Use of this source code is governed by a BSD-style license that can be + // found in the LICENSE file. 
+ #ifndef PACKAGES_FLUTTER_WEBRTC_WINDOWS_TASK_RUNNER_WINDOW_H_ + #define PACKAGES_FLUTTER_WEBRTC_WINDOWS_TASK_RUNNER_WINDOW_H_ + + #include + + #include + #include + #include + #include + #include + + #include "task_runner.h" + + namespace flutter_webrtc_plugin { + + // Hidden HWND responsible for processing camera tasks on main thread + // Adapted from Flutter Engine, see: + // https://github.com/flutter/flutter/issues/134346#issuecomment-2141023146 + // and: + // https://github.com/flutter/engine/blob/d7c0bcfe7a30408b0722c9d47d8b0b1e4cdb9c81/shell/platform/windows/task_runner_window.h + class TaskRunnerWindows : public TaskRunner { + public: + virtual void EnqueueTask(TaskClosure task); + + TaskRunnerWindows(); + ~TaskRunnerWindows(); + + private: + void ProcessTasks(); + + WNDCLASS RegisterWindowClass(); + + LRESULT + HandleMessage(UINT const message, WPARAM const wparam, + LPARAM const lparam) noexcept; + + static LRESULT CALLBACK WndProc(HWND const window, UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept; + + HWND window_handle_; + std::wstring window_class_name_; + std::mutex tasks_mutex_; + std::queue tasks_; + + // Prevent copying. + TaskRunnerWindows(TaskRunnerWindows const&) = delete; + TaskRunnerWindows& operator=(TaskRunnerWindows const&) = delete; + }; + } // namespace flutter_webrtc_plugin + + #endif // PACKAGES_FLUTTER_WEBRTC_WINDOWS_TASK_RUNNER_WINDOW_H_ \ No newline at end of file