diff --git a/.clang-format b/.clang-format new file mode 100644 index 0000000000..3c73f32a33 --- /dev/null +++ b/.clang-format @@ -0,0 +1,12 @@ +# Defines the Chromium style for automatic reformatting. +# http://clang.llvm.org/docs/ClangFormatStyleOptions.html +BasedOnStyle: Chromium +# This defaults to 'Auto'. Explicitly set it for a while, so that +# 'vector >' in existing files gets formatted to +# 'vector>'. ('Auto' means that clang-format will only use +# 'int>>' if the file already contains at least one such instance.) +Standard: Cpp11 +SortIncludes: true +--- +Language: ObjC +ColumnLimit: 100 diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000000..258872f8da --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# These are supported funding model platforms +open_collective: flutter-webrtc +custom: ['https://www.paypal.me/duanweiwei'] diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..4ec4900714 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,21 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** + +**To Reproduce** + +**Expected behavior** + +**Platform information** + +* **Flutter version**: +* **Plugin version**: +* **OS**: +* **OS version**: diff --git a/.github/ISSUE_TEMPLATE/custom.md b/.github/ISSUE_TEMPLATE/custom.md new file mode 100644 index 0000000000..48d5f81fa4 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/custom.md @@ -0,0 +1,10 @@ +--- +name: Custom issue template +about: Describe this issue template's purpose here. 
+title: '' +labels: '' +assignees: '' + +--- + + diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..3b9ba1435e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,16 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** + +**Describe the solution you'd like** + +**Describe alternatives you've considered** + +**Additional context** diff --git a/.github/stale.yml b/.github/stale.yml new file mode 100644 index 0000000000..e3304780b2 --- /dev/null +++ b/.github/stale.yml @@ -0,0 +1,20 @@ +# Number of days of inactivity before an issue becomes stale +daysUntilStale: 60 +# Number of days of inactivity before a stale issue is closed +daysUntilClose: 7 +# Issues with these labels will never be considered stale +exemptLabels: + - pinned + - security + - 🐛bug + - 🚀enhancement + - 😭help%20wanted +# Label to use when marking an issue as stale +staleLabel: wontfix +# Comment to post when marking an issue as stale. Set to `false` to disable +markComment: > + This issue has been automatically marked as stale because it has not had + recent activity. It will be closed if no further activity occurs. Thank you + for your contributions. +# Comment to post when closing a stale issue. 
Set to `false` to disable +closeComment: false diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000000..63fe195530 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,161 @@ +name: Build + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + dart-format-and-analyze-check: + name: Dart Format Check + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Dart Format Check + run: dart format lib/ test/ --set-exit-if-changed + - name: Import Sorter Check + run: flutter pub run import_sorter:main --no-comments --exit-if-changed + - name: Dart Analyze Check + run: flutter analyze + - name: Dart Test Check + run: flutter test + + build-for-android: + name: Build for Flutter Android + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '17.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for Android + working-directory: ./example + run: flutter build apk + + build-for-ios: + name: Build for Flutter iOS + runs-on: macos-latest + + steps: + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for iOS + working-directory: ./example + run: flutter build ios --release --no-codesign + + build-for-windows: + name: Build for flutter Windows + runs-on: windows-latest + + steps: + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v1 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for 
Windows + working-directory: ./example + run: flutter build windows --release + + build-for-macos: + name: Build for flutter macOS + runs-on: macos-latest + + steps: + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v1 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Build for macOS + working-directory: ./example + run: flutter build macos --release + + build-for-linux: + name: Build for Flutter Linux + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Install project dependencies + run: flutter pub get + - name: Run apt update + run: sudo apt-get update + - name: Install ninja-build libgtk-3-dev + run: sudo apt-get install -y ninja-build libgtk-3-dev + - name: Build for Linux + working-directory: ./example + run: flutter build linux + + build-for-elinux: + name: Build for Flutter Embedded Linux + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + - name: Run apt update + run: sudo apt-get update + - name: Install ninja-build libgtk-3-dev + run: sudo apt-get install -y ninja-build libgtk-3-dev + - name: Install elinux + run: git clone https://github.com/sony/flutter-elinux.git -b 3.27.1 ~/flutter-elinux + - name: Build for elinux + working-directory: ./example + run: /home/runner/flutter-elinux/bin/flutter-elinux pub get && /home/runner/flutter-elinux/bin/flutter-elinux build elinux + + build-for-web: + name: Build for Flutter Web + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '12.x' + - uses: actions/checkout@v2 + - uses: subosito/flutter-action@v2 + with: + channel: 'stable' + 
- name: Install project dependencies + run: flutter pub get + - name: build for Web + working-directory: ./example + run: flutter build web diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 0000000000..417f0f5aa5 --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,15 @@ +# .github/workflows/publish.yml +name: Publish to pub.dev + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+*' + +jobs: + publish: + permissions: + id-token: write # Required for authentication using OIDC + uses: dart-lang/setup-dart/.github/workflows/publish.yml@v1 + # with: + # working-directory: path/to/package/within/repository diff --git a/.gitignore b/.gitignore index c9cf2afd7c..0bab26c22a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,58 @@ +*.iml +.idea .DS_Store -.packages -.vscode/.DS_Store example/pubspec.lock pubspec.lock example/ios/Podfile.lock +GeneratedPluginRegistrant.java +example/android/.gradle +WorkspaceSettings.xcsettings +example/.flutter-plugins +example/android/local.properties +.dart_tool/package_config.json +android/.project +example/ios/Runner/GeneratedPluginRegistrant.m +example/ios/Runner/GeneratedPluginRegistrant.h +example/ios/Flutter/Generated.xcconfig +example/ios/Flutter/flutter_export_environment.sh + +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# Flutter/Dart/Pub related +**/doc/api/ +.dart_tool/ +.flutter-plugins +.flutter-plugins-dependencies +.packages +.pub-cache/ +.pub/ +/build/ +/android/.gradle/ + + +android/.classpath +android/.settings/org.eclipse.buildship.core.prefs + +# VSCode +.vscode/ + +!webrtc_android.iml +!webrtc.iml + +# vs +*.pdb \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 8f621ff1f1..f4687f52a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,934 @@ -## [0.0.1] - 2018.05.30 + +# Changelog + +[0.14.2] - 2025-07-01 + +* [Windows/Linux] 
feat: Add audio processing and sink API for cpp. (#1867) +* [Linux] fix: Fixed audio device selection error for Linux. (#1864) +* [Android] fix: Fix screen capture orientation for landscape-native devices (#1854) + +[0.14.1] - 2025-05-22 + +* [Android] fix: Recording bug (#1839) +* [Android] fix: calls in terminated mode by disabling orientation manager (#1840) +* [Android] fix: Wait for audio and video thread to fully stop to avoid corrupted recordings (#1836) + +[0.14.0] - 2025-05-06 + +* [iOS/Android] feat: Media Recorder implementation Android and iOS (#1810) +* [Windows] fix: Pickup registrar for plugin by plugin registrar manager (#1752) +* [Linux] fix: add task runner for linux. (#1821) +* [iOS/macOS] fix: Fix deadlock when creating a frame cryptor on iOS/macOS. + +[0.13.1+hotfix.1] - 2025-04-07 + +* [Android] fix: Fix `clearAndroidCommunicationDevice` call blocking. + +[0.13.1] - 2025-04-03 + +* [Android] fix: remove setPreferredInputDevice when getUserAudio. (#1808) +* [Web] fix: race condition in RTCVideoRenderer for Web (#1805) +* [Android] fix: Migrate from onSurfaceDestroyed to onSurfaceCleanup for SurfaceProducer.Callback. (#1806) + +[0.13.0] - 2025-03-24 + +* [All] feat: add getBufferedAmount for DataChannel. (#1796) +* [Windows] fix: fixed non-platform thread call error. (#1795) + +[0.12.12+hotfix.1] - 2025-03-12 + +* [Android] fix: fixed video not rendered after resume from background. + +[0.12.12] - 2025-03-09 + +* [Android] feat: Migrate to the new Surface API. (#1726) +* [Chore] chore: fix sponsors logo and links. + +[0.12.11] - 2025-02-23 + +* [web] bump version for dart_webrtc. +* [web] fix: compile error for web with --wasm. + +[0.12.10] - 2025-02-18 + +* [web] bump version for dart_webrtc. +* [web] fix: compile error for web with --wasm. 
+ +[0.12.9] - 2025-02-13 + +* [iOS] feat: Add option to start capture without broadcast picker (#1764) + +[0.12.8] - 2025-02-07 + +* [Dart] feat: expose rtc video value (#1754) +* [Dart] chore: bump webrtc-interface to 1.2.1. + +[0.12.7] - 2025-01-24 + +* [iOS] More robustness for video renderer. (#1751) + +[0.12.6] - 2025-01-20 + +* [iOS] fix In-app screen recording. +* [Android] fix: avoid crashes when surfaceTextureHelper is null. (#1743) + +[0.12.5+hotfix.2] - 2024-12-25 + +* [iOS] fix: Audio route issue for iOS. + +[0.12.5+hotfix.1] - 2024-12-25 + +* [iOS/macOS] fix: Pass MediaConstraints for getUserAudio. + +[0.12.5] - 2024-12-23 + +* [iOS/Android] Fixed bug for screen capture. +* [Android] Fixed first frame flickering. + +[0.12.4] - 2024-12-16 + +* [iOS/Android] add FocusMode/ExposureMode settings for mobile. (#1435) +* [Dart] fix compiler errors. +* [eLinux] add $ORIGIN to rpath in elinux (#1720). + +[0.12.3] - 2024-11-29 + +* [iOS/Android/macOS] feat: Expose AV Processing and Sink native APIs. + +[0.12.2] - 2024-11-26 + +* [Dart] fix: race condition during video renderer initialization. (#1692) +* [Darwin] fix: Add mutex lock to avoid pixelbuffer access contention. (#1694) + +[0.12.1+hotfix.1] - 2024-10-25 + +* [iOS] fix: fix switch camera broken on iOS. + +* [web] fix: add stub WebRTC.initialize for web. +* [Docs] Fixing proguard rules link (#1686) +* [iOS/Android] feat: support unprocessed audio (#825) +* [eLinux] feat: add support for eLinux (#1338) + +[0.12.0+hotfix.1] - 2024-10-18 + +* [macOS] fix compilation error for macOS. + +[0.12.0] - 2024-10-16 + +* [iOS/macOS] Fix memory leak for iOS/macOS. +* [iOS] Support MultiCam Session for iOS. + +[0.11.7] - 2024-09-04 + +* [Web] Bump dart_webrtc to 1.4.9. +* [Web] Bump web version to 1.0.0. + +[0.11.6+hotfix.1] - 2024-08-07 + +* [iOS] Fixed PlatformView not rendering after resuming from background. + +[0.11.6] - 2024-08-02 + +* [Web] change VideoElement to HTMLVideoElement. 
+* [iOS] added shared singleton for FlutterWebRTCPlugin (#1634) +* [iOS] Using av samplebuffer for PlatformView (#1635) + +[0.11.5] - 2024-07-23 + +* [Android] Report actual sizes for camera media stream track (#1636). + +[0.11.4] - 2024-07-19 + +* [Android] fix issue for camera switching. + +[0.11.3] - 2024-07-12 + +* Bump version for libwebrtc. + +[0.11.2] - 2024-07-09 + +* [Windows] fix crash for windows. +* [Darwin] bump WebRTC version for darwin. + +[0.11.1] - 2024-06-17 + +* [macOS] Downgrade macOS system dependencies to 10.14. + +[0.11.0] - 2024-06-17 + +* [Native] upgrade libwebrtc to m125.6422. + +[0.10.8] - 2024-06-05 + +* [iOS] fix(platform_view): fit cover works wrong (#1593) +* [iOS/macOS] fix: Fix the issue that the video is not displayed when using 'video': true (#1592) +* [Web] bump dart_webrtc to 1.4.6. + +[0.10.7] - 2024-05-30 + +* [iOS] feat: add PlatformView Renderer for iOS. (#1569) +* [iOS] fix: audio session control for iOS. (#1590) + +[0.10.6] - 2024-05-13 + +* [Web] Some important fixes for web. + +[0.10.5] - 2024-05-13 + +* [Android] fix: make MediaDeviceInfo (Audio deviceId, label, groupId) consistent. (#1583) + +[0.10.4] - 2024-05-06 + +* [iOS/macOS] chore: update swift webrtc sdks to 114.5735.10 (#1576) +* [Android] fix: actually call selectAudioOutput in enableSpeakerButPreferBluetooth +* [iOS] fix: remember speakerphone mode for ensureAudioSession (#1568) +* [Windows/Linux] Fix handling of unimplemented method (#1563) + +[0.10.3] - 2024-04-09 + +* [iOS/macOS] Fix compilation warning for iOS/macOS. + +[0.10.2] - 2024-04-08 + +* [Native/Web] feat: add keyRingSize/discardFrameWhenCryptorNotReady to KeyProviderOptions. + +[0.10.1] - 2024-04-08 + +* [Web] fix renderer issue for web. + +[0.10.0] - 2024-04-08 + +* [Web] move to package:web. + +[0.9.48+hotfix.1] - 2024-02-05 + +* [Android] bump version for libwebrtc. + +[0.9.48] - 2024-02-05 + +* [Android] bump version for libwebrtc. +* [iOS] Supports ensureAudioSsession method for iOS only. 
(#1514) +* [Android] fix android wrong display size. (#1508). + +[0.9.47] - 2023-11-29 + +* [Windows/Linux] fix: Check the invalid value of candidate and session description. (#1484) +* [Windows/Linux/macOS] fix: screen sharing issue for desktop. +* [Web] fix: platformViewRegistry getter is deprecated (#1485) +* [Dart] Throw exception for set src object (#1491). + +[0.9.46] - 2023-10-25 + +* [iOS/macOS] fix: Crop video output size to target settings. (#1472) +* [Android] fix: Fix bluetooth sco not stopping after room disconnect (#1475) + +[0.9.45] - 2023-09-27 + +* [iOS/macOS] fix: send message on non-platform thread. +* [Windows] fix: fix setSrcObj with trackId for Windows. +* [Windows] fix: fix "unlock of unowned mutex" error when call "captureFrame()" func on windows. + +[0.9.44] - 2023-09-25 + +* [Windows] fix: fix Renderer bug for Windows. +* [Native] fix: Use independent threads to process frame encryption/decryption +* [Native] fix: Correct handle SIF frame +* [Native] fix: Fix a fault tolerance judgment failure + +[0.9.43] - 2023-09-20 + +* [Native] fix: send frame cryptor events from signaling thread. +* [Native] fix: h264 freeze when using E2EE. + +[0.9.42+hotfix.1] - 2023-09-15 + +* [Windows/Linux] fix: fix cannot start vp8/h264 encoder correctly. + +[0.9.42] - 2023-09-15 + +* [Dart/Native] feat: add more framcryptor api (#1444) +* [Dart/Native] feat: support scalability mode (#1442) +* [Android] fix: Turn off audio routing in non communication modes (#1438) + +* [Android] feat: Add more control over android audio options. + +[0.9.41] - 2023-08-30 + +* [Android] feat: Add more control over android audio options. + +[0.9.40] - 2023-08-16 + +* [Windows/Linux] fix: nullptr checking for sender/receiver for getStats. + +[0.9.39] - 2023-08-14 + +* [Dart/Native] feat: add async methods for getting pc states. 
+ +[0.9.38] - 2023-08-11 + +* [Android] fix: Expose helper to clearCommunicationDevice on AudioManager.AUDIOFOCUS_LOSS +* [Android] feat: support force SW codec list for android, and disable HW codec for VP9 by default. +* [Android] fix: issue for audio device switch (#1417) +* [Android/iOS] feat: Added setZoom method to support camera zooming while streaming. (#1412). + +[0.9.37] - 2023-08-07 + +* [Native] fix: Skip set_sdp_fmtp_line if sdpFmtpLine is empty. +* [Android] fix: fix android earpiece not being replaced after wired headset is disconnected. +* [Dart] fix: partially rebuild RTCVideoView when renderVideo value changes. +* [Android] feat: expose android audio modes. +* [Android] feat: support forceSWCodec for Android. +* [Linux] fix: add $ORIGIN to rpath. + +[0.9.36] - 2023-07-13 + +* [Native] upgrade libwebrtc to m114.5735.02. +* [Windows/Linux] Add implementation to MediaStreamTrack.captureFrame() for linux/windows. +* [Darwin/Android] Support to ignore network adapters used for ICE on Android, iOS and macOS. + +[0.9.35] - 2023-06-30 + +* [iOS] feat: expose audio mode for ios. +* [Darwin] fix: compiler warning for Darwin. +* [Dart] Fix setMicrophoneMute() not awaitable. +* [Native] Update libwebrtc to m114. +* [Dart/Web] Separate frame cryptor to dart-webrtc. + +[0.9.34] - 2023-06-14 + +* [Web] fix facingMode for flutter web mobile. + +[0.9.33] - 2023-06-08 + +* [Android] fix frame drops for android. + +[0.9.32] - 2023-05-30 + +* [Android] fix issue for get user audio. +* [Android] fix getStats throw LinkedHasMap exception. + +[0.9.31] - 2023-05-23 + +* [Darwin] Improve iOS/macOS H264 encoder (Upgrade to WebRTC-SDK M104.5112.17). + +[0.9.30+hotfix.2] - 2023-05-18 + +* [Windows/Linux] fix bug for eventchannel proxy. +* [Windows/Linux] fix: crash for pc.close/dispose on win/linux. (#1360) + +[0.9.30+hotfix.1] - 2023-05-17 + +* [Windows/Linux] Fix compiler error. + +[0.9.30] - 2023-05-16 + +* [Darwin] Handle exceptions for frame rate settings for darinw. 
(#1351) +* [Android] Fix bluetooth device enumerate. (#1349) +* [Darwin/Android/Windows/Linux] Added maxIPv6Networks configuration (#1350) +* [iOS] Fix: broadcast extension not found fallback logic (#1347) +* [Android] Move the call of capturer.stopCapture() outside the main thread to avoid blocking of flutter method call. +* [Windows/Linux] Fix the crash issue of video room (#1343) + +[0.9.29+hotfix.1] - 2023-05-08 + +* [Android] fix: application context null when app is terminated. +* [Android/iOS] feat: add way to enable speaker but prefer bluetooth. + +[0.9.28] - 2023-05-08 + +* [Windows/Linux] fix: use the correct transceiver id. +* [Windows/Linux] fix: Support restart camera for Windows/Linux. + +[0.9.27] - 2023-04-27 + +* [Darwin/Android/Windows/Linux] feat: framecryptor. +* [Windows/Linux] Fix the type/code mistake. +* [Windows/Linux] Fix uneffective RTPTransceiver::GetCurrentDirection. +* [Windows/Linux] RTPtransceiver::getCurrentDirection returns correct value. + +[0.9.26] - 2023-04-16 + +* [iOS/macOS] motify h264 profile-level-id to support high resolution. +* [Dawrin/Android/Windows] feat: add RTCDegradationPreference to RTCRtpParameters. + +[0.9.25] - 2023-04-10 + +* [Dawrin/Android/Windows] Add `addStreams` to `RTCRtpSender` +* [Android] fix: label for Wired Headset. (#1305) +* [Dawrin/Android/Windows] Feat/media stream track get settings (#1294) +* [Android/iOS] Fix track lookup in the platform specific code for Android and iOS (#1289) +* [iOS] fix: ICE Connectivity doesn't establish with DualSIM iPhones. +* [Android] Switch to webrtc hosted on maven central (#1288) + +[0.9.24] - 2023-03-07 + +* [iOS] avaudiosession mode changed to AVAudioSessionModeVideoChat (#1285) +* [macOS] fix memory leak for screen capture. + +[0.9.23] - 2023-02-17 + +* [Windows/Linux] Updated libwebrtc binary for windows/linux to fix two crashes. + +[0.9.22] - 2023-02-14 + +* [iOS] fix: Without any setActive for rtc session, libwebrtc manages the session counter by itself. 
(#1266) +* [dart] fix: remove rtpsender.dispose. +* [web] fix video renderer issue for safari. +* [macOS] Fixed macOS desktop capture crash with simulcast enabled. +* [macOS] Fix the crash when setting the fps of the virtual camera. + +[0.9.21] - 2023-02-10 + +* [Web] Fix: RTCRtpParameters.fromJsObject for Firefox. +* [Web] Add bufferedamountlow. +* [Android] Fixed frame capturer returning images with wrong colors (#1258). +* [Windows] bug fix. + +[0.9.20] - 2023-02-03 + +* [Dawrin/Android/Windows] Add getCapabilities/setCodecPreferences methods +* [Darwin] buffered amount +* [Linux] Fixed audio device name buffer size +* [Android] Start audioswitch and only activate it when needed +* [Darwin] Fix typo which broke GcmCryptoSuites + +[0.9.19] - 2023-01-10 + +* [Dart] Fix getStats: change 'track' to 'trackId' (#1199) +* [Android] keep the audio switch after stopping (#1202) +* [Dart] Enhance RTC video view with placeholder builder property (#1206) +* [Android] Use forked version of audio switch to avoid BLUETOOTH_CONNECT permission (#1218) + +[0.9.18] - 2022-12-12 + +* [Web] Bump dart_webrtc to 1.0.12, Convert iceconnectionstate to connectionstate for Firefox. +* [Android] Start AudioSwitchManager only when audio track added (fix #1163) (#1196) +* [iOS] Implement detachFromEngineForRegistrar (#1192) +* [iOS] Handle Platform Exception on addCandidate (#1190) +* [Native] Code format with clang-format. + +[0.9.17] - 2022-11-28 + +* [Android] Update android webrtc version to 104.5112.05 +* [iOS] Update WebRTC.xframework version to 104.5112.07 + +[0.9.16] - 2022-11-14 + +* [Linux] Fixed compiler error for flutter 3.3.8. +* [Linux] Remove 32-bit precompiled binaries. +* [Linux] Supports linux-x64 and linux-arm64. + +[0.9.15] - 2022-11-13 + +* [Linux] Add Linux Support. + +[0.9.14] - 2022-11-12 + +* [iOS] Fix setSpeakerOn has no effect after change AVAudioSession mode to playback. + +[0.9.13] - 2022-11-12 + +* [Dart] Change MediaStream.clone to async. 
+* [iOS] Fixed the bug that the mic indicator light was still on when mic recording was stopped. +* [iOS/macOS/Android/Windows] Allow sdpMLineIndex to be null when addCandidate. +* [macOS] Frame capture support for MacOS. +* [Android] Add enableCpuOveruseDetection configuration (#1165). +* [Android] Update comments (#1164). + +[0.9.12] - 2022-11-02 + +* [iOS] Fixed the problem that iOS earphones and speakers do not switch. +* [Windows] fix bug for rtpSender->RemoveTrack/pc->getStats. +* [iOS] Return groupId. +* [Web] MediaRecorder.startWeb() should expose the timeslice parameter. +* [iOS] Implement RTCPeerConnectionDelegate didRemoveIceCandidates method. +* [iOS] fix disposing Broadcast Sharing stream. + +[0.9.11] - 2022-10-16 + +* [iOS] fix audio route/setSpeakerphoneOn issues. +* [Windows] fix: Have same remote streams id then found wrong MediaStream. +* [Dart] feat: RTCVideoRenderer supports specific trackId when setting MediaStream. + +[0.9.9+hotfix.1] - 2022-10-12 + +* [Darwin] Fix getStats for darwin when trackId is NSNull. + +[0.9.9] - 2022-10-12 + +* [Darwin/Android/Windows] Support getStats for RtpSender/RtpReceiver (Migrate from Legacy to Standard Stats for getStats). +* [Android] Dispose streams and connections. +* [Android] Support rtp transceiver direction type 4. +* [Web] Update dart_webrtc dependendency. + +[0.9.8] - 2022-09-30 + +* [Android] fix: Make sure local stream/track dispose correctly. +* [Android] Remove bluetooth permission on peerConnectionInit. +* [iOS] Fix system sound interruption on iOS (#1099). +* [Android] Fix: call mode on app start (#1097). +* [Dart] Avoid renderer initialization multiple times (#1067). + +[0.9.7] - 2022-09-13 + +* [Windows] Support sendDtmf. +* [Windows] Fixed getStats. + +[0.9.6] - 2022-09-06 + +* [Dart] The dc created by didOpenDataChannel needs to set state to open. +* [Dart] Added callback onFirstFrameRendered. + +[0.9.5] - 2022-08-30 + +* [Android] fix: Fix crash when using multiple renderers. 
+* [Android] fix bug with track dispose cannot close video +* [Andorid/iOS/macOS/Windows] Fix bug of missing events in data-channel. + +[0.9.4] - 2022-08-22 + +* [Andorid/iOS/macOS/Windows] New audio input/output selection API, ondevicechange event is used to monitor audio device changes. + +[0.9.3] - 2022-08-15 + +* [Windows/macOS] Fix UI freeze when getting thumbnails. + +[0.9.2] - 2022-08-09 + +* [Android] update libwebrtc to com.github.webrtc-sdk:android:104.5112.01. +* [iOS/macOS] update WebRTC-SDK to 104.5112.02. +* [Windows] update libwebrtc.dll to 104.5112.02. + +[0.9.1] - 2022-08-01 + +* [iOS] fix : iOS app could not change camera resolutions cause by wrong datatype in the video Contraints. +* [Darwin] bump version for .podspec. + +[0.9.0] - 2022-07-27 + +* [macOS] Added screen-sharing support for macOS +* [Windows] Added screen-sharing support for Windows +* [iOS/macOS] fix: Fix compile warning for Darwin +* [Darwin/Android/Windows] fix: Fix typo peerConnectoinEvent -> peerConnectionEvent for EventChannel name (#1019) + +[0.8.12] - 2022-07-15 + +* [Darwin]: fix: camera release. + +[0.8.11] - 2022-07-11 + +* [Windows] Fix variant exception of findLongInt. (#990) +* [Windows] fix unable to get username/credential when parsing iceServers containing urls +* [iOS] Fix RTCAudioSession properties set with libwebrtc m97, Fixes #987. + +[0.8.10] - 2022-06-28 + +* [iOS] IPC Broadcast Upload Extension support for Screenshare + +[0.8.9] - 2022-06-08 + +* [Android] Fixes DataChannel issue described in #974 +* [iOS] Fixes DataChannel issue described in #974 +* [Dawrin/Android/Windows] Split data channel's webrtc id from our internal id (#961) +* [Windows] Update to m97. +* [Windows] Add PeerConnectionState +* [Windows] Fix can't open mic alone when built-in AEC is enabled. + +[0.8.8] - 2022-05-31 + +* [Android] Added onBufferedAmountChange callback which will return currentBuffer and changedBuffer and implemented bufferedAmount. 
+* [Android] Added onBufferedAmountLow callback which will return currentBuffer ans will be called if bufferedAmountLowThreshold is set a value. + +[0.8.7] - 2022-05-18 + +* [iOS/macOS] fix: Use RTCYUVHelper instead of external libyuv library (#954). +* [iOS/macOS] Flutter 3.0 crash fixes, setStreamHandler on main thread (#953) +* [Android] Use mavenCentral() instead of jcenter() (#952) +* [Windows] Use uint8_t* instead of string in DataChannel::Send method, fix binary send bug. +* [Android] fix: "Reply already submitted" error and setVolume() not working on remote streams. + +[0.8.6] - 2022-05-08 + +* [Web/Android/iOS/macOS] Support null tracks in replaceTrack/setTrack. +* [macOS] Remove absolute path from resolved spec to make checksum stable. +* [Android] Android 12 bluetooth permissions. +* [Dart] fix wrong id type for data-channel. +* [Android] Release i420 Buffer in FrameCapturer. + +[0.8.5] - 2022-04-01 + +* [Dart] Expose RTCDataChannel.id (#898) +* [Android] Enable H264 high profile for SimulcastVideoEncoderFactoryWrapper (#890) + +[0.8.4] - 2022-03-28 + +* [Android] Fix simulcast factory not sending back EncoderInfo (#891) +* [Android] fix: correct misspell in method screenRequestPermissions (#876) + +[0.8.3] - 2022-03-01 + +* [Android/iOS] Update android/ios webrtc native sdk versions. +* [Windows] Feature of selecting i/o audio devices by passing sourceId and/or deviceId constraints (#851). + +[0.8.2] - 2022-02-08 + +* [Android/iOS/macOS/Web] Add restartIce. + +[0.8.1] - 2021-12-29 + +* [Android/iOS] Bump webrtc-sdk version to 93.4577.01. + +[0.8.0] - 2021-12-05 + +* [Dart] Refactor: Use webrtc interface. (#777) +* [iOS] Fix crashes for FlutterRPScreenRecorder stop. +* [Web] Don't stop tracks when disposing MediaStream (#760) +* [Windows] Add the necessary parameters for onRemoveTrack (#763) +* [Example] Properly start foreground service in example (#764) +* [Android] Fix crash for Android, close #757 and #734. 
+* [Dart] Fix typo in deprecated annotations. +* [iOS] Fix IOS captureFrame and add support for remote stream captureFrame (#778) +* [Windows] Fix parsing stun configuration (#789) +* [Windows] Fix mute (#792) +* [iOS/Android/Windows] New video constraints syntax (#790) + +[0.7.1] - 2021-11-04 + +* [iOS/macOS] Update framework. +* [Android] Update framework. +* [Windows] Implement mediaStreamTrackSetEnable (#756). +* [iOS/macOS] Enable audio capture when acquiring track. +* [Android] Call stopCaptureWithCompletionHandler instead (#748) +* [Windows] Fix bug for windows. + +[0.7.0+hotfix.2] - 2021-10-21 + +* [iOS/macOS] Update .podspec for Darwin. + +[0.7.0+hotfix.1] - 2021-10-21 + +* [Android] Fix bug for createDataChannel. + +[0.7.0] - 2021-10-20 + +* [Android] Enable Android simulcast (#731) +* [macOS] Use pre-compiled WebRTC for macOS. (#717) +* [iOS/macOS] Fix the correct return value of createDataChannel under darwin. +* [Windows] Fix using the wrong id to listen datachannel events. +* [Dart] Fix(mediaStreamTrackSetEnable): remote track is unavaiable (#723). + +[0.6.10+hotfix.1] - 2021-10-01 + +* [Web] Fix compiler errors for web. + +[0.6.10] - 2021-10-01 + +* [iOS] Fix bug for RtpTransceiver.getCurrentDirection. +* [Dart] Improve MethodChannel calling. + +[0.6.9] - 2021-10-01 + +* [iOS] Update WebRTC build (#707). +* [Windows] Add Unified-Plan support for windows. (#688) +* [iOS] Improve audio handling on iOS (#705) + +[0.6.8] - 2021-09-27 + +* [Android] Use ApplicationContext to verify permissions when activity is null. +* [iOS] Add support for lightning microphone. (#693) +* [Windows] Fix FlutterMediaStream::GetSources. +* [Web] Fix Flutter 2.5.0 RTCVideoRendererWeb bug (#681) +* [Web] Bug fix (#679) + +[0.6.7] - 2021-09-08 + +* [Android] upgrade webrtc sdk to m92.92.4515. +* [Web] `addTransceiver` bug fix (#675) +* [Web] Use low-level jsutil to call createOffer/createrAnswer to solve the issue on safari/firefox. 
+* [Dart] Fix currentDirection/direction implementation confusion. + +[0.6.6] - 2021.09.01 + +* [Sponsorship] Thanks for LiveKit sponsorship. +* [Web] Avoid removing all audio elements when stopping a single video renderer (#667) +* [Web] Properly cleanup srcObject to avoid accidental dispose +* [Dart] Removed warnings (#647) +* [Web] Switch transferFromImageBitmap to be invoked using js.callMethod (#631) +* [Web] Fix sending binary data over DataChannel in web implementation. (#634) +* [Darwin] Nullable return for GetLocalDescription/GetRemoteDiscription +* [Darwin] Fix incorrect argument name at RTCRtpSender (#600) + +[0.6.5] - 2021.06.18 + +* [Android] Falling back to the first available camera fix #580 +* [Android] Fix application exit null-pointer exception (#582) +* [Dart] Add label getter to DataChannel Interface (#585) +* [Dart] Fix exception raised at RTCPeerConnection.removeTrack and RTCRtpSender.setParameters (#588) +* [Dart] Fix: null check (#595) +* [Dart] Fix: null check for RTCRtpTransceiverNative.fromMap + +[0.6.4] - 2021.05.02 + +* [Android] Fix getting screen capture on Huawei only successful in the first time. (#523) +* [Android] Add configuration "cryptoOptions" in parseRTCConfiguration(). +* [Dart] Change getLocalDescription,getRemoteDescription,RTCRtpSenderWeb.track returns to nullable. +* [Dart] Fixed bug in RTCPeerConnectionWeb.removeTrack. +* [Dart] Change MediaStreamTrack.captureFrame returns to ByteBuffer to compatible with web API. +* [Dart] Do null safety check in onRemoveStream,onRemoveTrack and MediaStream.getTrackById. +* [Android] Add reStartCamera method when the camera is preempted by other apps. +* [Web] Refactored RTCVideoRendererWeb and RTCVideoViewWeb, using video and audio HTML tags to render audio and video streams separately. + +[0.6.3] - 2021.04.03 + +* [Dart] Change RTCRtpSender.track to nullable. +* [Web] Fix RTCVideoView/Renderer pauses when changing child in IndexedStack. 
+ +[0.6.2] - 2021.04.02 + +* [Dart] Use enumerateDevices instead of getSources. +* [Android] Use flutter_background to fix screen capture example. + +[0.6.1] - 2021.04.02 + +* [Darwin] Fixed getting crash when call setLocalDescription multiple time. +* [Dart] Get more pub scores. + +[0.6.0] - 2021.04.01 + +* [Sponsorship] Thanks for Stream sponsorship (#475) +* [Android] Fixed a crash when switching cameras on Huawei devices. +* [Windows] Correct signalingState & iceConnectionState event name on Windows. (#502) +* [Dart] Clip behaviour. (#511) +* [Dart] null-safety (@wer-mathurin Thanks for the hard work). +* [Dart] Fix setMicrophoneMute (#466) +* [Web] Fix pc.addTransceiver method, fix RTCRtpMediaType to string, fix (#437) +* [Android] fix sdpSemantics issue (#478) + +[0.6.0-nullsafety.0] - 2021.03.22 + +* [Dart] null-safety (@wer-mathurin Thanks for the hard work). + +[0.5.8] - 2021.01.26 + +* [Web] Support selecting audio output. +* [Web] Fix issue for getDisplayMedia with audio. +* [Windows] Add Windows Support. +* [macOS] Fix compile error for macos. +* [Dart] Add FilterQuality to RTCVideoView. +* [iOS/Android] Unified plan gettracks. +* [iOS/Android] Bluetooth switching enabled when switching `enableSpeakerphone` value (if they are connected). #201 (#435) +* [Android] Increase necessary Android min SDK version after add Unified-Plan API. + +[0.5.7] - 2020.11.21 + +* [Web] Fix events callback for peerconnection. + +[0.5.6] - 2020.11.21 + +* [Android/Darwin/Web] Add onResize event for RTCVideoRenderer. + +[0.5.5] - 2020.11.21 + +* [Android/Darwin] Fix Simulcast issue. + +[0.5.4] - 2020.11.21 + +* [Native/Web] Add videoWidth/videoHeight getter for RTCVideoRenderer. +* [Web] Add optional parameter track to call getStats. + +[0.5.3] - 2020.11.21 + +* Fix bug. + +[0.5.2] - 2020.11.19 + +* Improve web code + +[0.5.1] - 2020.11.19 + +* Improve unified-plan API for web. +* Add getTransceivers,getSenders, getReceivers methods. 
+ +[0.5.0+1] - 2020.11.18 + +* Remove dart-webrtc and reuse the code in dart:html + because the code generated by package:js cannot be run in dart2js. + +[0.5.0] - 2020.11.15 + +* [Web] Add Unified-Plan for Flutter Web. +* [Web] Add video frame mirror support for web. +* [Web] Support Simulcast for web. +* [Web] Use dart-webrtc as flutter web plugin. +* [Android/Darwin] Fix crash when unset streamIds in RtpTransceiverInit. +* [Dart]Change the constraints of createOffer/createAnswer as optional. +* [iOS]Fix adding track to stream igal committed (#413) + +[0.4.1] - 2020.11.11 + +* Add transceiver to onTrack events. +* Remove unnecessary log printing. +* Fixed a crash caused by using GetTransceivers under non-unified-plan, + close #389. +* FIX - Invalid type inference (#392) +* [Web]Add onEnded and onMuted for Web (#387) +* [Darwin]Fix PeerConnectionState for darwin. +* [Darwin] Fix compilation warning under darwin. +* [Android] Fixed 'Sender is null' issue when removing track. (#401) +* [iOS] fix removeTrack methodChannel response, onTrack's `stream` and `track` not being registered in native. +* [Darwin/Android] `RtpSender` `setParameters` functionality. + +[0.4.0] - 2020.10.14 + +* Support Unified-Plan for Android/iOS/macOS. +* Add PeerConnectionState and add RTCTrackEvent.. +* [Android] Upgrade GoogleWebRTC@android to 1.0.32006. +* [iOS] Upgrade GoogleWebRTC@ios to 1.1.31999. +* Api standardization across implementation (#364), thanks @wer-mathurin. + +[0.3.3] - 2020.09.14 + +* Add RTCDTMFSender for mobile, web and macOS. +* Improve RenegotiationNeededCallback. +* Refactor RTCVideoView for web and solve the resize problem. +* Reduce code size. + +[0.3.2] - 2020.09.11 + +* Reorganize the directory structure. +* Replace class name navigator to MediaDevices. +* Downgrade pedantic version to 1.9.0. + +[0.3.1] - 2020.09.11 + +* [Dart] Apply pedantic linter and more rigorous analysis options. 
+ +[0.3.0+1] - 2020.09.06 + +* [Dart] FIX - missing null check onIceGatheringState (web) + +[0.3.0] - 2020.09.05 + +* [Dart] Improve RTCVideoView. +* [Android] Refactors Android plugin alongside the embedding V2 migration. +* [Dart] Fix .ownerTag not defined for web. +* [Dart] Added label as read only property. +* [macOS] Updated WebRTC framework to work with AppStoreConnect. +* [Dart] Make 'constraints' argument optional. +* [Dart] Make createOffer constraints optional. +* [iOS/Android/Web] Adding createLocalMediaStream method to PeerConnectionFactory. +* [Web] Fixing multiple video renderers on the same HTML page for Flutter Web. +* [iOS] Add peerConnectionId to data channel EventChannel. +* [Android] Add library module ProGuard configuration file. +* [iOS] Fix typo in render size change check condition +* [README] Add missed Android usage hint. + +[0.2.8] - 2020.04.22 + +* [macOS/iOS] Fix typo in render size change check condition. +* [macOS] Fix hot restart videoCapturer crash. +* [Android] Fix Android crash when getUserVideo. + +[0.2.7] - 2020.03.15 + +* [macOS] Fix crash with H264 HW Encoder. +* [Web] Add addTransceiver API. +* [Android] Removed duplicate method that was causing compilation error. +* [iOS] Use MrAlek Libyuv pod fixing incompatibility with FirebaseFirestore. +* [iOS] Upgrade GoogleWebRTC dependency to 1.1.29400. + +[0.2.6] - 2020.02.03 + +* Fixed the interruption of the Bluetooth headset that was playing music after the plugin started. + +[0.2.4] - 2020.02.03 + +* Fixed bug. + +[0.2.3] - 2020.02.03 + +* Fixed bug for enableSpeakerphone (Android/iOS). +* Fix RtcVideoView not rebuild when setState called and renderer is changed. +* Fix Android frame orientation. + +[0.2.2] - 2019.12.13 + +* Removed the soft link of WebRTC.framework to fix compile errors of macos version when third-party flutter app depends on plugins + +[0.2.1] - 2019.12.12 + +* Code format. +* Remove unused files. 
+ +[0.2.0] - 2019.12.12 + +* Add support for macOS (channel dev). +* Add support for Flutter Web (channel dev). +* Add hasTorch support for Android (Camera2 API) and iOS. +* Fix(PeerConnections) split dispose and close +* Add microphone mute support for Android/iOS. +* Add enable speakerphone support for Android/iOS. +* Fix 'createIceServer' method Invalid value error (Android). +* Store SignalingState/IceGatheringState/IceConnectionState in RTCPeerConnection. +* Fixed rendering issues caused by remote MediaStream using the same msid/label when using multiple PeerConnections. + +[0.1.7] - 2019.05.16 + +* Add RTCDataChannelMessage for data channel and remove base64 type. +* Add streaming API for datachannel messages and state changes. +* Remove cloudwebrtc prefix in the plugin method/event channel name. +* Other bug fixes. + +[0.1.6] - 2019.03.31 + +* Add getConfiguration/setConfiguration methods for PeerConnection. +* Add object fit for RTCVideoView. + +[0.1.5] - 2019.03.27 + +* Remove unnecessary parameter for getLocalDescription method. + +[0.1.4] - 2019.03.26 + +* switchCamera method now returns future with isFrontCamera as result +* Fix camera stuck in rare cases +* Add getLocalDescription/getRemoteDescription methods + +[0.1.3] - 2019.03.25 + +* Add horizontal flip (mirror) function for RTCVideoView. +* Fixed ScreenCapture preview aspect ratio for Android. + +[0.1.2] - 2019.03.24 + +* Fix compilation failure caused by invalid code. + +[0.1.1] - 2019.03.24 + +* Migrated to AndroidX using Refactoring from Android Studio +* Fix mediaStreamTrackSetEnable not working. +* Fix iOS can't render video when resolution changes. +* Some code style changes. + +[0.1.0] - 2019.01.21 + +* Fix camera switch issues. +* Support data channel, please use the latest demo to test. +* Support screen sharing, but the work is not perfect, there is a problem with the local preview. + +[0.0.3] - 2018.12.20 + +* Update WebRTC to 1.0.25821. +* Implemented MediaStreamTrack.setVolume(). 
+* Add public getter for texture id. +* Fixed getUserMedia does not work for capturing audio only. + +[0.0.2] - 2018.11.04 + +* Add 'enabled' method for MediaStreamTrack. +* Implement switch camera. +* Add arm64-v8a and x86_64 architecture support for android. + +[0.0.1] - 2018.05.30 * Initial release. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..bbab725aab --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,7 @@ +# Contributing + +We love contributions from everyone, whether it's raising an issue, reporting a bug, adding a feature, or helping improve a document. +Maintaining the flutter-webrtc plugin for all platforms is not an easy task, so everything you do is support for the project. + +# Pull Request +We recommend that you create a related issue before PR so that others can find the answers they want in the issues. diff --git a/Documentation/E2EE.md b/Documentation/E2EE.md new file mode 100644 index 0000000000..a91c8233f7 --- /dev/null +++ b/Documentation/E2EE.md @@ -0,0 +1,92 @@ +# End to End Encryption + +E2EE is an AES-GCM encryption interface injected before sending the packaged RTP packet and after receiving the RTP packet, ensuring that the data is not eavesdropped when passing through SFU or any public transmission network. It coexists with DTLS-SRTP as two layers of encryption. You can control the key, ratchet and other operations of FrameCryptor yourself to ensure that no third party will monitor your tracks. 
+ +## Process of enabling E2EE + +1, Prepare the key provider + +`ratchetSalt` is used to add to the mixture when ratcheting or deriving AES passwords +`aesKey` is the plaintext password you entered, which will be used to derive the actual password + +```dart + final aesKey = 'you-private-key-here'.codeUnits; + final ratchetSalt = 'flutter-webrtc-ratchet-salt'; + + var keyProviderOptions = KeyProviderOptions( + sharedKey: true, + ratchetSalt: Uint8List.fromList(ratchetSalt.codeUnits), + ratchetWindowSize: 16, + failureTolerance: -1, + ); + + var keyProvider = await frameCyrptorFactory.createDefaultKeyProvider(keyProviderOptions); + /// set shared key for all track, default index is 0 + /// also you can set multiple keys by different indexes + await keyProvider.setSharedKey(key: aesKey); +``` + +2, Create PeerConnection + +When you use E2EE on the web, please add `encodedInsertableStreams`: + +``` dart +var pc = await createPeerConnection( { + 'encodedInsertableStreams': true, + }); +``` + +3, Enable FrameCryptor for RTPSender. + +```dart +var stream = await navigator.mediaDevices + .getUserMedia({'audio': true, 'video': false }); +var audioTrack = stream.getAudioTracks(); +var sender = await pc.addTrack(audioTrack, stream); + +var trackId = audioTrack?.id; +var id = 'audio_' + trackId! + '_sender'; + +var frameCyrptor = + await frameCyrptorFactory.createFrameCryptorForRtpSender( + participantId: id, + sender: sender, + algorithm: Algorithm.kAesGcm, + keyProvider: keyProvider!); +/// print framecyrptor state +frameCyrptor.onFrameCryptorStateChanged = (participantId, state) => + print('EN onFrameCryptorStateChanged $participantId $state'); + +/// set currently shared key index +await frameCyrptor.setKeyIndex(0); + +/// enable encryption now. 
+await frameCyrptor.setEnabled(true); +``` + +4, Enable FrameCryptor for RTPReceiver + +```dart + +pc.onTrack((RTCTrackEvent event) async { + var receiver = event.receiver; + var trackId = event.track?.id; + var id = event.track.kind + '_' + trackId! + '_receiver'; + + var frameCyrptor = + await frameCyrptorFactory.createFrameCryptorForRtpReceiver( + participantId: id, + receiver: receiver, + algorithm: Algorithm.kAesGcm, + keyProvider: keyProvider); + + frameCyrptor.onFrameCryptorStateChanged = (participantId, state) => + print('DE onFrameCryptorStateChanged $participantId $state'); + + /// set currently shared key index + await frameCyrptor.setKeyIndex(0); + + /// enable encryption now. + await frameCyrptor.setEnabled(true); +}); +``` diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..8e65a49ff2 --- /dev/null +++ b/NOTICE @@ -0,0 +1,51 @@ +################################################################################### + +The following modifications follow Apache License 2.0 from shiguredo. + +SimulcastVideoEncoderFactoryWrapper.kt + +Apache License 2.0 + +Copyright 2017, Lyo Kato (Original Author) +Copyright 2017-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +##################################################################################### + +react-native-webrtc +https://github.com/react-native-webrtc/react-native-webrtc + +The MIT License (MIT) + +Copyright (c) 2015 Howard Yang + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +##################################################################################### \ No newline at end of file diff --git a/README.md b/README.md index 773e65d1d9..2715e5a31c 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,163 @@ -# flutter-webrtc -Flutter WebRTC plugin for iOS/Android - -## screenshots -# iOS - -# Android - +# Flutter-WebRTC + +[![Financial Contributors on Open Collective](https://opencollective.com/flutter-webrtc/all/badge.svg?label=financial+contributors)](https://opencollective.com/flutter-webrtc) [![pub package](https://img.shields.io/pub/v/flutter_webrtc.svg)](https://pub.dartlang.org/packages/flutter_webrtc) [![Gitter](https://badges.gitter.im/flutter-webrtc/Lobby.svg)](https://gitter.im/flutter-webrtc/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![slack](https://img.shields.io/badge/join-us%20on%20slack-gray.svg?longCache=true&logo=slack&colorB=brightgreen)](https://join.slack.com/t/flutterwebrtc/shared_invite/zt-q83o7y1s-FExGLWEvtkPKM8ku_F8cEQ) + +WebRTC plugin for Flutter Mobile/Desktop/Web + +
+

+Sponsored with 💖   by
+ +Stream Chat + +
+Enterprise Grade APIs for Feeds, Chat, & Video. Try the Flutter Video tutorial 💬 +

+ +
+

+ +LiveKit + +
+ LiveKit - Open source WebRTC and realtime AI infrastructure +

+ +## Functionality + +| Feature | Android | iOS | [Web](https://flutter.dev/web) | macOS | Windows | Linux | [Embedded](https://github.com/sony/flutter-elinux) | [Fuchsia](https://fuchsia.dev/) | +| :-------------: | :-------------:| :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | +| Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Screen Capture | :heavy_check_mark: | [:heavy_check_mark:(*)](https://github.com/flutter-webrtc/flutter-webrtc/wiki/iOS-Screen-Sharing) | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Unified-Plan | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Simulcast | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| MediaRecorder | :warning: | :warning: | :heavy_check_mark: | | | | | | +| End to End Encryption | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | +| Insertable Streams | | | | | | | | | + +Additional platform/OS support from the other community + +- flutter-tizen: +- flutter-elinux(WIP): + +Add `flutter_webrtc` as a [dependency in your pubspec.yaml file](https://flutter.io/using-packages/). + +### iOS + +Add the following entry to your _Info.plist_ file, located in `/ios/Runner/Info.plist`: + +```xml +NSCameraUsageDescription +$(PRODUCT_NAME) Camera Usage! +NSMicrophoneUsageDescription +$(PRODUCT_NAME) Microphone Usage! 
+``` + +This entry allows your app to access camera and microphone. + +### Note for iOS + +The WebRTC.xframework compiled after the m104 release no longer supports iOS arm devices, so need to add the `config.build_settings['ONLY_ACTIVE_ARCH'] = 'YES'` to your ios/Podfile in your project + +ios/Podfile + +```ruby +post_install do |installer| + installer.pods_project.targets.each do |target| + flutter_additional_ios_build_settings(target) + target.build_configurations.each do |config| + # Workaround for https://github.com/flutter/flutter/issues/64502 + config.build_settings['ONLY_ACTIVE_ARCH'] = 'YES' # <= this line + end + end +end +``` + +### Android + +Ensure the following permission is present in your Android Manifest file, located in `/android/app/src/main/AndroidManifest.xml`: + +```xml + + + + + + + +``` + +If you need to use a Bluetooth device, please add: + +```xml + + +``` + +The Flutter project template adds it, so it may already be there. + +Also you will need to set your build settings to Java 8, because official WebRTC jar now uses static methods in `EglBase` interface. Just add this to your app level `build.gradle`: + +```groovy +android { + //... + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } +} +``` + +If necessary, in the same `build.gradle` you will need to increase `minSdkVersion` of `defaultConfig` up to `23` (currently default Flutter generator set it to `16`). + +### Important reminder + +When you compile the release apk, you need to add the following operations, +[Setup Proguard Rules](https://github.com/flutter-webrtc/flutter-webrtc/blob/main/android/proguard-rules.pro) + +## Contributing + +The project is inseparable from the contributors of the community. 
+ +- [CloudWebRTC](https://github.com/cloudwebrtc) - Original Author +- [RainwayApp](https://github.com/rainwayapp) - Sponsor +- [亢少军](https://github.com/kangshaojun) - Sponsor +- [ION](https://github.com/pion/ion) - Sponsor +- [reSipWebRTC](https://github.com/reSipWebRTC) - Sponsor +- [沃德米科技](https://github.com/woodemi)-[36记手写板](https://www.36notes.com) - Sponsor +- [阿斯特网络科技有限公司](https://www.astgo.net/) - Sponsor + +### Example + +For more examples, please refer to [flutter-webrtc-demo](https://github.com/cloudwebrtc/flutter-webrtc-demo/). + +## Contributors + +### Code Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. + + +### Financial Contributors + +Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/flutter-webrtc/contribute)] + +#### Individuals + + + +#### Organizations + +Support this project with your organization. Your logo will show up here with a link to your website. 
[[Contribute](https://opencollective.com/flutter-webrtc/contribute)] + + + + + + + + + + + diff --git a/analysis_options.yaml b/analysis_options.yaml new file mode 100644 index 0000000000..e125cd7523 --- /dev/null +++ b/analysis_options.yaml @@ -0,0 +1,48 @@ +include: package:lints/recommended.yaml + +linter: + rules: + - always_declare_return_types + - avoid_empty_else + - await_only_futures + - avoid_returning_null_for_void + - camel_case_extensions + - camel_case_types + - cancel_subscriptions + - flutter_style_todos + - sort_constructors_first + - sort_unnamed_constructors_first + - sort_pub_dependencies + - type_init_formals + - unnecessary_brace_in_string_interps + - unnecessary_const + - unnecessary_new + - unnecessary_getters_setters + - unnecessary_null_aware_assignments + - unnecessary_null_in_if_null_operators + - unnecessary_overrides + - unnecessary_parenthesis + - unnecessary_statements + - unnecessary_string_interpolations + - unnecessary_this + - unrelated_type_equality_checks + - use_rethrow_when_possible + - valid_regexps + - void_checks + +analyzer: + errors: + # treat missing required parameters as a warning (not a hint) + missing_required_param: warning + # treat missing returns as a warning (not a hint) + missing_return: warning + # allow having TODOs in the code + todo: ignore + # allow self-reference to deprecated members (we do this because otherwise we have + # to annotate every member in every test, assert, etc, when we deprecate something) + deprecated_member_use_from_same_package: ignore + # Conflict with import_sorter + directives_ordering: ignore + constant_identifier_names: ignore + deprecated_member_use: ignore + implementation_imports: ignore diff --git a/android/build.gradle b/android/build.gradle index 442919b18d..f9b288f868 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -2,43 +2,58 @@ group 'com.cloudwebrtc.webrtc' version '1.0-SNAPSHOT' buildscript { + ext.kotlin_version = '1.7.10' repositories { google() - 
jcenter() + mavenCentral() } dependencies { - classpath 'com.android.tools.build:gradle:3.0.1' + classpath 'com.android.tools.build:gradle:7.1.1' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" } } rootProject.allprojects { repositories { google() - jcenter() + mavenCentral() + maven { url 'https://jitpack.io' } } } apply plugin: 'com.android.library' +apply plugin: 'kotlin-android' android { - compileSdkVersion 23 + if (project.android.hasProperty("namespace")) { + namespace 'com.cloudwebrtc.webrtc' + } + compileSdkVersion 31 defaultConfig { minSdkVersion 21 - testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" - ndk { - abiFilters "armeabi-v7a", "x86" - } + testInstrumentationRunner 'androidx.test.runner.AndroidJUnitRunner' + consumerProguardFiles 'proguard-rules.pro' } + lintOptions { disable 'InvalidPackage' } + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + kotlinOptions { + jvmTarget = '1.8' + } } dependencies { - implementation fileTree(dir: 'libs', include: ['*.jar']) - implementation "com.android.support:appcompat-v7:23.0.1" - implementation "com.android.support:support-v4:23.0.1" - implementation "com.android.support:support-annotations:22.0.0" -} \ No newline at end of file + implementation 'io.github.webrtc-sdk:android:125.6422.03' + implementation 'com.github.davidliu:audioswitch:89582c47c9a04c62f90aa5e57251af4800a62c9a' + implementation 'androidx.annotation:annotation:1.1.0' + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" +} diff --git a/android/gradle.properties b/android/gradle.properties index 8bd86f6805..e60119f371 100644 --- a/android/gradle.properties +++ b/android/gradle.properties @@ -1 +1,4 @@ org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true + diff --git a/android/libs/libjingle_peerconnection.so.jar b/android/libs/libjingle_peerconnection.so.jar deleted file mode 
100644 index d3e151e0dc..0000000000 Binary files a/android/libs/libjingle_peerconnection.so.jar and /dev/null differ diff --git a/android/libs/libwebrtc.jar b/android/libs/libwebrtc.jar deleted file mode 100644 index 0e997c53fc..0000000000 Binary files a/android/libs/libwebrtc.jar and /dev/null differ diff --git a/android/proguard-rules.pro b/android/proguard-rules.pro new file mode 100644 index 0000000000..6ce9896196 --- /dev/null +++ b/android/proguard-rules.pro @@ -0,0 +1,3 @@ +# Flutter WebRTC +-keep class com.cloudwebrtc.webrtc.** { *; } +-keep class org.webrtc.** { *; } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java b/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java index e355b0c953..d168ff4220 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java @@ -5,42 +5,80 @@ import org.webrtc.CameraVideoCapturer; class CameraEventsHandler implements CameraVideoCapturer.CameraEventsHandler { + public enum CameraState { + NEW, + OPENING, + OPENED, + CLOSED, + DISCONNECTED, + ERROR, + FREEZED + } private final static String TAG = FlutterWebRTCPlugin.TAG; + private CameraState state = CameraState.NEW; + + public void waitForCameraOpen() { + Log.d(TAG, "CameraEventsHandler.waitForCameraOpen"); + while (state != CameraState.OPENED && state != CameraState.ERROR) { + try { + Thread.sleep(1); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + + public void waitForCameraClosed() { + Log.d(TAG, "CameraEventsHandler.waitForCameraClosed"); + while (state != CameraState.CLOSED && state != CameraState.ERROR) { + try { + Thread.sleep(1); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } // Camera error handler - invoked when camera can not be opened // or any camera exception happens on camera thread. 
@Override public void onCameraError(String errorDescription) { Log.d(TAG, String.format("CameraEventsHandler.onCameraError: errorDescription=%s", errorDescription)); + state = CameraState.ERROR; } // Called when camera is disconnected. @Override public void onCameraDisconnected() { Log.d(TAG, "CameraEventsHandler.onCameraDisconnected"); + state = CameraState.DISCONNECTED; } // Invoked when camera stops receiving frames @Override public void onCameraFreezed(String errorDescription) { Log.d(TAG, String.format("CameraEventsHandler.onCameraFreezed: errorDescription=%s", errorDescription)); + state = CameraState.FREEZED; } // Callback invoked when camera is opening. @Override public void onCameraOpening(String cameraName) { Log.d(TAG, String.format("CameraEventsHandler.onCameraOpening: cameraName=%s", cameraName)); + state = CameraState.OPENING; } // Callback invoked when first camera frame is available after camera is opened. @Override public void onFirstFrameAvailable() { Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable"); + state = CameraState.OPENED; } // Callback invoked when camera closed. 
@Override public void onCameraClosed() { Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable"); + state = CameraState.CLOSED; } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java index 8b55410305..83f316a036 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java @@ -1,29 +1,32 @@ package com.cloudwebrtc.webrtc; -import java.nio.charset.Charset; -import android.util.Base64; +import com.cloudwebrtc.webrtc.utils.AnyThreadSink; +import com.cloudwebrtc.webrtc.utils.ConstraintsMap; import org.webrtc.DataChannel; + +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; + +import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; class DataChannelObserver implements DataChannel.Observer, EventChannel.StreamHandler { - private final int mId; - private final DataChannel mDataChannel; - private final String peerConnectionId; - private final FlutterWebRTCPlugin plugin; - private EventChannel eventChannel; + + private final String flutterId; + private final DataChannel dataChannel; + + private final EventChannel eventChannel; private EventChannel.EventSink eventSink; + private final ArrayList eventQueue = new ArrayList(); - DataChannelObserver(FlutterWebRTCPlugin plugin, String peerConnectionId, int id, DataChannel dataChannel) { - this.peerConnectionId = peerConnectionId; - mId = id; - mDataChannel = dataChannel; - this.plugin = plugin; - this.eventChannel = - new EventChannel( - plugin.registrar().messenger(), - "cloudwebrtc.com/WebRTC/dataChannelEvent" + dataChannel); + DataChannelObserver(BinaryMessenger messenger, String peerConnectionId, String flutterId, + DataChannel dataChannel) { + this.flutterId = flutterId; + 
this.dataChannel = dataChannel; + eventChannel = + new EventChannel(messenger, "FlutterWebRTC/dataChannelEvent" + peerConnectionId + flutterId); eventChannel.setStreamHandler(this); } @@ -43,23 +46,34 @@ private String dataChannelStateString(DataChannel.State dataChannelState) { @Override public void onListen(Object o, EventChannel.EventSink sink) { - eventSink = sink; + eventSink = new AnyThreadSink(sink); + for(Object event : eventQueue) { + eventSink.success(event); + } + eventQueue.clear(); } @Override public void onCancel(Object o) { eventSink = null; } - + @Override - public void onBufferedAmountChange(long amount) { } + public void onBufferedAmountChange(long amount) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "dataChannelBufferedAmountChange"); + params.putInt("id", dataChannel.id()); + params.putLong("bufferedAmount", dataChannel.bufferedAmount()); + params.putLong("changedAmount", amount); + sendEvent(params); + } @Override public void onStateChange() { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "dataChannelStateChanged"); - params.putInt("id", mDataChannel.id()); - params.putString("state", dataChannelStateString(mDataChannel.state())); + params.putInt("id", dataChannel.id()); + params.putString("state", dataChannelStateString(dataChannel.state())); sendEvent(params); } @@ -67,7 +81,7 @@ public void onStateChange() { public void onMessage(DataChannel.Buffer buffer) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "dataChannelReceiveMessage"); - params.putInt("id", mDataChannel.id()); + params.putInt("id", dataChannel.id()); byte[] bytes; if (buffer.data.hasArray()) { @@ -79,17 +93,20 @@ public void onMessage(DataChannel.Buffer buffer) { if (buffer.binary) { params.putString("type", "binary"); - params.putString("data", Base64.encodeToString(bytes, Base64.NO_WRAP)); + params.putByte("data", bytes); } else { params.putString("type", "text"); - params.putString("data", 
new String(bytes, Charset.forName("UTF-8"))); + params.putString("data", new String(bytes, StandardCharsets.UTF_8)); } sendEvent(params); } - void sendEvent(ConstraintsMap params) { - if(eventSink != null) + private void sendEvent(ConstraintsMap params) { + if (eventSink != null) { eventSink.success(params.toMap()); + } else { + eventQueue.add(params.toMap()); + } } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java new file mode 100644 index 0000000000..199afdc4b9 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java @@ -0,0 +1,434 @@ +package com.cloudwebrtc.webrtc; + +import android.util.Log; + +import androidx.annotation.NonNull; + +import org.webrtc.FrameCryptor; +import org.webrtc.FrameCryptorAlgorithm; +import org.webrtc.FrameCryptorFactory; +import org.webrtc.FrameCryptorKeyProvider; +import org.webrtc.RtpReceiver; +import org.webrtc.RtpSender; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel.MethodCallHandler; +import io.flutter.plugin.common.MethodChannel.Result; + +import com.cloudwebrtc.webrtc.utils.AnyThreadSink; +import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import com.cloudwebrtc.webrtc.utils.ConstraintsArray; + +public class FlutterRTCFrameCryptor { + + class FrameCryptorStateObserver implements FrameCryptor.Observer, EventChannel.StreamHandler { + public FrameCryptorStateObserver(BinaryMessenger messenger, String frameCryptorId){ + this.frameCryptorId = frameCryptorId; + eventChannel = new EventChannel(messenger, "FlutterWebRTC/frameCryptorEvent" + frameCryptorId); + eventChannel.setStreamHandler(new EventChannel.StreamHandler() { + @Override + public 
void onListen(Object o, EventChannel.EventSink sink) { + eventSink = new AnyThreadSink(sink); + for(Object event : eventQueue) { + eventSink.success(event); + } + eventQueue.clear(); + } + @Override + public void onCancel(Object o) { + eventSink = null; + } + }); + } + private final EventChannel eventChannel; + private EventChannel.EventSink eventSink; + private final ArrayList eventQueue = new ArrayList(); + private final String frameCryptorId; + + @Override + public void onListen(Object arguments, EventChannel.EventSink events) { + eventSink = new AnyThreadSink(events); + for(Object event : eventQueue) { + eventSink.success(event); + } + eventQueue.clear(); + } + + @Override + public void onCancel(Object arguments) { + eventSink = null; + } + + private String frameCryptorErrorStateToString( FrameCryptor.FrameCryptionState state) { + switch (state) { + case NEW: + return "new"; + case OK: + return "ok"; + case DECRYPTIONFAILED: + return "decryptionFailed"; + case ENCRYPTIONFAILED: + return "encryptionFailed"; + case INTERNALERROR: + return "internalError"; + case KEYRATCHETED: + return "keyRatcheted"; + case MISSINGKEY: + return "missingKey"; + default: + throw new IllegalArgumentException("Unknown FrameCryptorErrorState: " + state); + } + } + + @Override + public void onFrameCryptionStateChanged(String participantId, FrameCryptor.FrameCryptionState state) { + Map event = new HashMap<>(); + event.put("event", "frameCryptionStateChanged"); + event.put("participantId", participantId); + event.put("state",frameCryptorErrorStateToString(state)); + if (eventSink != null) { + eventSink.success(event); + } else { + eventQueue.add(event); + } + } + } + + private static final String TAG = "FlutterRTCFrameCryptor"; + private final Map frameCryptos = new HashMap<>(); + private final Map frameCryptoObservers = new HashMap<>(); + private final Map keyProviders = new HashMap<>(); + private final StateProvider stateProvider; + public FlutterRTCFrameCryptor(StateProvider 
stateProvider) { + this.stateProvider = stateProvider; + } + public boolean handleMethodCall(MethodCall call, @NonNull Result result) { + String method_name = call.method; + Map params = (Map) call.arguments; + if (method_name.equals("frameCryptorFactoryCreateFrameCryptor")) { + frameCryptorFactoryCreateFrameCryptor(params, result); + } else if (method_name.equals("frameCryptorSetKeyIndex")) { + frameCryptorSetKeyIndex(params, result); + } else if (method_name.equals("frameCryptorGetKeyIndex")) { + frameCryptorGetKeyIndex(params, result); + } else if (method_name.equals("frameCryptorSetEnabled")) { + frameCryptorSetEnabled(params, result); + } else if (method_name.equals("frameCryptorGetEnabled")) { + frameCryptorGetEnabled(params, result); + } else if (method_name.equals("frameCryptorDispose")) { + frameCryptorDispose(params, result); + } else if (method_name.equals("frameCryptorFactoryCreateKeyProvider")) { + frameCryptorFactoryCreateKeyProvider(params, result); + }else if (method_name.equals("keyProviderSetSharedKey")) { + keyProviderSetSharedKey(params, result); + } else if (method_name.equals("keyProviderRatchetSharedKey")) { + keyProviderRatchetSharedKey(params, result); + } else if (method_name.equals("keyProviderExportSharedKey")) { + keyProviderExportKey(params, result); + } else if (method_name.equals("keyProviderSetKey")) { + keyProviderSetKey(params, result); + } else if (method_name.equals("keyProviderRatchetKey")) { + keyProviderRatchetKey(params, result); + } else if (method_name.equals("keyProviderExportKey")) { + keyProviderExportKey(params, result); + } else if (method_name.equals("keyProviderSetSifTrailer")) { + keyProviderSetSifTrailer(params, result); + } else if (method_name.equals("keyProviderDispose")) { + keyProviderDispose(params, result); + } else { + return false; + } + return true; + } + + private FrameCryptorAlgorithm frameCryptorAlgorithmFromInt(int algorithm) { + switch (algorithm) { + case 0: + return FrameCryptorAlgorithm.AES_GCM; 
+ default: + return FrameCryptorAlgorithm.AES_GCM; + } + } + + private void frameCryptorFactoryCreateFrameCryptor(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("frameCryptorFactoryCreateFrameCryptorFailed", "keyProvider not found", null); + return; + } + String peerConnectionId = (String) params.get("peerConnectionId"); + PeerConnectionObserver pco = stateProvider.getPeerConnectionObserver(peerConnectionId); + if (pco == null) { + result.error("frameCryptorFactoryCreateFrameCryptorFailed", "peerConnection not found", null); + return; + } + String participantId = (String) params.get("participantId"); + String type = (String) params.get("type"); + int algorithm = (int) params.get("algorithm"); + String rtpSenderId = (String) params.get("rtpSenderId"); + String rtpReceiverId = (String) params.get("rtpReceiverId"); + + if(type.equals("sender")) { + RtpSender rtpSender = pco.getRtpSenderById(rtpSenderId); + + FrameCryptor frameCryptor = FrameCryptorFactory.createFrameCryptorForRtpSender(stateProvider.getPeerConnectionFactory(), + rtpSender, + participantId, + frameCryptorAlgorithmFromInt(algorithm), + keyProvider); + String frameCryptorId = UUID.randomUUID().toString(); + frameCryptos.put(frameCryptorId, frameCryptor); + FrameCryptorStateObserver observer = new FrameCryptorStateObserver(stateProvider.getMessenger(), frameCryptorId); + frameCryptor.setObserver(observer); + frameCryptoObservers.put(frameCryptorId, observer); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("frameCryptorId", frameCryptorId); + result.success(paramsResult.toMap()); + } else if(type.equals("receiver")) { + RtpReceiver rtpReceiver = pco.getRtpReceiverById(rtpReceiverId); + + FrameCryptor frameCryptor = 
FrameCryptorFactory.createFrameCryptorForRtpReceiver(stateProvider.getPeerConnectionFactory(), + rtpReceiver, + participantId, + frameCryptorAlgorithmFromInt(algorithm), + keyProvider); + String frameCryptorId = UUID.randomUUID().toString(); + frameCryptos.put(frameCryptorId, frameCryptor); + FrameCryptorStateObserver observer = new FrameCryptorStateObserver(stateProvider.getMessenger(), frameCryptorId); + frameCryptor.setObserver(observer); + frameCryptoObservers.put(frameCryptorId, observer); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("frameCryptorId", frameCryptorId); + result.success(paramsResult.toMap()); + } else { + result.error("frameCryptorFactoryCreateFrameCryptorFailed", "type must be sender or receiver", null); + return; + } + } + + private void frameCryptorSetKeyIndex(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorSetKeyIndexFailed", "frameCryptor not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + frameCryptor.setKeyIndex(keyIndex); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void frameCryptorGetKeyIndex(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorGetKeyIndexFailed", "frameCryptor not found", null); + return; + } + int keyIndex = frameCryptor.getKeyIndex(); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putInt("keyIndex", keyIndex); + result.success(paramsResult.toMap()); + } + + private void frameCryptorSetEnabled(Map params, @NonNull Result result) { + String frameCryptorId = (String) 
params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorSetEnabledFailed", "frameCryptor not found", null); + return; + } + boolean enabled = (boolean) params.get("enabled"); + frameCryptor.setEnabled(enabled); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", enabled); + result.success(paramsResult.toMap()); + } + + private void frameCryptorGetEnabled(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorGetEnabledFailed", "frameCryptor not found", null); + return; + } + boolean enabled = frameCryptor.isEnabled(); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("enabled", enabled); + result.success(paramsResult.toMap()); + } + + private void frameCryptorDispose(Map params, @NonNull Result result) { + String frameCryptorId = (String) params.get("frameCryptorId"); + FrameCryptor frameCryptor = frameCryptos.get(frameCryptorId); + if (frameCryptor == null) { + result.error("frameCryptorDisposeFailed", "frameCryptor not found", null); + return; + } + frameCryptor.dispose(); + frameCryptos.remove(frameCryptorId); + frameCryptoObservers.remove(frameCryptorId); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("result", "success"); + result.success(paramsResult.toMap()); + } + + private void frameCryptorFactoryCreateKeyProvider(Map params, @NonNull Result result) { + String keyProviderId = UUID.randomUUID().toString(); + Map keyProviderOptions = (Map) params.get("keyProviderOptions"); + boolean sharedKey = (boolean) keyProviderOptions.get("sharedKey"); + int ratchetWindowSize = (int) keyProviderOptions.get("ratchetWindowSize"); + int failureTolerance = (int) keyProviderOptions.get("failureTolerance"); + 
byte[] ratchetSalt = ( byte[]) keyProviderOptions.get("ratchetSalt"); + byte[] uncryptedMagicBytes = new byte[0]; + if(keyProviderOptions.containsKey("uncryptedMagicBytes")) { + uncryptedMagicBytes = ( byte[]) keyProviderOptions.get("uncryptedMagicBytes"); + } + int keyRingSize = (int) keyProviderOptions.get("keyRingSize"); + boolean discardFrameWhenCryptorNotReady = (boolean) keyProviderOptions.get("discardFrameWhenCryptorNotReady"); + FrameCryptorKeyProvider keyProvider = FrameCryptorFactory.createFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance, keyRingSize, discardFrameWhenCryptorNotReady); + ConstraintsMap paramsResult = new ConstraintsMap(); + keyProviders.put(keyProviderId, keyProvider); + paramsResult.putString("keyProviderId", keyProviderId); + result.success(paramsResult.toMap()); + } + + private void keyProviderSetSharedKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetKeySharedFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + byte[] key = ( byte[]) params.get("key"); + keyProvider.setSharedKey(keyIndex, key); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void keyProviderRatchetSharedKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderRatchetSharedKeyFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + + byte[] newKey = keyProvider.ratchetSharedKey(keyIndex); + + ConstraintsMap paramsResult = new ConstraintsMap(); + 
paramsResult.putByte("result", newKey); + result.success(paramsResult.toMap()); + } + + private void keyProviderExportSharedKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderExportSharedKeyFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + + byte[] key = keyProvider.exportSharedKey(keyIndex); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putByte("result", key); + result.success(paramsResult.toMap()); + } + + private void keyProviderSetKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetKeyFailed", "keyProvider not found", null); + return; + } + int keyIndex = (int) params.get("keyIndex"); + String participantId = (String) params.get("participantId"); + byte[] key = ( byte[]) params.get("key"); + keyProvider.setKey(participantId, keyIndex, key); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void keyProviderRatchetKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetKeysFailed", "keyProvider not found", null); + return; + } + String participantId = (String) params.get("participantId"); + int keyIndex = (int) params.get("keyIndex"); + + byte[] newKey = keyProvider.ratchetKey(participantId, keyIndex); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putByte("result", newKey); + result.success(paramsResult.toMap()); + } + 
+ private void keyProviderExportKey(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderExportKeyFailed", "keyProvider not found", null); + return; + } + String participantId = (String) params.get("participantId"); + int keyIndex = (int) params.get("keyIndex"); + + byte[] key = keyProvider.exportKey(participantId, keyIndex); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putByte("result", key); + result.success(paramsResult.toMap()); + } + + private void keyProviderSetSifTrailer(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderSetSifTrailerFailed", "keyProvider not found", null); + return; + } + byte[] sifTrailer = ( byte[]) params.get("sifTrailer"); + keyProvider.setSifTrailer(sifTrailer); + + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putBoolean("result", true); + result.success(paramsResult.toMap()); + } + + private void keyProviderDispose(Map params, @NonNull Result result) { + String keyProviderId = (String) params.get("keyProviderId"); + FrameCryptorKeyProvider keyProvider = keyProviders.get(keyProviderId); + if (keyProvider == null) { + result.error("keyProviderDisposeFailed", "keyProvider not found", null); + return; + } + keyProvider.dispose(); + keyProviders.remove(keyProviderId); + ConstraintsMap paramsResult = new ConstraintsMap(); + paramsResult.putString("result", "success"); + result.success(paramsResult.toMap()); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java index 726ec1a87c..4c1598c98e 100755 --- 
a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java @@ -1,9 +1,10 @@ package com.cloudwebrtc.webrtc; -import android.content.Context; import android.util.Log; import android.graphics.SurfaceTexture; +import android.view.Surface; +import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; import com.cloudwebrtc.webrtc.utils.EglUtils; @@ -12,44 +13,51 @@ import org.webrtc.EglBase; import org.webrtc.MediaStream; import org.webrtc.RendererCommon.RendererEvents; -import org.webrtc.VideoRenderer; import org.webrtc.VideoTrack; import io.flutter.plugin.common.EventChannel; +import io.flutter.view.TextureRegistry; -public class FlutterRTCVideoRenderer implements EventChannel.StreamHandler { +public class FlutterRTCVideoRenderer implements EventChannel.StreamHandler { private static final String TAG = FlutterWebRTCPlugin.TAG; - private final SurfaceTexture texture; - private final Context context; + private final TextureRegistry.SurfaceProducer producer; private int id = -1; + private MediaStream mediaStream; - public void Dispose(){ + private String ownerTag; + + public void Dispose() { //destroy - if(surfaceTextureRenderer != null) { + if (surfaceTextureRenderer != null) { surfaceTextureRenderer.release(); } - if(eventChannel != null) + if (eventChannel != null) eventChannel.setStreamHandler(null); eventSink = null; + producer.release(); } /** * The {@code RendererEvents} which listens to rendering events reported by * {@link #surfaceTextureRenderer}. 
*/ - private final RendererEvents rendererEvents - = new RendererEvents() { - private int _rotation = 0; - private int _width = 0, _height = 0; + private RendererEvents rendererEvents; + + private void listenRendererEvents() { + rendererEvents = new RendererEvents() { + private int _rotation = -1; + private int _width = 0, _height = 0; @Override public void onFirstFrameRendered() { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "didFirstFrameRendered"); params.putInt("id", id); - eventSink.success(params.toMap()); + if (eventSink != null) { + eventSink.success(params.toMap()); + } } @Override @@ -57,26 +65,19 @@ public void onFrameResolutionChanged( int videoWidth, int videoHeight, int rotation) { - if(eventSink != null) - { - if(_width != videoWidth || _height != videoHeight){ + if (eventSink != null) { + if (_width != videoWidth || _height != videoHeight) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "didTextureChangeVideoSize"); params.putInt("id", id); - - if(rotation == 90 || rotation == 270){ - params.putDouble("width", (double) videoHeight); - params.putDouble("height", (double) videoWidth); - }else { - params.putDouble("width", (double) videoWidth); - params.putDouble("height", (double) videoHeight); - } + params.putDouble("width", (double) videoWidth); + params.putDouble("height", (double) videoHeight); _width = videoWidth; _height = videoHeight; eventSink.success(params.toMap()); } - if(_rotation != rotation){ + if (_rotation != rotation) { ConstraintsMap params2 = new ConstraintsMap(); params2.putString("event", "didTextureChangeRotation"); params2.putInt("id", id); @@ -87,14 +88,9 @@ public void onFrameResolutionChanged( } } }; + } - private SurfaceTextureRenderer surfaceTextureRenderer; - - /** - * The {@code VideoRenderer}, if any, which renders {@link #videoTrack} on - * this {@code View}. 
- */ - private VideoRenderer videoRenderer; + private final SurfaceTextureRenderer surfaceTextureRenderer; /** * The {@code VideoTrack}, if any, rendered by this {@code FlutterRTCVideoRenderer}. @@ -104,24 +100,28 @@ public void onFrameResolutionChanged( EventChannel eventChannel; EventChannel.EventSink eventSink; - public FlutterRTCVideoRenderer(SurfaceTexture texture, Context context) { - this.surfaceTextureRenderer = new SurfaceTextureRenderer(context, texture); - this.texture = texture; - this.context = context; + public FlutterRTCVideoRenderer(TextureRegistry.SurfaceProducer producer) { + this.surfaceTextureRenderer = new SurfaceTextureRenderer(""); + listenRendererEvents(); + surfaceTextureRenderer.init(EglUtils.getRootEglBaseContext(), rendererEvents); + surfaceTextureRenderer.surfaceCreated(producer); + this.eventSink = null; + this.producer = producer; + this.ownerTag = null; } - public void setEventChannel(EventChannel eventChannel){ + public void setEventChannel(EventChannel eventChannel) { this.eventChannel = eventChannel; } - public void setId(int id){ + public void setId(int id) { this.id = id; } @Override public void onListen(Object o, EventChannel.EventSink sink) { - eventSink = sink; + eventSink = new AnyThreadSink(sink); } @Override @@ -134,11 +134,7 @@ public void onCancel(Object o) { * resources (if rendering is in progress). */ private void removeRendererFromVideoTrack() { - if (videoRenderer != null) { - videoTrack.removeRenderer(videoRenderer); - videoRenderer.dispose(); - videoRenderer = null; - } + videoTrack.removeSink(surfaceTextureRenderer); } /** @@ -147,17 +143,48 @@ private void removeRendererFromVideoTrack() { * specified {@code mediaStream}. * * @param mediaStream The {@code MediaStream} to be rendered by this - * {@code FlutterRTCVideoRenderer} or {@code null}. + * {@code FlutterRTCVideoRenderer} or {@code null}. 
*/ - public void setStream(MediaStream mediaStream) { + public void setStream(MediaStream mediaStream, String ownerTag) { VideoTrack videoTrack; + this.mediaStream = mediaStream; + this.ownerTag = ownerTag; + if (mediaStream == null) { + videoTrack = null; + } else { + List videoTracks = mediaStream.videoTracks; + videoTrack = videoTracks.isEmpty() ? null : videoTracks.get(0); + } + + setVideoTrack(videoTrack); + } + /** + * Sets the {@code MediaStream} to be rendered by this {@code FlutterRTCVideoRenderer}. + * The implementation renders the first {@link VideoTrack}, if any, of the + * specified trackId + * + * @param mediaStream The {@code MediaStream} to be rendered by this + * {@code FlutterRTCVideoRenderer} or {@code null}. + * @param trackId The {@code trackId} to be rendered by this + * {@code FlutterRTCVideoRenderer} or {@code null}. + */ + public void setStream(MediaStream mediaStream,String trackId, String ownerTag) { + VideoTrack videoTrack; + this.mediaStream = mediaStream; + this.ownerTag = ownerTag; if (mediaStream == null) { videoTrack = null; } else { List videoTracks = mediaStream.videoTracks; videoTrack = videoTracks.isEmpty() ? null : videoTracks.get(0); + + for (VideoTrack track : videoTracks){ + if (track.id().equals(trackId)){ + videoTrack = track; + } + } } setVideoTrack(videoTrack); @@ -167,9 +194,9 @@ public void setStream(MediaStream mediaStream) { * Sets the {@code VideoTrack} to be rendered by this {@code FlutterRTCVideoRenderer}. * * @param videoTrack The {@code VideoTrack} to be rendered by this - * {@code FlutterRTCVideoRenderer} or {@code null}. + * {@code FlutterRTCVideoRenderer} or {@code null}. 
*/ - private void setVideoTrack(VideoTrack videoTrack) { + public void setVideoTrack(VideoTrack videoTrack) { VideoTrack oldValue = this.videoTrack; if (oldValue != videoTrack) { @@ -180,9 +207,14 @@ private void setVideoTrack(VideoTrack videoTrack) { this.videoTrack = videoTrack; if (videoTrack != null) { - tryAddRendererToVideoTrack(); - }else{ - + try { + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to " + videoTrack.id()); + tryAddRendererToVideoTrack(); + } catch (Exception e) { + Log.e(TAG, "tryAddRendererToVideoTrack " + e); + } + } else { + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to null"); } } } @@ -191,9 +223,8 @@ private void setVideoTrack(VideoTrack videoTrack) { * Starts rendering {@link #videoTrack} if rendering is not in progress and * all preconditions for the start of rendering are met. */ - private void tryAddRendererToVideoTrack() { - if (videoRenderer == null - && videoTrack != null) { + private void tryAddRendererToVideoTrack() throws Exception { + if (videoTrack != null) { EglBase.Context sharedContext = EglUtils.getRootEglBaseContext(); if (sharedContext == null) { @@ -204,11 +235,25 @@ private void tryAddRendererToVideoTrack() { } surfaceTextureRenderer.release(); - surfaceTextureRenderer = new SurfaceTextureRenderer(context, texture); + listenRendererEvents(); surfaceTextureRenderer.init(sharedContext, rendererEvents); + surfaceTextureRenderer.surfaceCreated(producer); + + videoTrack.addSink(surfaceTextureRenderer); + } + } + + public boolean checkMediaStream(String id, String ownerTag) { + if (null == id || null == mediaStream || ownerTag == null || !ownerTag.equals(this.ownerTag)) { + return false; + } + return id.equals(mediaStream.getId()); + } - videoRenderer = new VideoRenderer(surfaceTextureRenderer); - videoTrack.addRenderer(videoRenderer); + public boolean checkVideoTrack(String id, String ownerTag) { + if (null == id || null == videoTrack || ownerTag == null || 
!ownerTag.equals(this.ownerTag)) { + return false; } + return id.equals(videoTrack.id()); } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java index 41e99c245c..3a49f88c85 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java @@ -1,1006 +1,198 @@ package com.cloudwebrtc.webrtc; import android.app.Activity; +import android.app.Application; import android.content.Context; -import android.hardware.Camera; -import android.graphics.SurfaceTexture; +import android.os.Bundle; import android.util.Log; -import android.util.LongSparseArray; -import com.cloudwebrtc.webrtc.utils.ConstraintsArray; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import com.cloudwebrtc.webrtc.utils.EglUtils; -import com.cloudwebrtc.webrtc.utils.ObjectType; +import androidx.annotation.NonNull; +import androidx.lifecycle.DefaultLifecycleObserver; +import androidx.lifecycle.Lifecycle; +import androidx.lifecycle.LifecycleOwner; -import java.util.*; +import com.cloudwebrtc.webrtc.audio.AudioProcessingController; +import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; +import com.cloudwebrtc.webrtc.utils.AnyThreadSink; +import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import org.webrtc.AudioTrack; -import org.webrtc.EglBase; -import org.webrtc.IceCandidate; -import org.webrtc.Logging; -import org.webrtc.MediaConstraints; -import org.webrtc.MediaStream; +import org.webrtc.ExternalAudioProcessingFactory; import org.webrtc.MediaStreamTrack; -import org.webrtc.PeerConnection; -import org.webrtc.PeerConnectionFactory; -import org.webrtc.SdpObserver; -import org.webrtc.SessionDescription; -import org.webrtc.VideoTrack; +import io.flutter.embedding.engine.plugins.FlutterPlugin; +import io.flutter.embedding.engine.plugins.activity.ActivityAware; +import 
io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding; +import io.flutter.embedding.engine.plugins.lifecycle.HiddenLifecycleReference; +import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.MethodChannel; -import io.flutter.plugin.common.MethodChannel.MethodCallHandler; -import io.flutter.plugin.common.MethodChannel.Result; -import io.flutter.plugin.common.MethodCall; -import io.flutter.plugin.common.PluginRegistry.Registrar; import io.flutter.view.TextureRegistry; /** * FlutterWebRTCPlugin */ -public class FlutterWebRTCPlugin implements MethodCallHandler { +public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventChannel.StreamHandler { static public final String TAG = "FlutterWebRTCPlugin"; + private static Application application; - private final Registrar registrar; - private final MethodChannel channel; - - public Map localStreams; - public Map localTracks; - private final Map mPeerConnectionObservers; - - private final TextureRegistry textures; - private LongSparseArray renders = new LongSparseArray<>(); - - /** - * The implementation of {@code getUserMedia} extracted into a separate file - * in order to reduce complexity and to (somewhat) separate concerns. - */ - private GetUserMediaImpl getUserMediaImpl; - final PeerConnectionFactory mFactory; - - - public Activity getActivity() { - return registrar.activity(); - } - - public Context getContext() { - return registrar.context(); - } - - /** - * Plugin registration. 
- */ - public static void registerWith(Registrar registrar) { - final MethodChannel channel = new MethodChannel(registrar.messenger(), "cloudwebrtc.com/WebRTC.Method"); - channel.setMethodCallHandler(new FlutterWebRTCPlugin(registrar, channel)); - } + private MethodChannel methodChannel; + private MethodCallHandlerImpl methodCallHandler; + private LifeCycleObserver observer; + private Lifecycle lifecycle; + private EventChannel eventChannel; + public EventChannel.EventSink eventSink; - public Registrar registrar() { - return this.registrar; + public FlutterWebRTCPlugin() { + sharedSingleton = this; } - private FlutterWebRTCPlugin(Registrar registrar, MethodChannel channel) { - this.registrar = registrar; - this.channel = channel; - this.textures = registrar.textures(); - mPeerConnectionObservers = new HashMap(); - localStreams = new HashMap(); - localTracks = new HashMap(); - - PeerConnectionFactory.initialize( - PeerConnectionFactory.InitializationOptions.builder(registrar.context()) - .setEnableInternalTracer(false) - .setEnableVideoHwAcceleration(true) - .createInitializationOptions()); - - mFactory = new PeerConnectionFactory(null); - // Initialize EGL contexts required for HW acceleration. 
- EglBase.Context eglContext = EglUtils.getRootEglBaseContext(); - if (eglContext != null) { - mFactory.setVideoHwAccelerationOptions(eglContext, eglContext); - } - getUserMediaImpl = new GetUserMediaImpl(this, registrar.context()); - } - - @Override - public void onMethodCall(MethodCall call, Result result) { - if (call.method.equals("createPeerConnection")) { - Map constraints = call.argument("constraints"); - Map configuration = call.argument("configuration"); - String peerConnectionId = peerConnectionInit(new ConstraintsMap(configuration), new ConstraintsMap((constraints))); - ConstraintsMap res = new ConstraintsMap(); - res.putString("peerConnectionId", peerConnectionId); - result.success(res.toMap()); - } else if (call.method.equals("getUserMedia")) { - Map constraints = call.argument("constraints"); - ConstraintsMap constraintsMap = new ConstraintsMap(constraints); - getUserMedia(constraintsMap, result); - }else if (call.method.equals("getSources")) { - getSources(result); - }else if (call.method.equals("createOffer")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map constraints = call.argument("constraints"); - peerConnectionCreateOffer(peerConnectionId, new ConstraintsMap(constraints), result); - } else if (call.method.equals("createAnswer")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map constraints = call.argument("constraints"); - peerConnectionCreateAnswer(peerConnectionId, new ConstraintsMap(constraints), result); - } else if (call.method.equals("mediaStreamGetTracks")) { - String streamId = call.argument("streamId"); - MediaStream stream = getStreamForId(streamId); - //TODO: build tracks map. 
- } else if (call.method.equals("addStream")) { - String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("peerConnectionId"); - peerConnectionAddStream(streamId, peerConnectionId, result); - } else if (call.method.equals("removeStream")) { - String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("peerConnectionId"); - peerConnectionRemoveStream(streamId, peerConnectionId, result); - } else if (call.method.equals("setLocalDescription")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map description = call.argument("description"); - peerConnectionSetLocalDescription(new ConstraintsMap(description), peerConnectionId, result); - } else if (call.method.equals("setRemoteDescription")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map description = call.argument("description"); - peerConnectionSetRemoteDescription(new ConstraintsMap(description), peerConnectionId, result); - } else if (call.method.equals("addCandidate")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map candidate = call.argument("candidate"); - peerConnectionAddICECandidate(new ConstraintsMap(candidate), peerConnectionId, result); - } else if (call.method.equals("getStats")) { - String peerConnectionId = call.argument("peerConnectionId"); - String trackId = call.argument("trackId"); - peerConnectionGetStats(trackId, peerConnectionId, result); - } else if (call.method.equals("createDataChannel")) { - String peerConnectionId = call.argument("peerConnectionId"); - String label = call.argument("label"); - Map dataChannelDict = call.argument("dataChannelDict"); - createDataChannel(peerConnectionId, label, new ConstraintsMap(dataChannelDict), result); - } else if (call.method.equals("dataChannelSend")) { - String peerConnectionId = call.argument("peerConnectionId"); - int dataChannelId = call.argument("dataChannelId"); - String data = call.argument("data"); - String type = 
call.argument("type"); - dataChannelSend(peerConnectionId, dataChannelId, data, type); - } else if (call.method.equals("dataChannelClose")) { - String peerConnectionId = call.argument("peerConnectionId"); - int dataChannelId = call.argument("dataChannelId"); - dataChannelClose(peerConnectionId, dataChannelId); - } else if (call.method.equals("streamDispose")) { - String streamId = call.argument("streamId"); - mediaStreamRelease(streamId); - result.success(null); - } else if (call.method.equals("trackDispose")) { - String trackId = call.argument("trackId"); - localTracks.remove(trackId); - result.success(null); - } else if (call.method.equals("peerConnectionClose")) { - String peerConnectionId = call.argument("peerConnectionId"); - peerConnectionClose(peerConnectionId); - result.success(null); - } else if (call.method.equals("createVideoRenderer")) { - TextureRegistry.SurfaceTextureEntry entry = textures.createSurfaceTexture(); - SurfaceTexture surfaceTexture = entry.surfaceTexture(); - FlutterRTCVideoRenderer render = new FlutterRTCVideoRenderer(surfaceTexture, getContext()); - renders.put(entry.id(), render); - - EventChannel eventChannel = - new EventChannel( - registrar.messenger(), - "cloudwebrtc.com/WebRTC/Texture" + entry.id()); - - eventChannel.setStreamHandler(render); - render.setEventChannel(eventChannel); - render.setId((int)entry.id()); - - ConstraintsMap params = new ConstraintsMap(); - params.putInt("textureId", (int)entry.id()); - result.success(params.toMap()); - } else if (call.method.equals("videoRendererDispose")) { - int textureId = call.argument("textureId"); - FlutterRTCVideoRenderer render = renders.get(textureId); - render.Dispose(); - renders.delete(textureId); - } else if (call.method.equals("videoRendererSetSrcObject")) { - int textureId = call.argument("textureId"); - String streamId = call.argument("streamId"); - - FlutterRTCVideoRenderer render = renders.get(textureId); - - if(render == null ){ - 
result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", null); - return; - } - - MediaStream stream = getStreamForId(streamId); - render.setStream(stream); - result.success(null); - } else { - result.notImplemented(); - } - } + public static FlutterWebRTCPlugin sharedSingleton; - private PeerConnection getPeerConnection(String id) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - return (pco == null) ? null : pco.getPeerConnection(); + public AudioProcessingController getAudioProcessingController() { + return methodCallHandler.audioProcessingController; } - private List createIceServers(ConstraintsArray iceServersArray) { - final int size = (iceServersArray == null) ? 0 : iceServersArray.size(); - List iceServers = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - ConstraintsMap iceServerMap = iceServersArray.getMap(i); - boolean hasUsernameAndCredential = iceServerMap.hasKey("username") && iceServerMap.hasKey("credential"); - if (iceServerMap.hasKey("url")) { - if (hasUsernameAndCredential) { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("url"), iceServerMap.getString("username"), iceServerMap.getString("credential"))); - } else { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("url"))); - } - } else if (iceServerMap.hasKey("urls")) { - switch (iceServerMap.getType("urls")) { - case String: - if (hasUsernameAndCredential) { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("urls"), iceServerMap.getString("username"), iceServerMap.getString("credential"))); - } else { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("urls"))); - } - break; - case Array: - ConstraintsArray urls = iceServerMap.getArray("urls"); - for (int j = 0; j < urls.size(); j++) { - String url = urls.getString(j); - if (hasUsernameAndCredential) { - iceServers.add(new PeerConnection.IceServer(url, iceServerMap.getString("username"), 
iceServerMap.getString("credential"))); - } else { - iceServers.add(new PeerConnection.IceServer(url)); - } - } - break; - } - } - } - return iceServers; + public MediaStreamTrack getTrackForId(String trackId, String peerConnectionId) { + return methodCallHandler.getTrackForId(trackId, peerConnectionId); } - private PeerConnection.RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { - ConstraintsArray iceServersArray = null; - if (map != null) { - iceServersArray = map.getArray("iceServers"); - } - List iceServers = createIceServers(iceServersArray); - PeerConnection.RTCConfiguration conf = new PeerConnection.RTCConfiguration(iceServers); - if (map == null) { - return conf; - } - - // iceTransportPolicy (public api) - if (map.hasKey("iceTransportPolicy") - && map.getType("iceTransportPolicy") == ObjectType.String) { - final String v = map.getString("iceTransportPolicy"); - if (v != null) { - switch (v) { - case "all": // public - conf.iceTransportsType = PeerConnection.IceTransportsType.ALL; - break; - case "relay": // public - conf.iceTransportsType = PeerConnection.IceTransportsType.RELAY; - break; - case "nohost": - conf.iceTransportsType = PeerConnection.IceTransportsType.NOHOST; - break; - case "none": - conf.iceTransportsType = PeerConnection.IceTransportsType.NONE; - break; - } - } - } - - // bundlePolicy (public api) - if (map.hasKey("bundlePolicy") - && map.getType("bundlePolicy") == ObjectType.String) { - final String v = map.getString("bundlePolicy"); - if (v != null) { - switch (v) { - case "balanced": // public - conf.bundlePolicy = PeerConnection.BundlePolicy.BALANCED; - break; - case "max-compat": // public - conf.bundlePolicy = PeerConnection.BundlePolicy.MAXCOMPAT; - break; - case "max-bundle": // public - conf.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE; - break; - } - } - } - - // rtcpMuxPolicy (public api) - if (map.hasKey("rtcpMuxPolicy") - && map.getType("rtcpMuxPolicy") == ObjectType.String) { - final String v = 
map.getString("rtcpMuxPolicy"); - if (v != null) { - switch (v) { - case "negotiate": // public - conf.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.NEGOTIATE; - break; - case "require": // public - conf.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE; - break; - } - } - } - - // FIXME: peerIdentity of type DOMString (public api) - // FIXME: certificates of type sequence (public api) - - // iceCandidatePoolSize of type unsigned short, defaulting to 0 - if (map.hasKey("iceCandidatePoolSize") - && map.getType("iceCandidatePoolSize") == ObjectType.Number) { - final int v = map.getInt("iceCandidatePoolSize"); - if (v > 0) { - conf.iceCandidatePoolSize = v; - } - } - - // === below is private api in webrtc === - - // tcpCandidatePolicy (private api) - if (map.hasKey("tcpCandidatePolicy") - && map.getType("tcpCandidatePolicy") == ObjectType.String) { - final String v = map.getString("tcpCandidatePolicy"); - if (v != null) { - switch (v) { - case "enabled": - conf.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.ENABLED; - break; - case "disabled": - conf.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; - break; - } - } - } - - // candidateNetworkPolicy (private api) - if (map.hasKey("candidateNetworkPolicy") - && map.getType("candidateNetworkPolicy") == ObjectType.String) { - final String v = map.getString("candidateNetworkPolicy"); - if (v != null) { - switch (v) { - case "all": - conf.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.ALL; - break; - case "low_cost": - conf.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.LOW_COST; - break; - } - } - } - - // KeyType (private api) - if (map.hasKey("keyType") - && map.getType("keyType") == ObjectType.String) { - final String v = map.getString("keyType"); - if (v != null) { - switch (v) { - case "RSA": - conf.keyType = PeerConnection.KeyType.RSA; - break; - case "ECDSA": - conf.keyType = PeerConnection.KeyType.ECDSA; - break; - } - } - } - - // continualGatheringPolicy 
(private api) - if (map.hasKey("continualGatheringPolicy") - && map.getType("continualGatheringPolicy") == ObjectType.String) { - final String v = map.getString("continualGatheringPolicy"); - if (v != null) { - switch (v) { - case "gather_once": - conf.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_ONCE; - break; - case "gather_continually": - conf.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY; - break; - } - } - } - - // audioJitterBufferMaxPackets (private api) - if (map.hasKey("audioJitterBufferMaxPackets") - && map.getType("audioJitterBufferMaxPackets") == ObjectType.Number) { - final int v = map.getInt("audioJitterBufferMaxPackets"); - if (v > 0) { - conf.audioJitterBufferMaxPackets = v; - } - } - - // iceConnectionReceivingTimeout (private api) - if (map.hasKey("iceConnectionReceivingTimeout") - && map.getType("iceConnectionReceivingTimeout") == ObjectType.Number) { - final int v = map.getInt("iceConnectionReceivingTimeout"); - conf.iceConnectionReceivingTimeout = v; - } - - // iceBackupCandidatePairPingInterval (private api) - if (map.hasKey("iceBackupCandidatePairPingInterval") - && map.getType("iceBackupCandidatePairPingInterval") == ObjectType.Number) { - final int v = map.getInt("iceBackupCandidatePairPingInterval"); - conf.iceBackupCandidatePairPingInterval = v; - } - - // audioJitterBufferFastAccelerate (private api) - if (map.hasKey("audioJitterBufferFastAccelerate") - && map.getType("audioJitterBufferFastAccelerate") == ObjectType.Boolean) { - final boolean v = map.getBoolean("audioJitterBufferFastAccelerate"); - conf.audioJitterBufferFastAccelerate = v; - } - - // pruneTurnPorts (private api) - if (map.hasKey("pruneTurnPorts") - && map.getType("pruneTurnPorts") == ObjectType.Boolean) { - final boolean v = map.getBoolean("pruneTurnPorts"); - conf.pruneTurnPorts = v; - } - - // presumeWritableWhenFullyRelayed (private api) - if (map.hasKey("presumeWritableWhenFullyRelayed") - && 
map.getType("presumeWritableWhenFullyRelayed") == ObjectType.Boolean) { - final boolean v = map.getBoolean("presumeWritableWhenFullyRelayed"); - conf.presumeWritableWhenFullyRelayed = v; - } - - return conf; + public LocalTrack getLocalTrack(String trackId) { + return methodCallHandler.getLocalTrack(trackId); } - public String peerConnectionInit( - ConstraintsMap configuration, - ConstraintsMap constraints) { - - String peerConnectionId = getNextStreamUUID(); - PeerConnectionObserver observer = new PeerConnectionObserver(this, peerConnectionId); - PeerConnection peerConnection - = mFactory.createPeerConnection( - parseRTCConfiguration(configuration), - parseMediaConstraints(constraints), - observer); - observer.setPeerConnection(peerConnection); - mPeerConnectionObservers.put(peerConnectionId, observer); - return peerConnectionId; + public MediaStreamTrack getRemoteTrack(String trackId) { + return methodCallHandler.getRemoteTrack(trackId); } - String getNextStreamUUID() { - String uuid; - - do { - uuid = UUID.randomUUID().toString(); - } while (getStreamForId(uuid) != null); - - return uuid; + @Override + public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { + startListening(binding.getApplicationContext(), binding.getBinaryMessenger(), + binding.getTextureRegistry()); } - String getNextTrackUUID() { - String uuid; - - do { - uuid = UUID.randomUUID().toString(); - } while (getTrackForId(uuid) != null); - - return uuid; + @Override + public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { + stopListening(); } - MediaStream getStreamForId(String id) { - MediaStream stream = localStreams.get(id); - - if (stream == null) { - for (Map.Entry entry : mPeerConnectionObservers.entrySet()) { - PeerConnectionObserver pco = entry.getValue(); - stream = pco.remoteStreams.get(id); - if (stream != null) { - break; - } - } - } - - return stream; + @Override + public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) { + 
methodCallHandler.setActivity(binding.getActivity()); + this.observer = new LifeCycleObserver(); + this.lifecycle = ((HiddenLifecycleReference) binding.getLifecycle()).getLifecycle(); + this.lifecycle.addObserver(this.observer); } - private MediaStreamTrack getTrackForId(String trackId) { - MediaStreamTrack track = localTracks.get(trackId); - - if (track == null) { - for (Map.Entry entry : mPeerConnectionObservers.entrySet()) { - PeerConnectionObserver pco = entry.getValue(); - track = pco.remoteTracks.get(trackId); - if (track != null) { - break; - } - } - } - - return track; + @Override + public void onDetachedFromActivityForConfigChanges() { + methodCallHandler.setActivity(null); } - /** - * Parses a constraint set specified in the form of a JavaScript object into - * a specific List of MediaConstraints.KeyValuePairs. - * - * @param src The constraint set in the form of a JavaScript object to - * parse. - * @param dst The List of MediaConstraints.KeyValuePairs - * into which the specified src is to be parsed. 
- */ - private void parseConstraints( - ConstraintsMap src, - List dst) { - - for (Map.Entry entry : src.toMap().entrySet()) { - String key = entry.getKey(); - String value = getMapStrValue(src, entry.getKey()); - dst.add(new MediaConstraints.KeyValuePair(key, value)); - } + @Override + public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding binding) { + methodCallHandler.setActivity(binding.getActivity()); } - private String getMapStrValue(ConstraintsMap map, String key) { - if(!map.hasKey(key)){ + @Override + public void onDetachedFromActivity() { + methodCallHandler.setActivity(null); + if (this.observer != null) { + this.lifecycle.removeObserver(this.observer); + if (application!=null) { + application.unregisterActivityLifecycleCallbacks(this.observer); + } + } + this.lifecycle = null; + } + + private void startListening(final Context context, BinaryMessenger messenger, + TextureRegistry textureRegistry) { + AudioSwitchManager.instance = new AudioSwitchManager(context); + methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry); + methodChannel = new MethodChannel(messenger, "FlutterWebRTC.Method"); + methodChannel.setMethodCallHandler(methodCallHandler); + eventChannel = new EventChannel( messenger,"FlutterWebRTC.Event"); + eventChannel.setStreamHandler(this); + AudioSwitchManager.instance.audioDeviceChangeListener = (devices, currentDevice) -> { + Log.w(TAG, "audioFocusChangeListener " + devices+ " " + currentDevice); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onDeviceChange"); + sendEvent(params.toMap()); return null; - } - ObjectType type = map.getType(key); - switch (type) { - case Boolean: - return String.valueOf(map.getBoolean(key)); - case Number: - // Don't know how to distinguish between Int and Double from - // ReadableType.Number. 'getInt' will fail on double value, - // while 'getDouble' works for both. 
- // return String.valueOf(map.getInt(key)); - return String.valueOf(map.getDouble(key)); - case String: - return map.getString(key); - default: - return null; - } - } - - /** - * Parses mandatory and optional "GUM" constraints described by a specific - * ConstraintsMap. - * - * @param constraints A ConstraintsMap which represents a JavaScript - * object specifying the constraints to be parsed into a - * MediaConstraints instance. - * @return A new MediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. - */ - MediaConstraints parseMediaConstraints(ConstraintsMap constraints) { - MediaConstraints mediaConstraints = new MediaConstraints(); - - if (constraints.hasKey("mandatory") - && constraints.getType("mandatory") == ObjectType.Map) { - parseConstraints(constraints.getMap("mandatory"), - mediaConstraints.mandatory); - } else { - Log.d(TAG, "mandatory constraints are not a map"); - } - - if (constraints.hasKey("optional") - && constraints.getType("optional") == ObjectType.Array) { - ConstraintsArray optional = constraints.getArray("optional"); - - for (int i = 0, size = optional.size(); i < size; i++) { - if (optional.getType(i) == ObjectType.Map) { - parseConstraints( - optional.getMap(i), - mediaConstraints.optional); - } - } - } else { - Log.d(TAG, "optional constraints are not an array"); - } - - return mediaConstraints; - } - - public void getUserMedia(ConstraintsMap constraints, Result result) { - String streamId = getNextStreamUUID(); - MediaStream mediaStream = mFactory.createLocalMediaStream(streamId); - - if (mediaStream == null) { - // XXX The following does not follow the getUserMedia() algorithm - // specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. 
- result.error( - /* type */ "getUserMediaFailed", - "Failed to create new media stream", null); - return; - } - - getUserMediaImpl.getUserMedia(constraints, result, mediaStream); - } - - public void getSources(Result result) { - ConstraintsArray array = new ConstraintsArray(); - String[] names = new String[Camera.getNumberOfCameras()]; - - for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { - ConstraintsMap info = getCameraInfo(i); - if (info != null) { - array.pushMap(info); - } - } - - ConstraintsMap audio = new ConstraintsMap(); - audio.putString("label", "Audio"); - audio.putString("deviceId", "audio-1"); - audio.putString("facing", ""); - audio.putString("kind", "audioinput"); - array.pushMap(audio); - result.success(array); - } - - public void mediaStreamTrackStop(final String id) { - // Is this functionality equivalent to `mediaStreamTrackRelease()` ? - // if so, we should merge this two and remove track from stream as well. - MediaStreamTrack track = localTracks.get(id); - if (track == null) { - Log.d(TAG, "mediaStreamTrackStop() track is null"); - return; - } - track.setEnabled(false); - if (track.kind().equals("video")) { - getUserMediaImpl.removeVideoCapturer(id); - } - localTracks.remove(id); - // What exactly does `detached` mean in doc? 
- // see: https://www.w3.org/TR/mediacapture-streams/#track-detached + }; } - public void mediaStreamTrackSetEnabled(final String id, final boolean enabled) { - MediaStreamTrack track = localTracks.get(id); - if (track == null) { - Log.d(TAG, "mediaStreamTrackSetEnabled() track is null"); - return; - } else if (track.enabled() == enabled) { - return; + private void stopListening() { + methodCallHandler.dispose(); + methodCallHandler = null; + methodChannel.setMethodCallHandler(null); + eventChannel.setStreamHandler(null); + if (AudioSwitchManager.instance != null) { + Log.d(TAG, "Stopping the audio manager..."); + AudioSwitchManager.instance.stop(); } - track.setEnabled(enabled); } - public void mediaStreamTrackSwitchCamera(final String id) { - MediaStreamTrack track = localTracks.get(id); - if (track != null) { - getUserMediaImpl.switchCamera(id); - } + @Override + public void onListen(Object arguments, EventChannel.EventSink events) { + eventSink = new AnyThreadSink(events); } - - public void mediaStreamTrackRelease(final String streamId, final String _trackId) { - MediaStream stream = localStreams.get(streamId); - if (stream == null) { - Log.d(TAG, "mediaStreamTrackRelease() stream is null"); - return; - } - MediaStreamTrack track = localTracks.get(_trackId); - if (track == null) { - Log.d(TAG, "mediaStreamTrackRelease() track is null"); - return; - } - track.setEnabled(false); // should we do this? 
- localTracks.remove(_trackId); - if (track.kind().equals("audio")) { - stream.removeTrack((AudioTrack) track); - } else if (track.kind().equals("video")) { - stream.removeTrack((VideoTrack) track); - getUserMediaImpl.removeVideoCapturer(_trackId); - } + @Override + public void onCancel(Object arguments) { + eventSink = null; } - public ConstraintsMap getCameraInfo(int index) { - Camera.CameraInfo info = new Camera.CameraInfo(); - - try { - Camera.getCameraInfo(index, info); - } catch (Exception e) { - Logging.e("CameraEnumerationAndroid", "getCameraInfo failed on index " + index, e); - return null; + public void sendEvent(Object event) { + if(eventSink != null) { + eventSink.success(event); } - ConstraintsMap params = new ConstraintsMap(); - String facing = info.facing == 1 ? "front" : "back"; - params.putString("label", "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation); - params.putString("deviceId", "" + index); - params.putString("facing", facing); - params.putString("kind", "videoinput"); - return params; } - private MediaConstraints defaultConstraints() { - MediaConstraints constraints = new MediaConstraints(); - // TODO video media - constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); - constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true")); - constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true")); - return constraints; - } + private class LifeCycleObserver implements Application.ActivityLifecycleCallbacks, DefaultLifecycleObserver { - public void peerConnectionSetConfiguration(ConstraintsMap configuration, final String id) { - PeerConnection peerConnection = getPeerConnection(id); - if (peerConnection == null) { - Log.d(TAG, "peerConnectionSetConfiguration() peerConnection is null"); - return; - } - peerConnection.setConfiguration(parseRTCConfiguration(configuration)); - } + @Override + public void 
onActivityCreated(Activity activity, Bundle savedInstanceState) { - public void peerConnectionAddStream(final String streamId, final String id, Result result) { - MediaStream mediaStream = localStreams.get(streamId); - if (mediaStream == null) { - Log.d(TAG, "peerConnectionAddStream() mediaStream is null"); - return; - } - PeerConnection peerConnection = getPeerConnection(id); - if (peerConnection != null) { - boolean res = peerConnection.addStream(mediaStream); - Log.d(TAG, "addStream" + result); - result.success(res); - } else { - Log.d(TAG, "peerConnectionAddStream() peerConnection is null"); - result.error("peerConnectionAddStreamFailed", "peerConnectionAddStream() peerConnection is null", null); } - } - public void peerConnectionRemoveStream(final String streamId, final String id, Result result) { - MediaStream mediaStream = localStreams.get(streamId); - if (mediaStream == null) { - Log.d(TAG, "peerConnectionRemoveStream() mediaStream is null"); - return; - } - PeerConnection peerConnection = getPeerConnection(id); - if (peerConnection != null) { - peerConnection.removeStream(mediaStream); - result.success(null); - } else { - Log.d(TAG, "peerConnectionRemoveStream() peerConnection is null"); - result.error("peerConnectionRemoveStreamFailed", "peerConnectionAddStream() peerConnection is null", null); - } - } + @Override + public void onActivityStarted(Activity activity) { - public void peerConnectionCreateOffer( - String id, - ConstraintsMap constraints, - final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - - if (peerConnection != null) { - peerConnection.createOffer(new SdpObserver() { - @Override - public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_OFFER_ERROR", s, null); - } - - @Override - public void onCreateSuccess(final SessionDescription sdp) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("sdp", sdp.description); - params.putString("type", sdp.type.canonicalForm()); - 
result.success(params.toMap()); - } - - @Override - public void onSetFailure(String s) { - } - - @Override - public void onSetSuccess() { - } - }, parseMediaConstraints(constraints)); - } else { - Log.d(TAG, "peerConnectionCreateOffer() peerConnection is null"); - result.error("WEBRTC_CREATE_OFFER_ERROR", "peerConnection is null", null); } - } - public void peerConnectionCreateAnswer( - String id, - ConstraintsMap constraints, - final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - - if (peerConnection != null) { - peerConnection.createAnswer(new SdpObserver() { - @Override - public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_ANSWER_ERROR", s, null); - } - - @Override - public void onCreateSuccess(final SessionDescription sdp) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("sdp", sdp.description); - params.putString("type", sdp.type.canonicalForm()); - result.success(params.toMap()); - } - - @Override - public void onSetFailure(String s) { - } - - @Override - public void onSetSuccess() { - } - }, parseMediaConstraints(constraints)); - } else { - Log.d(TAG, "peerConnectionCreateAnswer() peerConnection is null"); - result.error("WEBRTC_CREATE_ANSWER_ERROR", "peerConnection is null", null); + @Override + public void onActivityResumed(Activity activity) { + if (null != methodCallHandler) { + methodCallHandler.reStartCamera(); + } } - } - public void peerConnectionSetLocalDescription(ConstraintsMap sdpMap, final String id, final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - - Log.d(TAG, "peerConnectionSetLocalDescription() start"); - if (peerConnection != null) { - SessionDescription sdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm(sdpMap.getString("type")), - sdpMap.getString("sdp") - ); - - peerConnection.setLocalDescription(new SdpObserver() { - @Override - public void onCreateSuccess(final SessionDescription sdp) { - } - - @Override - public 
void onSetSuccess() { - result.success(null); - } - - @Override - public void onCreateFailure(String s) { - } - - @Override - public void onSetFailure(String s) { - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", s, null); - } - }, sdp); - } else { - Log.d(TAG, "peerConnectionSetLocalDescription() peerConnection is null"); - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", "peerConnection is null", null); + @Override + public void onResume(LifecycleOwner owner) { + if (null != methodCallHandler) { + methodCallHandler.reStartCamera(); + } } - Log.d(TAG, "peerConnectionSetLocalDescription() end"); - } - public void peerConnectionSetRemoteDescription(final ConstraintsMap sdpMap, final String id, final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - // final String d = sdpMap.getString("type"); - - Log.d(TAG, "peerConnectionSetRemoteDescription() start"); - if (peerConnection != null) { - SessionDescription sdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm(sdpMap.getString("type")), - sdpMap.getString("sdp") - ); - - peerConnection.setRemoteDescription(new SdpObserver() { - @Override - public void onCreateSuccess(final SessionDescription sdp) { - } - - @Override - public void onSetSuccess() { - result.success(null); - } - - @Override - public void onCreateFailure(String s) { - } - - @Override - public void onSetFailure(String s) { - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", s, null); - } - }, sdp); - } else { - Log.d(TAG, "peerConnectionSetRemoteDescription() peerConnection is null"); - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", "peerConnection is null", null); - } - Log.d(TAG, "peerConnectionSetRemoteDescription() end"); - } + @Override + public void onActivityPaused(Activity activity) { - public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final String id, final Result result) { - boolean res = false; - PeerConnection peerConnection = getPeerConnection(id); - Log.d(TAG, 
"peerConnectionAddICECandidate() start"); - if (peerConnection != null) { - IceCandidate candidate = new IceCandidate( - candidateMap.getString("sdpMid"), - candidateMap.getInt("sdpMLineIndex"), - candidateMap.getString("candidate") - ); - res = peerConnection.addIceCandidate(candidate); - } else { - Log.d(TAG, "peerConnectionAddICECandidate() peerConnection is null"); - result.error("peerConnectionAddICECandidateFailed", "peerConnectionAddICECandidate() peerConnection is null", null); } - result.success(res); - Log.d(TAG, "peerConnectionAddICECandidate() end"); - } - public void peerConnectionGetStats(String trackId, String id, final Result result) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionGetStats() peerConnection is null"); - } else { - pco.getStats(trackId, result); - } - } + @Override + public void onActivityStopped(Activity activity) { - public void peerConnectionClose(final String id) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionClose() peerConnection is null"); - } else { - pco.close(); - mPeerConnectionObservers.remove(id); } - } - public void mediaStreamRelease(final String id) { - MediaStream mediaStream = localStreams.get(id); - if (mediaStream != null) { - for (VideoTrack track : mediaStream.videoTracks) { - localTracks.remove(track.id()); - getUserMediaImpl.removeVideoCapturer(track.id()); - } - for (AudioTrack track : mediaStream.audioTracks) { - localTracks.remove(track.id()); - } - localStreams.remove(id); - } else { - Log.d(TAG, "mediaStreamRelease() mediaStream is null"); - } - } + @Override + public void onActivitySaveInstanceState(Activity activity, Bundle outState) { - public void createDataChannel(final String peerConnectionId, String label, ConstraintsMap config, Result result) { - // Forward to PeerConnectionObserver which deals 
with DataChannels - // because DataChannel is owned by PeerConnection. - PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "createDataChannel() peerConnection is null"); - } else { - pco.createDataChannel(label, config, result); } - } - public void dataChannelSend(String peerConnectionId, int dataChannelId, String data, String type) { - // Forward to PeerConnectionObserver which deals with DataChannels - // because DataChannel is owned by PeerConnection. - PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "dataChannelSend() peerConnection is null"); - } else { - pco.dataChannelSend(dataChannelId, data, type); - } - } + @Override + public void onActivityDestroyed(Activity activity) { - public void dataChannelClose(String peerConnectionId, int dataChannelId) { - // Forward to PeerConnectionObserver which deals with DataChannels - // because DataChannel is owned by PeerConnection. 
- PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "dataChannelClose() peerConnection is null"); - } else { - pco.dataChannelClose(dataChannelId); } } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index b45ed48ab2..0b0998f384 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -1,50 +1,100 @@ package com.cloudwebrtc.webrtc; import android.Manifest; +import android.app.Activity; import android.app.Fragment; import android.app.FragmentTransaction; +import android.content.ContentResolver; +import android.content.ContentValues; import android.content.Context; +import android.content.Intent; import android.content.pm.PackageManager; +import android.graphics.Point; +import android.hardware.camera2.CameraManager; +import android.media.AudioDeviceInfo; +import android.media.projection.MediaProjection; +import android.media.projection.MediaProjectionManager; +import android.net.Uri; import android.os.Build; +import android.os.Build.VERSION; +import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; +import android.os.ParcelFileDescriptor; import android.os.ResultReceiver; +import android.provider.MediaStore; import android.util.Log; -import android.content.Intent; -import android.app.Activity; - -import android.media.projection.MediaProjection; -import android.media.projection.MediaProjectionManager; - +import android.util.Pair; +import android.util.SparseArray; +import android.view.Display; +import android.view.WindowManager; + +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; + +import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; +import com.cloudwebrtc.webrtc.audio.AudioUtils; 
+import com.cloudwebrtc.webrtc.audio.LocalAudioTrack; +import com.cloudwebrtc.webrtc.record.AudioChannel; +import com.cloudwebrtc.webrtc.record.AudioSamplesInterceptor; +import com.cloudwebrtc.webrtc.record.MediaRecorderImpl; +import com.cloudwebrtc.webrtc.record.OutputAudioSamplesInterceptor; import com.cloudwebrtc.webrtc.utils.Callback; import com.cloudwebrtc.webrtc.utils.ConstraintsArray; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import com.cloudwebrtc.webrtc.utils.EglUtils; +import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils; import com.cloudwebrtc.webrtc.utils.ObjectType; import com.cloudwebrtc.webrtc.utils.PermissionUtils; - +import com.cloudwebrtc.webrtc.video.LocalVideoTrack; +import com.cloudwebrtc.webrtc.video.VideoCapturerInfo; + +import org.webrtc.AudioSource; +import org.webrtc.AudioTrack; +import org.webrtc.Camera1Capturer; +import org.webrtc.Camera1Enumerator; +import org.webrtc.Camera1Helper; +import org.webrtc.Camera2Capturer; +import org.webrtc.Camera2Enumerator; +import org.webrtc.Camera2Helper; +import org.webrtc.CameraEnumerator; +import org.webrtc.CameraVideoCapturer; +import org.webrtc.MediaConstraints; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.Size; +import org.webrtc.SurfaceTextureHelper; +import org.webrtc.VideoCapturer; +import org.webrtc.VideoSource; +import org.webrtc.VideoTrack; +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import org.webrtc.*; - import io.flutter.plugin.common.MethodChannel.Result; /** - * The implementation of {@code getUserMedia} extracted into a separate file in - * order to reduce complexity and to (somewhat) separate concerns. 
+ * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce + * complexity and to (somewhat) separate concerns. */ -class GetUserMediaImpl{ - private static final int DEFAULT_WIDTH = 1280; +public class GetUserMediaImpl { + private static final int DEFAULT_WIDTH = 1280; private static final int DEFAULT_HEIGHT = 720; - private static final int DEFAULT_FPS = 30; + private static final int DEFAULT_FPS = 30; private static final String PERMISSION_AUDIO = Manifest.permission.RECORD_AUDIO; private static final String PERMISSION_VIDEO = Manifest.permission.CAMERA; private static final String PERMISSION_SCREEN = "android.permission.MediaProjection"; - private static int CAPTURE_PERMISSION_REQUEST_CODE = 1; + private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1; private static final String GRANT_RESULTS = "GRANT_RESULT"; private static final String PERMISSIONS = "PERMISSION"; private static final String PROJECTION_DATA = "PROJECTION_DATA"; @@ -53,18 +103,29 @@ class GetUserMediaImpl{ static final String TAG = FlutterWebRTCPlugin.TAG; - private final Map mVideoCapturers - = new HashMap(); - + private final Map mVideoCapturers = new HashMap<>(); + private final Map mSurfaceTextureHelpers = new HashMap<>(); + private final StateProvider stateProvider; private final Context applicationContext; - private final FlutterWebRTCPlugin plugin; static final int minAPILevel = Build.VERSION_CODES.LOLLIPOP; - private MediaProjectionManager mProjectionManager = null; - private static MediaProjection sMediaProjection = null; - public void screenRequestPremissions(ResultReceiver resultReceiver){ - Activity activity = plugin.getActivity(); + final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor(); + private OutputAudioSamplesInterceptor outputSamplesInterceptor = null; + JavaAudioDeviceModule audioDeviceModule; + private final SparseArray mediaRecorders = new SparseArray<>(); + private AudioDeviceInfo preferredInput = 
null; + private boolean isTorchOn; + private Intent mediaProjectionData = null; + + + public void screenRequestPermissions(ResultReceiver resultReceiver) { + mediaProjectionData = null; + final Activity activity = stateProvider.getActivity(); + if (activity == null) { + // Activity went away, nothing we can do. + return; + } Bundle args = new Bundle(); args.putParcelable(RESULT_RECEIVER, resultReceiver); @@ -73,10 +134,11 @@ public void screenRequestPremissions(ResultReceiver resultReceiver){ ScreenRequestPermissionsFragment fragment = new ScreenRequestPermissionsFragment(); fragment.setArguments(args); - FragmentTransaction transaction - = activity.getFragmentManager().beginTransaction().add( - fragment, - fragment.getClass().getName()); + FragmentTransaction transaction = + activity + .getFragmentManager() + .beginTransaction() + .add(fragment, fragment.getClass().getName()); try { transaction.commit(); @@ -85,17 +147,33 @@ public void screenRequestPremissions(ResultReceiver resultReceiver){ } } + public void requestCapturePermission(final Result result) { + screenRequestPermissions( + new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult(int requestCode, Bundle resultData) { + int resultCode = resultData.getInt(GRANT_RESULTS); + if (resultCode == Activity.RESULT_OK) { + mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + result.success(true); + } else { + result.success(false); + } + } + }); + } + public static class ScreenRequestPermissionsFragment extends Fragment { - private ResultReceiver resultReceiver = null; - private int requestCode = 0; - private int resultCode = 0; + private ResultReceiver resultReceiver = null; + private int requestCode = 0; + private final int resultCode = 0; private void checkSelfPermissions(boolean requestPermissions) { - if(resultCode != Activity.RESULT_OK) { + if (resultCode != Activity.RESULT_OK) { Activity activity = this.getActivity(); Bundle args = 
getArguments(); - resultReceiver = (ResultReceiver) args.getParcelable(RESULT_RECEIVER); + resultReceiver = args.getParcelable(RESULT_RECEIVER); requestCode = args.getInt(REQUEST_CODE); requestStart(activity, requestCode); } @@ -103,12 +181,13 @@ private void checkSelfPermissions(boolean requestPermissions) { public void requestStart(Activity activity, int requestCode) { if (android.os.Build.VERSION.SDK_INT < minAPILevel) { - Log.w(TAG, "Can't run requestStart() due to a low API level. API level 21 or higher is required."); + Log.w( + TAG, + "Can't run requestStart() due to a low API level. API level 21 or higher is required."); return; } else { MediaProjectionManager mediaProjectionManager = - (MediaProjectionManager) activity.getSystemService( - Context.MEDIA_PROJECTION_SERVICE); + (MediaProjectionManager) activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE); // call for the projection manager this.startActivityForResult( @@ -116,7 +195,6 @@ public void requestStart(Activity activity, int requestCode) { } } - @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); @@ -141,71 +219,69 @@ public void onActivityResult(int requestCode, int resultCode, Intent data) { private void finish() { Activity activity = getActivity(); if (activity != null) { - activity.getFragmentManager().beginTransaction() - .remove(this) - .commitAllowingStateLoss(); + activity.getFragmentManager().beginTransaction().remove(this).commitAllowingStateLoss(); } } @Override public void onResume() { super.onResume(); - checkSelfPermissions(/* requestPermissions */ true); } } - GetUserMediaImpl( - FlutterWebRTCPlugin plugin, - Context applicationContext) { - this.plugin = plugin; - this.applicationContext = applicationContext; + GetUserMediaImpl(StateProvider stateProvider, Context applicationContext) { + this.stateProvider = stateProvider; + this.applicationContext = applicationContext; + } + + static 
private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); } /** * Includes default constraints set for the audio media type. - * @param audioConstraints MediaConstraints instance to be filled - * with the default constraints for audio media type. + * + * @param audioConstraints MediaConstraints instance to be filled with the default + * constraints for audio media type. */ private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); + new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("echoCancellation", "true")); + new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); + audioConstraints.optional.add(new MediaConstraints.KeyValuePair("echoCancellation", "true")); audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); + new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); audioConstraints.optional.add( - new MediaConstraints.KeyValuePair( - "googDAEchoCancellation", "true")); + new MediaConstraints.KeyValuePair("googDAEchoCancellation", "true")); } /** * Create video capturer via given facing mode - * @param enumerator a CameraEnumerator provided by webrtc - * it can be Camera1Enumerator or Camera2Enumerator - * @param isFacing 'user' mapped with 'front' is true (default) - * 'environment' mapped with 'back' is false - * @param sourceId (String) use this sourceId and ignore facing mode if specified. - * @return VideoCapturer can invoke with startCapture/stopCapture - * null if not matched camera with specified facing mode. 
+ * + * @param enumerator a CameraEnumerator provided by webrtc it can be Camera1Enumerator or + * Camera2Enumerator + * @param isFacing 'user' mapped with 'front' is true (default) 'environment' mapped with 'back' + * is false + * @param sourceId (String) use this sourceId and ignore facing mode if specified. + * @return Pair of deviceName to VideoCapturer. Can invoke with startCapture/stopCapture null + * if not matched camera with specified facing mode. */ - private VideoCapturer createVideoCapturer( - CameraEnumerator enumerator, - boolean isFacing, - String sourceId) { - VideoCapturer videoCapturer = null; - + private Pair createVideoCapturer( + CameraEnumerator enumerator, boolean isFacing, String sourceId, CameraEventsHandler cameraEventsHandler) { + VideoCapturer videoCapturer; // if sourceId given, use specified sourceId first final String[] deviceNames = enumerator.getDeviceNames(); - if (sourceId != null) { + if (sourceId != null && !sourceId.equals("")) { for (String name : deviceNames) { if (name.equals(sourceId)) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + videoCapturer = enumerator.createCapturer(name, cameraEventsHandler); if (videoCapturer != null) { Log.d(TAG, "create user specified camera " + name + " succeeded"); - return videoCapturer; + return new Pair<>(name, videoCapturer); } else { Log.d(TAG, "create user specified camera " + name + " failed"); break; // fallback to facing mode @@ -218,43 +294,48 @@ private VideoCapturer createVideoCapturer( String facingStr = isFacing ? 
"front" : "back"; for (String name : deviceNames) { if (enumerator.isFrontFacing(name) == isFacing) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + videoCapturer = enumerator.createCapturer(name, cameraEventsHandler); if (videoCapturer != null) { Log.d(TAG, "Create " + facingStr + " camera " + name + " succeeded"); - return videoCapturer; + + return new Pair<>(name, videoCapturer); } else { Log.e(TAG, "Create " + facingStr + " camera " + name + " failed"); } } } - // should we fallback to available camera automatically? - return videoCapturer; + + // falling back to the first available camera + if (deviceNames.length > 0) { + videoCapturer = enumerator.createCapturer(deviceNames[0], cameraEventsHandler); + Log.d(TAG, "Falling back to the first available camera"); + return new Pair<>(deviceNames[0], videoCapturer); + } + + return null; } /** * Retrieves "facingMode" constraint value. * - * @param mediaConstraints a ConstraintsMap which represents "GUM" - * constraints argument. - * @return String value of "facingMode" constraints in "GUM" or - * null if not specified. + * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument. + * @return String value of "facingMode" constraints in "GUM" or null if not specified. */ private String getFacingMode(ConstraintsMap mediaConstraints) { - return - mediaConstraints == null - ? null - : mediaConstraints.getString("facingMode"); + return mediaConstraints == null ? null : mediaConstraints.getString("facingMode"); } /** * Retrieves "sourceId" constraint value. * - * @param mediaConstraints a ConstraintsMap which represents "GUM" - * constraints argument - * @return String value of "sourceId" optional "GUM" constraint or - * null if not specified. + * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument + * @return String value of "sourceId" optional "GUM" constraint or null if not specified. 
*/ private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { + if (mediaConstraints != null + && mediaConstraints.hasKey("deviceId")) { + return mediaConstraints.getString("deviceId"); + } if (mediaConstraints != null && mediaConstraints.hasKey("optional") && mediaConstraints.getType("optional") == ObjectType.Array) { @@ -264,9 +345,7 @@ private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { if (optional.getType(i) == ObjectType.Map) { ConstraintsMap option = optional.getMap(i); - if (option.hasKey("sourceId") - && option.getType("sourceId") - == ObjectType.String) { + if (option.hasKey("sourceId") && option.getType("sourceId") == ObjectType.String) { return option.getString("sourceId"); } } @@ -276,271 +355,275 @@ private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { return null; } - private AudioTrack getUserAudio(ConstraintsMap constraints) { - MediaConstraints audioConstraints; + private ConstraintsMap getUserAudio(ConstraintsMap constraints, MediaStream stream) { + AudioSwitchManager.instance.start(); + MediaConstraints audioConstraints = new MediaConstraints(); + String deviceId = null; if (constraints.getType("audio") == ObjectType.Boolean) { - audioConstraints = new MediaConstraints(); addDefaultAudioConstraints(audioConstraints); } else { - audioConstraints - = plugin.parseMediaConstraints( - constraints.getMap("audio")); + audioConstraints = MediaConstraintsUtils.parseMediaConstraints(constraints.getMap("audio")); + deviceId = getSourceIdConstraint(constraints.getMap("audio")); } Log.i(TAG, "getUserMedia(audio): " + audioConstraints); - String trackId = plugin.getNextTrackUUID(); - PeerConnectionFactory pcFactory = plugin.mFactory; + String trackId = stateProvider.getNextTrackUUID(); + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); AudioSource audioSource = pcFactory.createAudioSource(audioConstraints); + AudioTrack track = pcFactory.createAudioTrack(trackId, audioSource); + 
stream.addTrack(track); + + stateProvider.putLocalTrack(track.id(), new LocalAudioTrack(track)); + + ConstraintsMap trackParams = new ConstraintsMap(); + trackParams.putBoolean("enabled", track.enabled()); + trackParams.putString("id", track.id()); + trackParams.putString("kind", "audio"); + trackParams.putString("label", track.id()); + trackParams.putString("readyState", track.state().toString()); + trackParams.putBoolean("remote", false); + + if (deviceId == null) { + if (VERSION.SDK_INT >= VERSION_CODES.M) { + deviceId = "" + getPreferredInputDevice(preferredInput); + } + } - return pcFactory.createAudioTrack(trackId, audioSource); + ConstraintsMap settings = new ConstraintsMap(); + settings.putString("deviceId", deviceId); + settings.putString("kind", "audioinput"); + settings.putBoolean("autoGainControl", true); + settings.putBoolean("echoCancellation", true); + settings.putBoolean("noiseSuppression", true); + settings.putInt("channelCount", 1); + settings.putInt("latency", 0); + trackParams.putMap("settings", settings.toMap()); + + return trackParams; } /** - * Implements {@code getUserMedia} without knowledge whether the necessary - * permissions have already been granted. If the necessary permissions have - * not been granted yet, they will be requested. + * Implements {@code getUserMedia} without knowledge whether the necessary permissions have + * already been granted. If the necessary permissions have not been granted yet, they will be + * requested. */ void getUserMedia( - final ConstraintsMap constraints, - final Result result, - final MediaStream mediaStream) { + final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { - // TODO: change getUserMedia constraints format to support new syntax - // constraint format seems changed, and there is no mandatory any more. 
- // and has a new syntax/attrs to specify resolution - // should change `parseConstraints()` according - // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints + final ArrayList requestPermissions = new ArrayList<>(); - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; + if (constraints.hasKey("audio")) { + switch (constraints.getType("audio")) { + case Boolean: + if (constraints.getBoolean("audio")) { + requestPermissions.add(PERMISSION_AUDIO); + } + break; + case Map: + requestPermissions.add(PERMISSION_AUDIO); + break; + default: + break; + } + } - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); + if (constraints.hasKey("video")) { + switch (constraints.getType("video")) { + case Boolean: + if (constraints.getBoolean("video")) { + requestPermissions.add(PERMISSION_VIDEO); + } + break; + case Map: + requestPermissions.add(PERMISSION_VIDEO); + break; + default: + break; } } - boolean requestScreenCapturer = videoConstraintsMandatory.hasKey("chromeMediaSource") && - videoConstraintsMandatory.getString("chromeMediaSource").equals("desktop"); + // According to step 2 of the getUserMedia() algorithm, + // requestedMediaTypes is the set of media types in constraints with + // either a dictionary value or a value of "true". + // According to step 3 of the getUserMedia() algorithm, if + // requestedMediaTypes is the empty set, the method invocation fails + // with a TypeError. + if (requestPermissions.isEmpty()) { + resultError("getUserMedia", "TypeError, constraints requests no media types", result); + return; + } - final ArrayList requestPermissions = new ArrayList<>(); + /// Only systems pre-M, no additional permission request is needed. 
+ if (VERSION.SDK_INT < VERSION_CODES.M) { + getUserMedia(constraints, result, mediaStream, requestPermissions); + return; + } + + requestPermissions( + requestPermissions, + /* successCallback */ new Callback() { + @Override + public void invoke(Object... args) { + List grantedPermissions = (List) args[0]; - if(requestScreenCapturer) - { - final ConstraintsMap videoConstraintsMandatory2 = videoConstraintsMandatory; - screenRequestPremissions(new ResultReceiver(new Handler(Looper.getMainLooper())) { - @Override - protected void onReceiveResult( - int requestCode, - Bundle resultData) { - - /*Create ScreenCapture*/ - int resultCode = resultData.getInt(GRANT_RESULTS); - Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); - - if (resultCode != Activity.RESULT_OK) { - result.error( null, - "User didn't give permission to capture the screen.", null); - return; + getUserMedia(constraints, result, mediaStream, grantedPermissions); } + }, + /* errorCallback */ new Callback() { + @Override + public void invoke(Object... args) { + // According to step 10 Permission Failure of the + // getUserMedia() algorithm, if the user has denied + // permission, fail "with a new DOMException object whose + // name attribute has the value NotAllowedError." 
+ resultError("getUserMedia", "DOMException, NotAllowedError", result); + } + }); + } - MediaStreamTrack[] tracks = new MediaStreamTrack[1]; - VideoCapturer videoCapturer = null; - videoCapturer = new ScreenCapturerAndroid( - mediaProjectionData, new MediaProjection.Callback() { + void getDisplayMedia( + final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { + if (mediaProjectionData == null) { + screenRequestPermissions( + new ResultReceiver(new Handler(Looper.getMainLooper())) { @Override - public void onStop() { - Log.e(TAG, "User revoked permission to capture the screen."); - result.error( null, - "User revoked permission to capture the screen.", null); + protected void onReceiveResult(int requestCode, Bundle resultData) { + Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + int resultCode = resultData.getInt(GRANT_RESULTS); + + if (resultCode != Activity.RESULT_OK) { + resultError("screenRequestPermissions", "User didn't give permission to capture the screen.", result); + return; + } + getDisplayMedia(result, mediaStream, mediaProjectionData); } }); + } else { + getDisplayMedia(result, mediaStream, mediaProjectionData); + } + } - if (videoCapturer != null) { - - PeerConnectionFactory pcFactory = plugin.mFactory; - VideoSource videoSource = pcFactory.createVideoSource(videoCapturer); + private void getDisplayMedia(final Result result, final MediaStream mediaStream, final Intent mediaProjectionData) { + /* Create ScreenCapture */ + VideoTrack displayTrack = null; + VideoCapturer videoCapturer = null; + videoCapturer = + new OrientationAwareScreenCapturer( + mediaProjectionData, + new MediaProjection.Callback() { + @Override + public void onStop() { + super.onStop(); + // After Huawei P30 and Android 10 version test, the onstop method is called, which will not affect the next process, + // and there is no need to call the resulterror method + //resultError("MediaProjection.Callback()", "User revoked 
permission to capture the screen.", result); + } + }); + if (videoCapturer == null) { + resultError("screenRequestPermissions", "GetDisplayMediaFailed, User revoked permission to capture the screen.", result); + return; + } - // Fall back to defaults if keys are missing. - int width - = videoConstraintsMandatory2.hasKey("minWidth") - ? videoConstraintsMandatory2.getInt("minWidth") - : DEFAULT_WIDTH; - int height - = videoConstraintsMandatory2.hasKey("minHeight") - ? videoConstraintsMandatory2.getInt("minHeight") - : DEFAULT_HEIGHT; - int fps - = videoConstraintsMandatory2.hasKey("minFrameRate") - ? videoConstraintsMandatory2.getInt("minFrameRate") - : DEFAULT_FPS; + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(true); - videoCapturer.startCapture(width, height, fps); + String threadName = Thread.currentThread().getName() + "_texture_screen_thread"; + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer.initialize( + surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); - String trackId = plugin.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); + WindowManager wm = + (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE); - Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + fps); - videoSource.adaptOutputFormat(width, height, fps); + Display display = wm.getDefaultDisplay(); + Point size = new Point(); + display.getRealSize(size); - tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); + VideoCapturerInfoEx info = new VideoCapturerInfoEx(); + info.width = size.x; + info.height = size.y; + info.fps = DEFAULT_FPS; + info.isScreenCapture = true; + info.capturer = videoCapturer; - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); - ConstraintsMap 
successResult = new ConstraintsMap(); + videoCapturer.startCapture(info.width, info.height, info.fps); + Log.d(TAG, "OrientationAwareScreenCapturer.startCapture: " + info.width + "x" + info.height + "@" + info.fps); - for (MediaStreamTrack track : tracks) { - if (track == null) { - continue; - } + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, info); - String id = track.id(); + displayTrack = pcFactory.createVideoTrack(trackId, videoSource); - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - plugin.localTracks.put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); - - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); - - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); - } else { - videoTracks.pushMap(track_); - } - } + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + ConstraintsMap successResult = new ConstraintsMap(); - String streamId = mediaStream.label(); + if (displayTrack != null) { + String id = displayTrack.id(); - Log.d(TAG, "MediaStream id: " + streamId); - plugin.localStreams.put(streamId, mediaStream); + LocalVideoTrack displayLocalVideoTrack = new LocalVideoTrack(displayTrack); + videoSource.setVideoProcessor(displayLocalVideoTrack); - successResult.putString("streamId", streamId); - successResult.putArray("audioTracks", audioTracks.toArrayList()); - successResult.putArray("videoTracks", audioTracks.toArrayList()); - result.success(successResult.toMap()); - } + stateProvider.putLocalTrack(id, displayLocalVideoTrack); - } - }); - return; - } + ConstraintsMap track_ = new ConstraintsMap(); + String kind = 
displayTrack.kind(); - if (constraints.hasKey("audio")) { - switch (constraints.getType("audio")) { - case Boolean: - if (constraints.getBoolean("audio")) { - requestPermissions.add(PERMISSION_AUDIO); - } - break; - case Map: - requestPermissions.add(PERMISSION_AUDIO); - break; - default: - break; - } - } + track_.putBoolean("enabled", displayTrack.enabled()); + track_.putString("id", id); + track_.putString("kind", kind); + track_.putString("label", kind); + track_.putString("readyState", displayTrack.state().toString()); + track_.putBoolean("remote", false); - if (constraints.hasKey("video") && !requestScreenCapturer) { - switch (constraints.getType("video")) { - case Boolean: - if (constraints.getBoolean("video")) { - requestPermissions.add(PERMISSION_VIDEO); - } - break; - case Map: - requestPermissions.add(PERMISSION_VIDEO); - break; - default: - break; - } + videoTracks.pushMap(track_); + mediaStream.addTrack(displayTrack); } - // According to step 2 of the getUserMedia() algorithm, - // requestedMediaTypes is the set of media types in constraints with - // either a dictionary value or a value of "true". - // According to step 3 of the getUserMedia() algorithm, if - // requestedMediaTypes is the empty set, the method invocation fails - // with a TypeError. - if (requestPermissions.isEmpty()) { - result.error( - "TypeError", - "constraints requests no media types", null); - return; - } + String streamId = mediaStream.getId(); - requestPermissions( - requestPermissions, - /* successCallback */ new Callback() { - @Override - public void invoke(Object... args) { - List grantedPermissions = (List) args[0]; - - getUserMedia( - constraints, - result, - mediaStream, - grantedPermissions); - } - }, - /* errorCallback */ new Callback() { - @Override - public void invoke(Object... 
args) { - // According to step 10 Permission Failure of the - // getUserMedia() algorithm, if the user has denied - // permission, fail "with a new DOMException object whose - // name attribute has the value NotAllowedError." - result.error("DOMException", "NotAllowedError", null); - } - } - ); + Log.d(TAG, "MediaStream id: " + streamId); + stateProvider.putLocalStream(streamId, mediaStream); + successResult.putString("streamId", streamId); + successResult.putArray("audioTracks", audioTracks.toArrayList()); + successResult.putArray("videoTracks", videoTracks.toArrayList()); + result.success(successResult.toMap()); } /** - * Implements {@code getUserMedia} with the knowledge that the necessary - * permissions have already been granted. If the necessary permissions have - * not been granted yet, they will NOT be requested. + * Implements {@code getUserMedia} with the knowledge that the necessary permissions have already + * been granted. If the necessary permissions have not been granted yet, they will NOT be + * requested. */ private void getUserMedia( ConstraintsMap constraints, Result result, MediaStream mediaStream, List grantedPermissions) { - MediaStreamTrack[] tracks = new MediaStreamTrack[2]; + ConstraintsMap[] trackParams = new ConstraintsMap[2]; // If we fail to create either, destroy the other one and fail. 
if ((grantedPermissions.contains(PERMISSION_AUDIO) - && (tracks[0] = getUserAudio(constraints)) == null) + && (trackParams[0] = getUserAudio(constraints, mediaStream)) == null) || (grantedPermissions.contains(PERMISSION_VIDEO) - && (tracks[1] = getUserVideo(constraints)) == null)) { - for (MediaStreamTrack track : tracks) { + && (trackParams[1] = getUserVideo(constraints, mediaStream)) == null)) { + for (MediaStreamTrack track : mediaStream.audioTracks) { + if (track != null) { + track.dispose(); + } + } + for (MediaStreamTrack track : mediaStream.videoTracks) { if (track != null) { track.dispose(); } } - // XXX The following does not follow the getUserMedia() algorithm // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. - result.error( - /* type */ "GetUserMediaFailed", - "Failed to create new track", null); + resultError("getUserMedia", "Failed to create new track.", result); return; } @@ -548,179 +631,458 @@ private void getUserMedia( ConstraintsArray videoTracks = new ConstraintsArray(); ConstraintsMap successResult = new ConstraintsMap(); - for (MediaStreamTrack track : tracks) { - if (track == null) { + for (ConstraintsMap trackParam : trackParams) { + if (trackParam == null) { continue; } - - String id = track.id(); - - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - plugin.localTracks.put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); - - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); - - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); + if (trackParam.getString("kind").equals("audio")) { + 
audioTracks.pushMap(trackParam); } else { - videoTracks.pushMap(track_); + videoTracks.pushMap(trackParam); } } - String streamId = mediaStream.label(); - + String streamId = mediaStream.getId(); Log.d(TAG, "MediaStream id: " + streamId); - plugin.localStreams.put(streamId, mediaStream); + stateProvider.putLocalStream(streamId, mediaStream); successResult.putString("streamId", streamId); successResult.putArray("audioTracks", audioTracks.toArrayList()); - successResult.putArray("videoTracks", audioTracks.toArrayList()); + successResult.putArray("videoTracks", videoTracks.toArrayList()); result.success(successResult.toMap()); } + private boolean isFacing = true; + + /** + * @return Returns the integer at the key, or the `ideal` property if it is a map. + */ + @Nullable + private Integer getConstrainInt(@Nullable ConstraintsMap constraintsMap, String key) { + if (constraintsMap == null) { + return null; + } + + if (constraintsMap.getType(key) == ObjectType.Number) { + try { + return constraintsMap.getInt(key); + } catch (Exception e) { + // Could be a double instead + return (int) Math.round(constraintsMap.getDouble(key)); + } + } + + if (constraintsMap.getType(key) == ObjectType.String) { + try { + return Integer.parseInt(constraintsMap.getString(key)); + } catch (Exception e) { + // Could be a double instead + return (int) Math.round(Double.parseDouble(constraintsMap.getString(key))); + } + } + + if (constraintsMap.getType(key) == ObjectType.Map) { + ConstraintsMap innerMap = constraintsMap.getMap(key); + if (constraintsMap.getType("ideal") == ObjectType.Number) { + return innerMap.getInt("ideal"); + } + } - private VideoTrack getUserVideo(ConstraintsMap constraints) { + return null; + } + + private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream mediaStream) { ConstraintsMap videoConstraintsMap = null; ConstraintsMap videoConstraintsMandatory = null; if (constraints.getType("video") == ObjectType.Map) { videoConstraintsMap = 
constraints.getMap("video"); if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); + && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); } } Log.i(TAG, "getUserMedia(video): " + videoConstraintsMap); - // NOTE: to support Camera2, the device should: - // 1. Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP - // 2. all camera support level should greater than LEGACY - // see: https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL - // TODO Enable camera2 enumerator - Context context = plugin.getContext(); - CameraEnumerator cameraEnumerator; - - if (Camera2Enumerator.isSupported(context)) { - Log.d(TAG, "Creating video capturer using Camera2 API."); - cameraEnumerator = new Camera2Enumerator(context); - } else { - Log.d(TAG, "Creating video capturer using Camera1 API."); - cameraEnumerator = new Camera1Enumerator(false); - } + // NOTE: to support Camera2, the device should: + // 1. Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + // 2. 
all camera support level should greater than LEGACY + // see: + // https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL + // TODO Enable camera2 enumerator + CameraEnumerator cameraEnumerator; + + if (Camera2Enumerator.isSupported(applicationContext)) { + Log.d(TAG, "Creating video capturer using Camera2 API."); + cameraEnumerator = new Camera2Enumerator(applicationContext); + } else { + Log.d(TAG, "Creating video capturer using Camera1 API."); + cameraEnumerator = new Camera1Enumerator(false); + } + + String facingMode = getFacingMode(videoConstraintsMap); + isFacing = facingMode == null || !facingMode.equals("environment"); + String deviceId = getSourceIdConstraint(videoConstraintsMap); + CameraEventsHandler cameraEventsHandler = new CameraEventsHandler(); + Pair result = createVideoCapturer(cameraEnumerator, isFacing, deviceId, cameraEventsHandler); - String facingMode = getFacingMode(videoConstraintsMap); - boolean isFacing - = facingMode == null || !facingMode.equals("environment"); - String sourceId = getSourceIdConstraint(videoConstraintsMap); + if (result == null) { + return null; + } - VideoCapturer videoCapturer - = createVideoCapturer(cameraEnumerator, isFacing, sourceId); + deviceId = result.first; + VideoCapturer videoCapturer = result.second; - if (videoCapturer == null) { + if (facingMode == null && cameraEnumerator.isFrontFacing(deviceId)) { + facingMode = "user"; + } else if (facingMode == null && cameraEnumerator.isBackFacing(deviceId)) { + facingMode = "environment"; + } + // else, leave facingMode as it was + + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(false); + String threadName = Thread.currentThread().getName() + "_texture_camera_thread"; + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + + if 
(surfaceTextureHelper == null) { + Log.e(TAG, "surfaceTextureHelper is null"); return null; } - PeerConnectionFactory pcFactory = plugin.mFactory; - VideoSource videoSource = pcFactory.createVideoSource(videoCapturer); + videoCapturer.initialize( + surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); - // Fall back to defaults if keys are missing. - int width - = videoConstraintsMandatory.hasKey("minWidth") + VideoCapturerInfoEx info = new VideoCapturerInfoEx(); + + Integer videoWidth = getConstrainInt(videoConstraintsMap, "width"); + int targetWidth = videoWidth != null + ? videoWidth + : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") ? videoConstraintsMandatory.getInt("minWidth") : DEFAULT_WIDTH; - int height - = videoConstraintsMandatory.hasKey("minHeight") + + Integer videoHeight = getConstrainInt(videoConstraintsMap, "height"); + int targetHeight = videoHeight != null + ? videoHeight + : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") ? videoConstraintsMandatory.getInt("minHeight") : DEFAULT_HEIGHT; - int fps - = videoConstraintsMandatory.hasKey("minFrameRate") + + Integer videoFrameRate = getConstrainInt(videoConstraintsMap, "frameRate"); + int targetFps = videoFrameRate != null + ? videoFrameRate + : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") ? videoConstraintsMandatory.getInt("minFrameRate") : DEFAULT_FPS; - videoCapturer.startCapture(width, height, fps); + info.width = targetWidth; + info.height = targetHeight; + info.fps = targetFps; + info.capturer = videoCapturer; + info.cameraName = deviceId; + + // Find actual capture format. 
+ Size actualSize = null; + if (videoCapturer instanceof Camera1Capturer) { + int cameraId = Camera1Helper.getCameraId(deviceId); + actualSize = Camera1Helper.findClosestCaptureFormat(cameraId, targetWidth, targetHeight); + } else if (videoCapturer instanceof Camera2Capturer) { + CameraManager cameraManager = (CameraManager) applicationContext.getSystemService(Context.CAMERA_SERVICE); + actualSize = Camera2Helper.findClosestCaptureFormat(cameraManager, deviceId, targetWidth, targetHeight); + } + + if (actualSize != null) { + info.width = actualSize.width; + info.height = actualSize.height; + } + + info.cameraEventsHandler = cameraEventsHandler; + videoCapturer.startCapture(targetWidth, targetHeight, targetFps); + + cameraEventsHandler.waitForCameraOpen(); + + + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, info); + mSurfaceTextureHelpers.put(trackId, surfaceTextureHelper); - String trackId = plugin.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); + Log.d(TAG, "Target: " + targetWidth + "x" + targetHeight + "@" + targetFps + ", Actual: " + info.width + "x" + info.height + "@" + info.fps); - Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + fps); - videoSource.adaptOutputFormat(width, height, fps); + VideoTrack track = pcFactory.createVideoTrack(trackId, videoSource); + mediaStream.addTrack(track); - return pcFactory.createVideoTrack(trackId, videoSource); + LocalVideoTrack localVideoTrack = new LocalVideoTrack(track); + videoSource.setVideoProcessor(localVideoTrack); + + stateProvider.putLocalTrack(track.id(),localVideoTrack); + + ConstraintsMap trackParams = new ConstraintsMap(); + + trackParams.putBoolean("enabled", track.enabled()); + trackParams.putString("id", track.id()); + trackParams.putString("kind", "video"); + trackParams.putString("label", track.id()); + trackParams.putString("readyState", track.state().toString()); + trackParams.putBoolean("remote", false); + + ConstraintsMap settings 
= new ConstraintsMap(); + settings.putString("deviceId", deviceId); + settings.putString("kind", "videoinput"); + settings.putInt("width", info.width); + settings.putInt("height", info.height); + settings.putInt("frameRate", info.fps); + if (facingMode != null) settings.putString("facingMode", facingMode); + trackParams.putMap("settings", settings.toMap()); + + return trackParams; } void removeVideoCapturer(String id) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer != null) { + VideoCapturerInfoEx info = mVideoCapturers.get(id); + if (info != null) { try { - videoCapturer.stopCapture(); + info.capturer.stopCapture(); + if (info.cameraEventsHandler != null) { + info.cameraEventsHandler.waitForCameraClosed(); + } } catch (InterruptedException e) { Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); + } finally { + info.capturer.dispose(); + mVideoCapturers.remove(id); + SurfaceTextureHelper helper = mSurfaceTextureHelpers.get(id); + if (helper != null) { + helper.stopListening(); + helper.dispose(); + mSurfaceTextureHelpers.remove(id); + } } - mVideoCapturers.remove(id); } } + @RequiresApi(api = VERSION_CODES.M) private void requestPermissions( final ArrayList permissions, final Callback successCallback, final Callback errorCallback) { - PermissionUtils.Callback callback = new PermissionUtils.Callback() { - @Override - public void invoke(String[] permissions_, int[] grantResults) { - List grantedPermissions = new ArrayList<>(); - List deniedPermissions = new ArrayList<>(); - - for (int i = 0; i < permissions_.length; ++i) { - String permission = permissions_[i]; - int grantResult = grantResults[i]; - - if (grantResult == PackageManager.PERMISSION_GRANTED) { - grantedPermissions.add(permission); + PermissionUtils.Callback callback = + (permissions_, grantResults) -> { + List grantedPermissions = new ArrayList<>(); + List deniedPermissions = new ArrayList<>(); + + for (int i = 0; i < permissions_.length; ++i) { + String 
permission = permissions_[i]; + int grantResult = grantResults[i]; + + if (grantResult == PackageManager.PERMISSION_GRANTED) { + grantedPermissions.add(permission); + } else { + deniedPermissions.add(permission); + } + } + + // Success means that all requested permissions were granted. + for (String p : permissions) { + if (!grantedPermissions.contains(p)) { + // According to step 6 of the getUserMedia() algorithm + // "if the result is denied, jump to the step Permission + // Failure." + errorCallback.invoke(deniedPermissions); + return; + } + } + successCallback.invoke(grantedPermissions); + }; + + final Activity activity = stateProvider.getActivity(); + final Context context = stateProvider.getApplicationContext(); + PermissionUtils.requestPermissions( + context, + activity, + permissions.toArray(new String[permissions.size()]), callback); + } + + void switchCamera(String id, Result result) { + VideoCapturer videoCapturer = mVideoCapturers.get(id).capturer; + if (videoCapturer == null) { + resultError("switchCamera", "Video capturer not found for id: " + id, result); + return; + } + + CameraEnumerator cameraEnumerator; + + if (Camera2Enumerator.isSupported(applicationContext)) { + Log.d(TAG, "Creating video capturer using Camera2 API."); + cameraEnumerator = new Camera2Enumerator(applicationContext); + } else { + Log.d(TAG, "Creating video capturer using Camera1 API."); + cameraEnumerator = new Camera1Enumerator(false); + } + // if sourceId given, use specified sourceId first + final String[] deviceNames = cameraEnumerator.getDeviceNames(); + for (String name : deviceNames) { + if (cameraEnumerator.isFrontFacing(name) == !isFacing) { + CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; + cameraVideoCapturer.switchCamera( + new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean b) { + isFacing = !isFacing; + result.success(b); + } + + @Override + public void onCameraSwitchError(String s) { 
+ resultError("switchCamera", "Switching camera failed: " + id, result); + } + }, name); + return; + } + } + resultError("switchCamera", "Switching camera failed: " + id, result); + } + + /** + * Creates and starts recording of local stream to file + * + * @param path to the file for record + * @param videoTrack to record or null if only audio needed + * @param audioChannel channel for recording or null + * @throws Exception lot of different exceptions, pass back to dart layer to print them at least + */ + void startRecordingToFile( + String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) + throws Exception { + AudioSamplesInterceptor interceptor = null; + if (audioChannel == AudioChannel.INPUT) { + interceptor = inputSamplesInterceptor; + } else if (audioChannel == AudioChannel.OUTPUT) { + if (outputSamplesInterceptor == null) { + outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule); + } + interceptor = outputSamplesInterceptor; + } + MediaRecorderImpl mediaRecorder = new MediaRecorderImpl(id, videoTrack, interceptor); + mediaRecorder.startRecording(new File(path)); + mediaRecorders.append(id, mediaRecorder); + } + + void stopRecording(Integer id, String albumName) { + try { + MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); + if (mediaRecorder != null) { + mediaRecorder.stopRecording(); + mediaRecorders.remove(id); + File file = mediaRecorder.getRecordFile(); + Uri collection; + + if (file != null) { + ContentValues values = new ContentValues(); + values.put(MediaStore.Video.Media.TITLE, file.getName()); + values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName()); + values.put(MediaStore.Video.Media.ALBUM, albumName); + values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); + values.put(MediaStore.Video.Media.DATE_ADDED, System.currentTimeMillis() / 1000); + values.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis()); + + //Android version above 9 MediaStore uses 
RELATIVE_PATH + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + values.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/" + albumName); + values.put(MediaStore.Video.Media.IS_PENDING, 1); + + collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY); } else { - deniedPermissions.add(permission); + //Android version 9 and below MediaStore uses DATA + values.put(MediaStore.Video.Media.DATA, "/storage/emulated/0/Movies/" + albumName + "/" + file.getName()); + + collection = MediaStore.Video.Media.EXTERNAL_CONTENT_URI; } - } - // Success means that all requested permissions were granted. - for (String p : permissions) { - if (!grantedPermissions.contains(p)) { - // According to step 6 of the getUserMedia() algorithm - // "if the result is denied, jump to the step Permission - // Failure." - errorCallback.invoke(deniedPermissions); - return; + ContentResolver resolver = applicationContext.getContentResolver(); + Uri uriSavedMedia = resolver.insert(collection, values); + + assert uriSavedMedia != null; + ParcelFileDescriptor pfd = resolver.openFileDescriptor(uriSavedMedia, "w"); + assert pfd != null; + FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor()); + + InputStream in = new FileInputStream(file); + + byte[] buf = new byte[8192]; + int len; + + while ((len = in.read(buf)) > 0) { + out.write(buf, 0, len); } + + out.close(); + in.close(); + pfd.close(); + values.clear(); } - successCallback.invoke(grantedPermissions); } - }; + } catch(Exception e){ + + } - PermissionUtils.requestPermissions( - plugin, - permissions.toArray(new String[permissions.size()]), - callback); } - void switchCamera(String id) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer != null) { - CameraVideoCapturer cameraVideoCapturer - = (CameraVideoCapturer) videoCapturer; - cameraVideoCapturer.switchCamera(null); + + + public void reStartCamera(IsCameraEnabled getCameraId) { + for (Map.Entry item : 
mVideoCapturers.entrySet()) { + if (!item.getValue().isScreenCapture && getCameraId.isEnabled(item.getKey())) { + item.getValue().capturer.startCapture( + item.getValue().width, + item.getValue().height, + item.getValue().fps + ); + } + } + } + + public interface IsCameraEnabled { + boolean isEnabled(String id); + } + + public static class VideoCapturerInfoEx extends VideoCapturerInfo { + public CameraEventsHandler cameraEventsHandler; + } + + public VideoCapturerInfoEx getCapturerInfo(String trackId) { + return mVideoCapturers.get(trackId); + } + + @RequiresApi(api = VERSION_CODES.M) + void setPreferredInputDevice(String deviceId) { + android.media.AudioManager audioManager = ((android.media.AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE)); + final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS); + if (devices.length > 0) { + for (int i = 0; i < devices.length; i++) { + AudioDeviceInfo device = devices[i]; + if(deviceId.equals(AudioUtils.getAudioDeviceId(device))) { + preferredInput = device; + audioDeviceModule.setPreferredInputDevice(preferredInput); + return; + } + } + } + } + + @RequiresApi(api = VERSION_CODES.M) + int getPreferredInputDevice(AudioDeviceInfo deviceInfo) { + if (deviceInfo == null) { + return -1; + } + android.media.AudioManager audioManager = ((android.media.AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE)); + final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS); + for (int i = 0; i < devices.length; i++) { + if (devices[i].getId() == deviceInfo.getId()) { + return i; + } } + return -1; } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/LocalTrack.java b/android/src/main/java/com/cloudwebrtc/webrtc/LocalTrack.java new file mode 100644 index 0000000000..6135fdf1b3 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/LocalTrack.java @@ -0,0 +1,31 @@ +package 
com.cloudwebrtc.webrtc; + +import org.webrtc.MediaStreamTrack; + +public class LocalTrack { + public LocalTrack(MediaStreamTrack track) { + this.track = track; + } + + public MediaStreamTrack track; + + public void dispose() { + track.dispose(); + } + + public boolean enabled() { + return track.enabled(); + } + + public void setEnabled(boolean enabled) { + track.setEnabled(enabled); + } + + public String id() { + return track.id(); + } + + public String kind() { + return track.kind(); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java new file mode 100644 index 0000000000..8444c0e66b --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -0,0 +1,2293 @@ +package com.cloudwebrtc.webrtc; + +import static com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils.parseMediaConstraints; + +import android.app.Activity; +import android.content.Context; +import android.content.pm.PackageManager; +import android.graphics.SurfaceTexture; +import android.hardware.Camera; +import android.hardware.Camera.CameraInfo; +import android.media.MediaRecorder; +import android.media.AudioAttributes; +import android.media.AudioDeviceInfo; +import android.os.Build; +import android.util.Log; +import android.util.LongSparseArray; +import android.view.Surface; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; + +import com.cloudwebrtc.webrtc.audio.AudioDeviceKind; +import com.cloudwebrtc.webrtc.audio.AudioProcessingController; +import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; +import com.cloudwebrtc.webrtc.audio.AudioUtils; +import com.cloudwebrtc.webrtc.audio.LocalAudioTrack; +import com.cloudwebrtc.webrtc.audio.PlaybackSamplesReadyCallbackAdapter; +import com.cloudwebrtc.webrtc.audio.RecordSamplesReadyCallbackAdapter; +import com.cloudwebrtc.webrtc.record.AudioChannel; 
+import com.cloudwebrtc.webrtc.record.FrameCapturer; +import com.cloudwebrtc.webrtc.utils.AnyThreadResult; +import com.cloudwebrtc.webrtc.utils.Callback; +import com.cloudwebrtc.webrtc.utils.ConstraintsArray; +import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import com.cloudwebrtc.webrtc.utils.EglUtils; +import com.cloudwebrtc.webrtc.utils.ObjectType; +import com.cloudwebrtc.webrtc.utils.PermissionUtils; +import com.cloudwebrtc.webrtc.utils.Utils; +import com.cloudwebrtc.webrtc.video.VideoCapturerInfo; +import com.cloudwebrtc.webrtc.video.camera.CameraUtils; +import com.cloudwebrtc.webrtc.video.camera.Point; +import com.cloudwebrtc.webrtc.video.LocalVideoTrack; +import com.twilio.audioswitch.AudioDevice; + +import org.webrtc.AudioTrack; +import org.webrtc.CryptoOptions; +import org.webrtc.DtmfSender; +import org.webrtc.EglBase; +import org.webrtc.IceCandidate; +import org.webrtc.Logging; +import org.webrtc.MediaConstraints; +import org.webrtc.MediaConstraints.KeyValuePair; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnection; +import org.webrtc.PeerConnection.BundlePolicy; +import org.webrtc.PeerConnection.CandidateNetworkPolicy; +import org.webrtc.PeerConnection.ContinualGatheringPolicy; +import org.webrtc.PeerConnection.IceServer; +import org.webrtc.PeerConnection.IceServer.Builder; +import org.webrtc.PeerConnection.IceTransportsType; +import org.webrtc.PeerConnection.KeyType; +import org.webrtc.PeerConnection.RTCConfiguration; +import org.webrtc.PeerConnection.RtcpMuxPolicy; +import org.webrtc.PeerConnection.SdpSemantics; +import org.webrtc.PeerConnection.TcpCandidatePolicy; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.PeerConnectionFactory.InitializationOptions; +import org.webrtc.PeerConnectionFactory.Options; +import org.webrtc.RtpCapabilities; +import org.webrtc.RtpSender; +import org.webrtc.SdpObserver; +import org.webrtc.SessionDescription; +import org.webrtc.SessionDescription.Type; 
+import org.webrtc.VideoTrack; +import org.webrtc.audio.AudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.video.CustomVideoDecoderFactory; +import org.webrtc.video.CustomVideoEncoderFactory; + +import java.io.File; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.UUID; + +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel.MethodCallHandler; +import io.flutter.plugin.common.MethodChannel.Result; +import io.flutter.view.TextureRegistry; +import io.flutter.view.TextureRegistry.SurfaceTextureEntry; + +public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider { + static public final String TAG = "FlutterWebRTCPlugin"; + + private final Map mPeerConnectionObservers = new HashMap<>(); + private final BinaryMessenger messenger; + private final Context context; + private final TextureRegistry textures; + private PeerConnectionFactory mFactory; + private final Map localStreams = new HashMap<>(); + private final Map localTracks = new HashMap<>(); + private final LongSparseArray renders = new LongSparseArray<>(); + + public RecordSamplesReadyCallbackAdapter recordSamplesReadyCallbackAdapter; + + public PlaybackSamplesReadyCallbackAdapter playbackSamplesReadyCallbackAdapter; + + /** + * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce + * complexity and to (somewhat) separate concerns. 
+ */ + private GetUserMediaImpl getUserMediaImpl; + + private CameraUtils cameraUtils; + + private AudioDeviceModule audioDeviceModule; + + private FlutterRTCFrameCryptor frameCryptor; + + private Activity activity; + + private CustomVideoEncoderFactory videoEncoderFactory; + + private CustomVideoDecoderFactory videoDecoderFactory; + + public AudioProcessingController audioProcessingController; + + MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry) { + this.context = context; + this.textures = textureRegistry; + this.messenger = messenger; + } + + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + + void dispose() { + for (final MediaStream mediaStream : localStreams.values()) { + streamDispose(mediaStream); + mediaStream.dispose(); + } + localStreams.clear(); + for (final LocalTrack track : localTracks.values()) { + track.dispose(); + } + localTracks.clear(); + for (final PeerConnectionObserver connection : mPeerConnectionObservers.values()) { + peerConnectionDispose(connection); + } + mPeerConnectionObservers.clear(); + } + private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List forceSWCodecList, + @Nullable ConstraintsMap androidAudioConfiguration) { + if (mFactory != null) { + return; + } + + PeerConnectionFactory.initialize( + InitializationOptions.builder(context) + .setEnableInternalTracer(true) + .createInitializationOptions()); + + getUserMediaImpl = new GetUserMediaImpl(this, context); + + cameraUtils = new CameraUtils(getUserMediaImpl, activity); + + frameCryptor = new FlutterRTCFrameCryptor(this); + + AudioAttributes audioAttributes = null; + if (androidAudioConfiguration != null) { + Integer usageType = AudioUtils.getAudioAttributesUsageTypeForString( + 
androidAudioConfiguration.getString("androidAudioAttributesUsageType")); + Integer contentType = AudioUtils.getAudioAttributesContentTypeFromString( + androidAudioConfiguration.getString("androidAudioAttributesContentType")); + + // Warn if one is provided without the other. + if (usageType == null ^ contentType == null) { + Log.w(TAG, "usageType and contentType must both be provided!"); + } + + if (usageType != null && contentType != null) { + audioAttributes = new AudioAttributes.Builder() + .setUsage(usageType) + .setContentType(contentType) + .build(); + } + } + JavaAudioDeviceModule.Builder audioDeviceModuleBuilder = JavaAudioDeviceModule.builder(context); + + recordSamplesReadyCallbackAdapter = new RecordSamplesReadyCallbackAdapter(); + playbackSamplesReadyCallbackAdapter = new PlaybackSamplesReadyCallbackAdapter(); + + if(bypassVoiceProcessing) { + audioDeviceModuleBuilder.setUseHardwareAcousticEchoCanceler(false) + .setUseHardwareNoiseSuppressor(false) + .setUseStereoInput(true) + .setUseStereoOutput(true) + .setAudioSource(MediaRecorder.AudioSource.MIC); + } else { + boolean useHardwareAudioProcessing = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q; + boolean useLowLatency = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O; + audioDeviceModuleBuilder.setUseHardwareAcousticEchoCanceler(useHardwareAudioProcessing) + .setUseLowLatency(useLowLatency) + .setUseHardwareNoiseSuppressor(useHardwareAudioProcessing); + } + + audioDeviceModuleBuilder.setSamplesReadyCallback(recordSamplesReadyCallbackAdapter); + audioDeviceModuleBuilder.setPlaybackSamplesReadyCallback(playbackSamplesReadyCallbackAdapter); + + recordSamplesReadyCallbackAdapter.addCallback(getUserMediaImpl.inputSamplesInterceptor); + + recordSamplesReadyCallbackAdapter.addCallback(new JavaAudioDeviceModule.SamplesReadyCallback() { + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + for(LocalTrack track : localTracks.values()) { + if (track 
instanceof LocalAudioTrack) { + ((LocalAudioTrack) track).onWebRtcAudioRecordSamplesReady(audioSamples); + } + } + } + }); + + if (audioAttributes != null) { + audioDeviceModuleBuilder.setAudioAttributes(audioAttributes); + } + + audioDeviceModule = audioDeviceModuleBuilder.createAudioDeviceModule(); + + if(!bypassVoiceProcessing) { + if(JavaAudioDeviceModule.isBuiltInNoiseSuppressorSupported()) { + audioDeviceModule.setNoiseSuppressorEnabled(true); + } + } + + + getUserMediaImpl.audioDeviceModule = (JavaAudioDeviceModule) audioDeviceModule; + + final Options options = new Options(); + options.networkIgnoreMask = networkIgnoreMask; + + final PeerConnectionFactory.Builder factoryBuilder = PeerConnectionFactory.builder() + .setOptions(options); + + // Initialize EGL contexts required for HW acceleration. + EglBase.Context eglContext = EglUtils.getRootEglBaseContext(); + + videoEncoderFactory = new CustomVideoEncoderFactory(eglContext, true, true); + videoDecoderFactory = new CustomVideoDecoderFactory(eglContext); + + factoryBuilder + .setVideoEncoderFactory(videoEncoderFactory) + .setVideoDecoderFactory(videoDecoderFactory); + + videoDecoderFactory.setForceSWCodec(forceSWCodec); + videoDecoderFactory.setForceSWCodecList(forceSWCodecList); + videoEncoderFactory.setForceSWCodec(forceSWCodec); + videoEncoderFactory.setForceSWCodecList(forceSWCodecList); + + audioProcessingController = new AudioProcessingController(); + + factoryBuilder.setAudioProcessingFactory(audioProcessingController.externalAudioProcessingFactory); + + mFactory = factoryBuilder + .setAudioDeviceModule(audioDeviceModule) + .createPeerConnectionFactory(); + + } + + @Override + public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { + + final AnyThreadResult result = new AnyThreadResult(notSafeResult); + switch (call.method) { + case "initialize": { + int networkIgnoreMask = Options.ADAPTER_TYPE_UNKNOWN; + Map options = call.argument("options"); + ConstraintsMap constraintsMap = new 
ConstraintsMap(options); + if (constraintsMap.hasKey("networkIgnoreMask") + && constraintsMap.getType("networkIgnoreMask") == ObjectType.Array) { + final ConstraintsArray ignoredAdapters = constraintsMap.getArray("networkIgnoreMask"); + if (ignoredAdapters != null) { + for (Object adapter : ignoredAdapters.toArrayList()) { + switch (adapter.toString()) { + case "adapterTypeEthernet": + networkIgnoreMask += Options.ADAPTER_TYPE_ETHERNET; + break; + case "adapterTypeWifi": + networkIgnoreMask += Options.ADAPTER_TYPE_WIFI; + break; + case "adapterTypeCellular": + networkIgnoreMask += Options.ADAPTER_TYPE_CELLULAR; + break; + case "adapterTypeVpn": + networkIgnoreMask += Options.ADAPTER_TYPE_VPN; + break; + case "adapterTypeLoopback": + networkIgnoreMask += Options.ADAPTER_TYPE_LOOPBACK; + break; + case "adapterTypeAny": + networkIgnoreMask += Options.ADAPTER_TYPE_ANY; + break; + } + } + + } + } + boolean forceSWCodec = false; + if (constraintsMap.hasKey("forceSWCodec") + && constraintsMap.getType("forceSWCodec") == ObjectType.Boolean) { + final boolean v = constraintsMap.getBoolean("forceSWCodec"); + forceSWCodec = v; + } + List forceSWCodecList = new ArrayList<>(); + if(constraintsMap.hasKey("forceSWCodecList") + && constraintsMap.getType("forceSWCodecList") == ObjectType.Array) { + final List array = constraintsMap.getListArray("forceSWCodecList"); + for(Object v : array) { + forceSWCodecList.add(v.toString()); + } + } else { + // disable HW Codec for VP9 by default. 
+ forceSWCodecList.add("VP9"); + } + + ConstraintsMap androidAudioConfiguration = null; + if (constraintsMap.hasKey("androidAudioConfiguration") + && constraintsMap.getType("androidAudioConfiguration") == ObjectType.Map) { + androidAudioConfiguration = constraintsMap.getMap("androidAudioConfiguration"); + } + boolean enableBypassVoiceProcessing = false; + if(options.get("bypassVoiceProcessing") != null) { + enableBypassVoiceProcessing = (boolean)options.get("bypassVoiceProcessing"); + } + initialize(enableBypassVoiceProcessing, networkIgnoreMask, forceSWCodec, forceSWCodecList, androidAudioConfiguration); + result.success(null); + break; + } + case "createPeerConnection": { + Map constraints = call.argument("constraints"); + Map configuration = call.argument("configuration"); + String peerConnectionId = peerConnectionInit(new ConstraintsMap(configuration), + new ConstraintsMap((constraints))); + ConstraintsMap res = new ConstraintsMap(); + res.putString("peerConnectionId", peerConnectionId); + result.success(res.toMap()); + break; + } + case "getUserMedia": { + Map constraints = call.argument("constraints"); + ConstraintsMap constraintsMap = new ConstraintsMap(constraints); + getUserMedia(constraintsMap, result); + break; + } + case "createLocalMediaStream": + createLocalMediaStream(result); + break; + case "getSources": + getSources(result); + break; + case "createOffer": { + String peerConnectionId = call.argument("peerConnectionId"); + Map constraints = call.argument("constraints"); + peerConnectionCreateOffer(peerConnectionId, new ConstraintsMap(constraints), result); + break; + } + case "createAnswer": { + String peerConnectionId = call.argument("peerConnectionId"); + Map constraints = call.argument("constraints"); + peerConnectionCreateAnswer(peerConnectionId, new ConstraintsMap(constraints), result); + break; + } + case "mediaStreamGetTracks": { + String streamId = call.argument("streamId"); + MediaStream stream = getStreamForId(streamId, ""); + Map 
resultMap = new HashMap<>(); + List audioTracks = new ArrayList<>(); + List videoTracks = new ArrayList<>(); + for (AudioTrack track : stream.audioTracks) { + localTracks.put(track.id(), new LocalAudioTrack(track)); + Map trackMap = new HashMap<>(); + trackMap.put("enabled", track.enabled()); + trackMap.put("id", track.id()); + trackMap.put("kind", track.kind()); + trackMap.put("label", track.id()); + trackMap.put("readyState", "live"); + trackMap.put("remote", false); + audioTracks.add(trackMap); + } + for (VideoTrack track : stream.videoTracks) { + localTracks.put(track.id(), new LocalVideoTrack(track)); + Map trackMap = new HashMap<>(); + trackMap.put("enabled", track.enabled()); + trackMap.put("id", track.id()); + trackMap.put("kind", track.kind()); + trackMap.put("label", track.id()); + trackMap.put("readyState", "live"); + trackMap.put("remote", false); + videoTracks.add(trackMap); + } + resultMap.put("audioTracks", audioTracks); + resultMap.put("videoTracks", videoTracks); + result.success(resultMap); + break; + } + case "addStream": { + String streamId = call.argument("streamId"); + String peerConnectionId = call.argument("peerConnectionId"); + peerConnectionAddStream(streamId, peerConnectionId, result); + break; + } + case "removeStream": { + String streamId = call.argument("streamId"); + String peerConnectionId = call.argument("peerConnectionId"); + peerConnectionRemoveStream(streamId, peerConnectionId, result); + break; + } + case "setLocalDescription": { + String peerConnectionId = call.argument("peerConnectionId"); + Map description = call.argument("description"); + peerConnectionSetLocalDescription(new ConstraintsMap(description), peerConnectionId, + result); + break; + } + case "setRemoteDescription": { + String peerConnectionId = call.argument("peerConnectionId"); + Map description = call.argument("description"); + peerConnectionSetRemoteDescription(new ConstraintsMap(description), peerConnectionId, + result); + break; + } + case "sendDtmf": { + 
String peerConnectionId = call.argument("peerConnectionId"); + String tone = call.argument("tone"); + int duration = call.argument("duration"); + int gap = call.argument("gap"); + PeerConnection peerConnection = getPeerConnection(peerConnectionId); + if (peerConnection != null) { + RtpSender audioSender = null; + for (RtpSender sender : peerConnection.getSenders()) { + + if (sender != null && sender.track() != null && sender.track().kind().equals("audio")) { + audioSender = sender; + } + } + if (audioSender != null) { + DtmfSender dtmfSender = audioSender.dtmf(); + dtmfSender.insertDtmf(tone, duration, gap); + } + result.success("success"); + } else { + resultError("dtmf", "peerConnection is null", result); + } + break; + } + case "addCandidate": { + String peerConnectionId = call.argument("peerConnectionId"); + Map candidate = call.argument("candidate"); + peerConnectionAddICECandidate(new ConstraintsMap(candidate), peerConnectionId, result); + break; + } + case "getStats": { + String peerConnectionId = call.argument("peerConnectionId"); + String trackId = call.argument("trackId"); + peerConnectionGetStats(trackId, peerConnectionId, result); + break; + } + case "createDataChannel": { + String peerConnectionId = call.argument("peerConnectionId"); + String label = call.argument("label"); + Map dataChannelDict = call.argument("dataChannelDict"); + createDataChannel(peerConnectionId, label, new ConstraintsMap(dataChannelDict), result); + break; + } + case "dataChannelGetBufferedAmount": { + String peerConnectionId = call.argument("peerConnectionId"); + String dataChannelId = call.argument("dataChannelId"); + dataChannelGetBufferedAmount(peerConnectionId, dataChannelId, result); + break; + } + case "dataChannelSend": { + String peerConnectionId = call.argument("peerConnectionId"); + String dataChannelId = call.argument("dataChannelId"); + String type = call.argument("type"); + Boolean isBinary = type.equals("binary"); + ByteBuffer byteBuffer; + if (isBinary) { + 
byteBuffer = ByteBuffer.wrap(call.argument("data")); + } else { + String data = call.argument("data"); + byteBuffer = ByteBuffer.wrap(data.getBytes(StandardCharsets.UTF_8)); + } + dataChannelSend(peerConnectionId, dataChannelId, byteBuffer, isBinary); + result.success(null); + break; + } + case "dataChannelClose": { + String peerConnectionId = call.argument("peerConnectionId"); + String dataChannelId = call.argument("dataChannelId"); + dataChannelClose(peerConnectionId, dataChannelId); + result.success(null); + break; + } + case "streamDispose": { + String streamId = call.argument("streamId"); + streamDispose(streamId); + result.success(null); + break; + } + case "mediaStreamTrackSetEnable": { + String trackId = call.argument("trackId"); + Boolean enabled = call.argument("enabled"); + String peerConnectionId = call.argument("peerConnectionId"); + mediaStreamTrackSetEnabled(trackId, enabled, peerConnectionId); + result.success(null); + break; + } + case "mediaStreamAddTrack": { + String streamId = call.argument("streamId"); + String trackId = call.argument("trackId"); + mediaStreamAddTrack(streamId, trackId, result); + for (int i = 0; i < renders.size(); i++) { + FlutterRTCVideoRenderer renderer = renders.valueAt(i); + if (renderer.checkMediaStream(streamId, "local")) { + LocalTrack track = localTracks.get(trackId); + if(track != null) { + renderer.setVideoTrack((VideoTrack) track.track); + } + } + } + break; + } + case "mediaStreamRemoveTrack": { + String streamId = call.argument("streamId"); + String trackId = call.argument("trackId"); + mediaStreamRemoveTrack(streamId, trackId, result); + removeStreamForRendererById(streamId); + break; + } + case "trackDispose": { + String trackId = call.argument("trackId"); + trackDispose(trackId); + result.success(null); + break; + } + case "restartIce": { + String peerConnectionId = call.argument("peerConnectionId"); + restartIce(peerConnectionId); + result.success(null); + break; + } + case "peerConnectionClose": { + String 
peerConnectionId = call.argument("peerConnectionId"); + peerConnectionClose(peerConnectionId); + result.success(null); + break; + } + case "peerConnectionDispose": { + String peerConnectionId = call.argument("peerConnectionId"); + peerConnectionDispose(peerConnectionId); + result.success(null); + break; + } + case "createVideoRenderer": { + TextureRegistry.SurfaceProducer producer = textures.createSurfaceProducer(); + FlutterRTCVideoRenderer render = new FlutterRTCVideoRenderer(producer); + renders.put(producer.id(), render); + + EventChannel eventChannel = + new EventChannel( + messenger, + "FlutterWebRTC/Texture" + producer.id()); + + eventChannel.setStreamHandler(render); + render.setEventChannel(eventChannel); + render.setId((int) producer.id()); + + ConstraintsMap params = new ConstraintsMap(); + params.putInt("textureId", (int) producer.id()); + result.success(params.toMap()); + break; + } + case "videoRendererDispose": { + int textureId = call.argument("textureId"); + FlutterRTCVideoRenderer render = renders.get(textureId); + if (render == null) { + resultError("videoRendererDispose", "render [" + textureId + "] not found !", result); + return; + } + render.Dispose(); + renders.delete(textureId); + result.success(null); + break; + } + case "videoRendererSetSrcObject": { + int textureId = call.argument("textureId"); + String streamId = call.argument("streamId"); + String ownerTag = call.argument("ownerTag"); + String trackId = call.argument("trackId"); + FlutterRTCVideoRenderer render = renders.get(textureId); + if (render == null) { + resultError("videoRendererSetSrcObject", "render [" + textureId + "] not found !", result); + return; + } + MediaStream stream = null; + if (ownerTag.equals("local")) { + stream = localStreams.get(streamId); + } else { + stream = getStreamForId(streamId, ownerTag); + } + if (trackId != null && !trackId.equals("0")){ + render.setStream(stream, trackId, ownerTag); + } else { + render.setStream(stream, ownerTag); + } + 
result.success(null); + break; + } + case "mediaStreamTrackHasTorch": { + String trackId = call.argument("trackId"); + cameraUtils.hasTorch(trackId, result); + break; + } + case "mediaStreamTrackSetTorch": { + String trackId = call.argument("trackId"); + boolean torch = call.argument("torch"); + cameraUtils.setTorch(trackId, torch, result); + break; + } + case "mediaStreamTrackSetZoom": { + String trackId = call.argument("trackId"); + double zoomLevel = call.argument("zoomLevel"); + cameraUtils.setZoom(trackId, zoomLevel, result); + break; + } + case "mediaStreamTrackSetFocusMode": { + cameraUtils.setFocusMode(call, result); + break; + } + case "mediaStreamTrackSetFocusPoint":{ + Map focusPoint = call.argument("focusPoint"); + Boolean reset = (Boolean)focusPoint.get("reset"); + Double x = null; + Double y = null; + if (reset == null || !reset) { + x = (Double)focusPoint.get("x"); + y = (Double)focusPoint.get("y"); + } + cameraUtils.setFocusPoint(call, new Point(x, y), result); + break; + } + case "mediaStreamTrackSetExposureMode": { + cameraUtils.setExposureMode(call, result); + break; + } + case "mediaStreamTrackSetExposurePoint": { + Map exposurePoint = call.argument("exposurePoint"); + Boolean reset = (Boolean)exposurePoint.get("reset"); + Double x = null; + Double y = null; + if (reset == null || !reset) { + x = (Double)exposurePoint.get("x"); + y = (Double)exposurePoint.get("y"); + } + cameraUtils.setExposurePoint(call, new Point(x, y), result); + break; + } + case "mediaStreamTrackSwitchCamera": { + String trackId = call.argument("trackId"); + getUserMediaImpl.switchCamera(trackId, result); + break; + } + case "setVolume": { + String trackId = call.argument("trackId"); + double volume = call.argument("volume"); + String peerConnectionId = call.argument("peerConnectionId"); + mediaStreamTrackSetVolume(trackId, volume, peerConnectionId); + result.success(null); + break; + } + case "selectAudioOutput": { + String deviceId = call.argument("deviceId"); + 
AudioSwitchManager.instance.selectAudioOutput(AudioDeviceKind.fromTypeName(deviceId)); + result.success(null); + break; + } + case "clearAndroidCommunicationDevice": { + AudioSwitchManager.instance.clearCommunicationDevice(); + result.success(null); + break; + } + case "setMicrophoneMute": + boolean mute = call.argument("mute"); + AudioSwitchManager.instance.setMicrophoneMute(mute); + result.success(null); + break; + case "selectAudioInput": + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) { + String deviceId = call.argument("deviceId"); + getUserMediaImpl.setPreferredInputDevice(deviceId); + result.success(null); + } else { + result.notImplemented(); + } + break; + case "setAndroidAudioConfiguration": { + Map configuration = call.argument("configuration"); + AudioSwitchManager.instance.setAudioConfiguration(configuration); + result.success(null); + break; + } + case "enableSpeakerphone": + boolean enable = call.argument("enable"); + AudioSwitchManager.instance.enableSpeakerphone(enable); + result.success(null); + break; + case "enableSpeakerphoneButPreferBluetooth": + AudioSwitchManager.instance.enableSpeakerButPreferBluetooth(); + result.success(null); + break; + case "requestCapturePermission": { + getUserMediaImpl.requestCapturePermission(result); + break; + } + case "getDisplayMedia": { + Map constraints = call.argument("constraints"); + ConstraintsMap constraintsMap = new ConstraintsMap(constraints); + getDisplayMedia(constraintsMap, result); + break; + } + case "startRecordToFile": + //This method can a lot of different exceptions + //so we should notify plugin user about them + try { + String path = call.argument("path"); + VideoTrack videoTrack = null; + String videoTrackId = call.argument("videoTrackId"); + String peerConnectionId = call.argument("peerConnectionId"); + if (videoTrackId != null) { + MediaStreamTrack track = getTrackForId(videoTrackId, peerConnectionId); + if (track instanceof VideoTrack) { + videoTrack = (VideoTrack) track; 
+ } + } + AudioChannel audioChannel = null; + if (call.hasArgument("audioChannel") + && call.argument("audioChannel") != null) { + audioChannel = AudioChannel.values()[(Integer) call.argument("audioChannel")]; + } + Integer recorderId = call.argument("recorderId"); + if (videoTrack != null || audioChannel != null) { + getUserMediaImpl.startRecordingToFile(path, recorderId, videoTrack, audioChannel); + result.success(null); + } else { + resultError("startRecordToFile", "No tracks", result); + } + } catch (Exception e) { + resultError("startRecordToFile", e.getMessage(), result); + } + break; + case "stopRecordToFile": + Integer recorderId = call.argument("recorderId"); + String albumName = call.argument("albumName"); + getUserMediaImpl.stopRecording(recorderId, albumName); + result.success(null); + break; + case "captureFrame": { + String path = call.argument("path"); + String videoTrackId = call.argument("trackId"); + String peerConnectionId = call.argument("peerConnectionId"); + if (videoTrackId != null) { + MediaStreamTrack track = getTrackForId(videoTrackId, peerConnectionId); + if (track instanceof VideoTrack) { + new FrameCapturer((VideoTrack) track, new File(path), result); + } else { + resultError("captureFrame", "It's not video track", result); + } + } else { + resultError("captureFrame", "Track is null", result); + } + break; + } + case "getLocalDescription": { + String peerConnectionId = call.argument("peerConnectionId"); + PeerConnection peerConnection = getPeerConnection(peerConnectionId); + if (peerConnection != null) { + SessionDescription sdp = peerConnection.getLocalDescription(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + result.success(params.toMap()); + } else { + resultError("getLocalDescription", "peerConnection is null", result); + } + break; + } + case "getRemoteDescription": { + String peerConnectionId = call.argument("peerConnectionId"); 
+ PeerConnection peerConnection = getPeerConnection(peerConnectionId); + if (peerConnection != null) { + SessionDescription sdp = peerConnection.getRemoteDescription(); + if (null == sdp) { + result.success(null); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + result.success(params.toMap()); + } + } else { + resultError("getRemoteDescription", "peerConnection is null", result); + } + break; + } + case "setConfiguration": { + String peerConnectionId = call.argument("peerConnectionId"); + Map configuration = call.argument("configuration"); + PeerConnection peerConnection = getPeerConnection(peerConnectionId); + if (peerConnection != null) { + peerConnectionSetConfiguration(new ConstraintsMap(configuration), peerConnection); + result.success(null); + } else { + resultError("setConfiguration", "peerConnection is null", result); + } + break; + } + case "addTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String trackId = call.argument("trackId"); + List streamIds = call.argument("streamIds"); + addTrack(peerConnectionId, trackId, streamIds, result); + break; + } + case "removeTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String senderId = call.argument("senderId"); + + removeTrack(peerConnectionId, senderId, result); + break; + } + case "addTransceiver": { + String peerConnectionId = call.argument("peerConnectionId"); + Map transceiverInit = call.argument("transceiverInit"); + if (call.hasArgument("trackId")) { + String trackId = call.argument("trackId"); + addTransceiver(peerConnectionId, trackId, transceiverInit, result); + } else if (call.hasArgument("mediaType")) { + String mediaType = call.argument("mediaType"); + addTransceiverOfType(peerConnectionId, mediaType, transceiverInit, result); + } else { + resultError("addTransceiver", "Incomplete parameters", result); + } + break; + } + case 
"rtpTransceiverSetDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String direction = call.argument("direction"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverSetDirection(peerConnectionId, direction, transceiverId, result); + break; + } + case "rtpTransceiverGetDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverGetDirection(peerConnectionId, transceiverId, result); + break; + } + case "rtpTransceiverGetCurrentDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverGetCurrentDirection(peerConnectionId, transceiverId, result); + break; + } + case "rtpTransceiverStop": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverStop(peerConnectionId, transceiverId, result); + break; + } + case "rtpSenderSetParameters": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + Map parameters = call.argument("parameters"); + rtpSenderSetParameters(peerConnectionId, rtpSenderId, parameters, result); + break; + } + case "rtpSenderReplaceTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, true, result); + break; + } + case "rtpSenderSetTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, false, result); + break; + } + case "rtpSenderSetStreams": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = 
call.argument("rtpSenderId"); + List streamIds = call.argument("streamIds"); + rtpSenderSetStreams(peerConnectionId, rtpSenderId, streamIds, result); + break; + } + case "getSenders": { + String peerConnectionId = call.argument("peerConnectionId"); + getSenders(peerConnectionId, result); + break; + } + case "getReceivers": { + String peerConnectionId = call.argument("peerConnectionId"); + getReceivers(peerConnectionId, result); + break; + } + case "getTransceivers": { + String peerConnectionId = call.argument("peerConnectionId"); + getTransceivers(peerConnectionId, result); + break; + } + case "setPreferredInputDevice": { + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) { + String deviceId = call.argument("deviceId"); + getUserMediaImpl.setPreferredInputDevice(deviceId); + result.success(null); + } else { + result.notImplemented(); + } + break; + } + case "getRtpSenderCapabilities": { + String kind = call.argument("kind"); + MediaStreamTrack.MediaType mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if (kind.equals("video")) { + mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + } + RtpCapabilities capabilities = mFactory.getRtpSenderCapabilities(mediaType); + result.success(capabilitiestoMap(capabilities).toMap()); + break; + } + case "getRtpReceiverCapabilities": { + String kind = call.argument("kind"); + MediaStreamTrack.MediaType mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if (kind.equals("video")) { + mediaType = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + } + RtpCapabilities capabilities = mFactory.getRtpReceiverCapabilities(mediaType); + result.success(capabilitiestoMap(capabilities).toMap()); + break; + } + case "setCodecPreferences": { + String peerConnectionId = call.argument("peerConnectionId"); + List> codecs = call.argument("codecs"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverSetCodecPreferences(peerConnectionId, transceiverId, codecs, result); + break; + } + case 
"getSignalingState": {
        // Reply with the signaling state of the given peer connection as a string.
        String peerConnectionId = call.argument("peerConnectionId");
        PeerConnection pc = getPeerConnection(peerConnectionId);
        if (pc == null) {
          resultError("getSignalingState", "peerConnection is null", result);
        } else {
          ConstraintsMap params = new ConstraintsMap();
          params.putString("state", Utils.signalingStateString(pc.signalingState()));
          result.success(params.toMap());
        }
        break;
      }
      case "getIceGatheringState": {
        // Reply with the ICE gathering state of the given peer connection.
        String peerConnectionId = call.argument("peerConnectionId");
        PeerConnection pc = getPeerConnection(peerConnectionId);
        if (pc == null) {
          resultError("getIceGatheringState", "peerConnection is null", result);
        } else {
          ConstraintsMap params = new ConstraintsMap();
          params.putString("state", Utils.iceGatheringStateString(pc.iceGatheringState()));
          result.success(params.toMap());
        }
        break;
      }
      case "getIceConnectionState": {
        // Reply with the ICE connection state of the given peer connection.
        String peerConnectionId = call.argument("peerConnectionId");
        PeerConnection pc = getPeerConnection(peerConnectionId);
        if (pc == null) {
          resultError("getIceConnectionState", "peerConnection is null", result);
        } else {
          ConstraintsMap params = new ConstraintsMap();
          params.putString("state", Utils.iceConnectionStateString(pc.iceConnectionState()));
          result.success(params.toMap());
        }
        break;
      }
      case "getConnectionState": {
        // Reply with the aggregate peer-connection state.
        String peerConnectionId = call.argument("peerConnectionId");
        PeerConnection pc = getPeerConnection(peerConnectionId);
        if (pc == null) {
          resultError("getConnectionState", "peerConnection is null", result);
        } else {
          ConstraintsMap params = new ConstraintsMap();
          params.putString("state", Utils.connectionStateString(pc.connectionState()));
          result.success(params.toMap());
        }
        break;
      }
      default:
        // Unknown method names are first offered to the frame-cryptor
        // sub-handler; only if it declines do we report "not implemented".
        if(frameCryptor.handleMethodCall(call, result)) {
          break;
        }
        result.notImplemented();
        break;
    }
  }

  /**
   * Converts native {@link RtpCapabilities} into the map shape sent back over
   * the method channel: a "codecs" array (mimeType, clockRate, optional
   * channels, and a ";"-joined "sdpFmtpLine"), a "headerExtensions" array
   * (uri, id, encrypted), and an always-empty "fecMechanisms" array.
   */
  private ConstraintsMap capabilitiestoMap(RtpCapabilities capabilities) {
    ConstraintsMap capabilitiesMap = new ConstraintsMap();
    ConstraintsArray codecArr = new ConstraintsArray();
    for(RtpCapabilities.CodecCapability codec : capabilities.codecs){
      ConstraintsMap codecMap = new ConstraintsMap();
      codecMap.putString("mimeType", codec.mimeType);
      codecMap.putInt("clockRate", codec.clockRate);
      // channels is only emitted when the codec reports a channel count.
      if(codec.numChannels != null)
        codecMap.putInt("channels", codec.numChannels);
      // Rebuild the SDP fmtp line from the codec's parameter map; entries
      // with an empty key contribute their bare value.
      // NOTE(review): generic type arguments appear lost in extraction (raw
      // List / Map.Entry here) - presumably List<String> and
      // Map.Entry<String, String>; confirm against the original file.
      List sdpFmtpLineArr = new ArrayList<>();
      for(Map.Entry entry : codec.parameters.entrySet()) {
        if(entry.getKey().length() > 0) {
          sdpFmtpLineArr.add(entry.getKey() + "=" + entry.getValue());
        } else {
          sdpFmtpLineArr.add(entry.getValue());
        }
      }
      if(sdpFmtpLineArr.size() > 0)
        codecMap.putString("sdpFmtpLine", String.join(";", sdpFmtpLineArr));
      codecArr.pushMap(codecMap);
    }
    ConstraintsArray headerExtensionsArr = new ConstraintsArray();
    for(RtpCapabilities.HeaderExtensionCapability headerExtension : capabilities.headerExtensions){
      ConstraintsMap headerExtensionMap = new ConstraintsMap();
      headerExtensionMap.putString("uri", headerExtension.getUri());
      headerExtensionMap.putInt("id", headerExtension.getPreferredId());
      headerExtensionMap.putBoolean("encrypted", headerExtension.getPreferredEncrypted());
      headerExtensionsArr.pushMap(headerExtensionMap);
    }
    capabilitiesMap.putArray("codecs", codecArr.toArrayList());
    capabilitiesMap.putArray("headerExtensions", headerExtensionsArr.toArrayList());
    // "fecMechanisms" is always present but never populated here.
    ConstraintsArray fecMechanismsArr = new ConstraintsArray();
    capabilitiesMap.putArray("fecMechanisms", fecMechanismsArr.toArrayList());
    return capabilitiesMap;
  }

  // Null-safe lookup: resolves a peer-connection id to its native
  // PeerConnection via the observer registry; null when the id is unknown.
  private PeerConnection getPeerConnection(String id) {
    PeerConnectionObserver pco = mPeerConnectionObservers.get(id);
    return (pco == null) ? null : pco.getPeerConnection();
  }

  // Builds the native IceServer list from the Dart-provided array. Supports
  // the legacy single "url" key as well as "urls" given either as a string or
  // as an array, each with optional username/credential.
  // (The conditional expression below continues in the next chunk.)
  private List createIceServers(ConstraintsArray iceServersArray) {
    final int size = (iceServersArray == null) ?
0 : iceServersArray.size();
    List iceServers = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
      ConstraintsMap iceServerMap = iceServersArray.getMap(i);
      boolean hasUsernameAndCredential =
          iceServerMap.hasKey("username") && iceServerMap.hasKey("credential");
      if (iceServerMap.hasKey("url")) {
        // Legacy single-"url" form.
        if (hasUsernameAndCredential) {
          iceServers.add(IceServer.builder(iceServerMap.getString("url"))
              .setUsername(iceServerMap.getString("username"))
              .setPassword(iceServerMap.getString("credential")).createIceServer());
        } else {
          iceServers.add(
              IceServer.builder(iceServerMap.getString("url")).createIceServer());
        }
      } else if (iceServerMap.hasKey("urls")) {
        switch (iceServerMap.getType("urls")) {
          case String:
            if (hasUsernameAndCredential) {
              iceServers.add(IceServer.builder(iceServerMap.getString("urls"))
                  .setUsername(iceServerMap.getString("username"))
                  .setPassword(iceServerMap.getString("credential")).createIceServer());
            } else {
              iceServers.add(IceServer.builder(iceServerMap.getString("urls"))
                  .createIceServer());
            }
            break;
          case Array:
            // "urls" given as a list of strings -> one IceServer covering all.
            ConstraintsArray urls = iceServerMap.getArray("urls");
            List urlsList = new ArrayList<>();

            for (int j = 0; j < urls.size(); j++) {
              urlsList.add(urls.getString(j));
            }

            Builder builder = IceServer.builder(urlsList);

            if (hasUsernameAndCredential) {
              builder
                  .setUsername(iceServerMap.getString("username"))
                  .setPassword(iceServerMap.getString("credential"));
            }

            iceServers.add(builder.createIceServer());

            break;
        }
      }
      // Entries with neither "url" nor "urls" are silently skipped.
    }
    return iceServers;
  }

  /**
   * Translates the Dart-side configuration map into a native
   * {@code RTCConfiguration}. A null map yields a default configuration with
   * an empty ICE-server list. Each key is applied only when present with the
   * expected type; unrecognized enum strings leave the native default intact.
   */
  private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) {
    ConstraintsArray iceServersArray = null;
    if (map != null) {
      iceServersArray = map.getArray("iceServers");
    }
    List iceServers = createIceServers(iceServersArray);
    RTCConfiguration conf = new RTCConfiguration(iceServers);
    if (map == null) {
      return conf;
    }

    // iceTransportPolicy (public api)
    if (map.hasKey("iceTransportPolicy")
        && map.getType("iceTransportPolicy") == ObjectType.String) {
      final String v = map.getString("iceTransportPolicy");
      if (v != null) {
        switch (v) {
          case "all": // public
            conf.iceTransportsType = IceTransportsType.ALL;
            break;
          case "relay": // public
            conf.iceTransportsType = IceTransportsType.RELAY;
            break;
          case "nohost":
            conf.iceTransportsType = IceTransportsType.NOHOST;
            break;
          case "none":
            conf.iceTransportsType = IceTransportsType.NONE;
            break;
        }
      }
    }

    // bundlePolicy (public api)
    if (map.hasKey("bundlePolicy")
        && map.getType("bundlePolicy") == ObjectType.String) {
      final String v = map.getString("bundlePolicy");
      if (v != null) {
        switch (v) {
          case "balanced": // public
            conf.bundlePolicy = BundlePolicy.BALANCED;
            break;
          case "max-compat": // public
            conf.bundlePolicy = BundlePolicy.MAXCOMPAT;
            break;
          case "max-bundle": // public
            conf.bundlePolicy = BundlePolicy.MAXBUNDLE;
            break;
        }
      }
    }

    // rtcpMuxPolicy (public api)
    if (map.hasKey("rtcpMuxPolicy")
        && map.getType("rtcpMuxPolicy") == ObjectType.String) {
      final String v = map.getString("rtcpMuxPolicy");
      if (v != null) {
        switch (v) {
          case "negotiate": // public
            conf.rtcpMuxPolicy = RtcpMuxPolicy.NEGOTIATE;
            break;
          case "require": // public
            conf.rtcpMuxPolicy = RtcpMuxPolicy.REQUIRE;
            break;
        }
      }
    }

    // FIXME: peerIdentity of type DOMString (public api)
    // FIXME: certificates of type sequence (public api)

    // iceCandidatePoolSize of type unsigned short, defaulting to 0
    if (map.hasKey("iceCandidatePoolSize")
        && map.getType("iceCandidatePoolSize") == ObjectType.Number) {
      final int v = map.getInt("iceCandidatePoolSize");
      // Only positive values override the native default of 0.
      if (v > 0) {
        conf.iceCandidatePoolSize = v;
      }
    }

    // sdpSemantics
    if (map.hasKey("sdpSemantics")
        && map.getType("sdpSemantics") == ObjectType.String) {
      final String v = map.getString("sdpSemantics");
      if (v != null) {
        switch (v) {
          case "plan-b":
            conf.sdpSemantics = SdpSemantics.PLAN_B;
            break;
          case "unified-plan":
            conf.sdpSemantics = SdpSemantics.UNIFIED_PLAN;
            break;
        }
      }
    }

    // maxIPv6Networks
    if (map.hasKey("maxIPv6Networks")
        && map.getType("maxIPv6Networks") == ObjectType.Number) {
      conf.maxIPv6Networks = map.getInt("maxIPv6Networks");
    }

    // === below is private api in webrtc ===

    // tcpCandidatePolicy (private api)
    if (map.hasKey("tcpCandidatePolicy")
        && map.getType("tcpCandidatePolicy") == ObjectType.String) {
      final String v = map.getString("tcpCandidatePolicy");
      if (v != null) {
        switch (v) {
          case "enabled":
            conf.tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
            break;
          case "disabled":
            conf.tcpCandidatePolicy = TcpCandidatePolicy.DISABLED;
            break;
        }
      }
    }

    // candidateNetworkPolicy (private api)
    if (map.hasKey("candidateNetworkPolicy")
        && map.getType("candidateNetworkPolicy") == ObjectType.String) {
      final String v = map.getString("candidateNetworkPolicy");
      if (v != null) {
        switch (v) {
          case "all":
            conf.candidateNetworkPolicy = CandidateNetworkPolicy.ALL;
            break;
          case "low_cost":
            conf.candidateNetworkPolicy = CandidateNetworkPolicy.LOW_COST;
            break;
        }
      }
    }

    // KeyType (private api)
    if (map.hasKey("keyType")
        && map.getType("keyType") == ObjectType.String) {
      final String v = map.getString("keyType");
      if (v != null) {
        switch (v) {
          case "RSA":
            conf.keyType = KeyType.RSA;
            break;
          case "ECDSA":
            conf.keyType = KeyType.ECDSA;
            break;
        }
      }
    }

    // continualGatheringPolicy (private api)
    if (map.hasKey("continualGatheringPolicy")
        && map.getType("continualGatheringPolicy") == ObjectType.String) {
      final String v = map.getString("continualGatheringPolicy");
      if (v != null) {
        switch (v) {
          case "gather_once":
            conf.continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
            break;
          case "gather_continually":
            conf.continualGatheringPolicy = ContinualGatheringPolicy.GATHER_CONTINUALLY;
            break;
        }
      }
    }

    // audioJitterBufferMaxPackets (private api)
    if (map.hasKey("audioJitterBufferMaxPackets")
        && map.getType("audioJitterBufferMaxPackets") == ObjectType.Number) {
      final int v = map.getInt("audioJitterBufferMaxPackets");
      if (v > 0) {
        conf.audioJitterBufferMaxPackets = v;
      }
    }

    // iceConnectionReceivingTimeout (private api)
    if (map.hasKey("iceConnectionReceivingTimeout")
        && map.getType("iceConnectionReceivingTimeout") == ObjectType.Number) {
      final int v = map.getInt("iceConnectionReceivingTimeout");
      conf.iceConnectionReceivingTimeout = v;
    }

    // iceBackupCandidatePairPingInterval (private api)
    if (map.hasKey("iceBackupCandidatePairPingInterval")
        && map.getType("iceBackupCandidatePairPingInterval") == ObjectType.Number) {
      final int v = map.getInt("iceBackupCandidatePairPingInterval");
      conf.iceBackupCandidatePairPingInterval = v;
    }

    // audioJitterBufferFastAccelerate (private api)
    if (map.hasKey("audioJitterBufferFastAccelerate")
        && map.getType("audioJitterBufferFastAccelerate") == ObjectType.Boolean) {
      final boolean v = map.getBoolean("audioJitterBufferFastAccelerate");
      conf.audioJitterBufferFastAccelerate = v;
    }

    // pruneTurnPorts (private api)
    if (map.hasKey("pruneTurnPorts")
        && map.getType("pruneTurnPorts") == ObjectType.Boolean) {
      final boolean v = map.getBoolean("pruneTurnPorts");
      conf.pruneTurnPorts = v;
    }

    // presumeWritableWhenFullyRelayed (private api)
    if (map.hasKey("presumeWritableWhenFullyRelayed")
        && map.getType("presumeWritableWhenFullyRelayed") == ObjectType.Boolean) {
      final boolean v = map.getBoolean("presumeWritableWhenFullyRelayed");
      conf.presumeWritableWhenFullyRelayed = v;
    }
    // cryptoOptions: each flag defaults to false when its key is absent.
    // (The builder chain below continues in the next chunk.)
    if (map.hasKey("cryptoOptions")
        && map.getType("cryptoOptions") == ObjectType.Map) {
      final ConstraintsMap cryptoOptions = map.getMap("cryptoOptions");
      conf.cryptoOptions = CryptoOptions.builder()
          .setEnableGcmCryptoSuites(cryptoOptions.hasKey("enableGcmCryptoSuites") &&
cryptoOptions.getBoolean("enableGcmCryptoSuites")) + .setRequireFrameEncryption(cryptoOptions.hasKey("requireFrameEncryption") && cryptoOptions.getBoolean("requireFrameEncryption")) + .setEnableEncryptedRtpHeaderExtensions(cryptoOptions.hasKey("enableEncryptedRtpHeaderExtensions") && cryptoOptions.getBoolean("enableEncryptedRtpHeaderExtensions")) + .setEnableAes128Sha1_32CryptoCipher(cryptoOptions.hasKey("enableAes128Sha1_32CryptoCipher") && cryptoOptions.getBoolean("enableAes128Sha1_32CryptoCipher")) + .createCryptoOptions(); + } + if (map.hasKey("enableCpuOveruseDetection") + && map.getType("enableCpuOveruseDetection") == ObjectType.Boolean) { + final boolean v = map.getBoolean("enableCpuOveruseDetection"); + conf.enableCpuOveruseDetection = v; + } + return conf; + } + + public String peerConnectionInit(ConstraintsMap configuration, ConstraintsMap constraints) { + String peerConnectionId = getNextStreamUUID(); + RTCConfiguration conf = parseRTCConfiguration(configuration); + PeerConnectionObserver observer = new PeerConnectionObserver(conf, this, messenger, peerConnectionId); + PeerConnection peerConnection + = mFactory.createPeerConnection( + conf, + parseMediaConstraints(constraints), + observer); + observer.setPeerConnection(peerConnection); + mPeerConnectionObservers.put(peerConnectionId, observer); + return peerConnectionId; + } + + @Override + public boolean putLocalStream(String streamId, MediaStream stream) { + localStreams.put(streamId, stream); + return true; + } + + @Override + public boolean putLocalTrack(String trackId, LocalTrack track) { + localTracks.put(trackId, track); + return true; + } + + @Override + public LocalTrack getLocalTrack(String trackId) { + return localTracks.get(trackId); + } + + public MediaStreamTrack getRemoteTrack(String trackId) { + for (Entry entry : mPeerConnectionObservers.entrySet()) { + PeerConnectionObserver pco = entry.getValue(); + MediaStreamTrack track = pco.remoteTracks.get(trackId); + if (track == null) { + track 
          // Continuation of getRemoteTrack(): transceiver fallback.
          = pco.getTransceiversTrack(trackId);
    }
    if (track != null) {
      return track;
    }
  }
  return null;
}

/** Generates a stream id not yet used by any known (local or remote) stream. */
@Override
public String getNextStreamUUID() {
  String uuid;

  do {
    uuid = UUID.randomUUID().toString();
  } while (getStreamForId(uuid, "") != null);

  return uuid;
}

/** Generates a track id not yet used by any known track. */
@Override
public String getNextTrackUUID() {
  String uuid;

  do {
    uuid = UUID.randomUUID().toString();
  } while (getTrackForId(uuid, null) != null);

  return uuid;
}

@Override
public PeerConnectionFactory getPeerConnectionFactory() {
  return mFactory;
}

@Override
public PeerConnectionObserver getPeerConnectionObserver(String peerConnectionId) {
  return mPeerConnectionObservers.get(peerConnectionId);
}

@Nullable
@Override
public Activity getActivity() {
  return activity;
}

@Nullable
@Override
public Context getApplicationContext() {
  return context;
}

@Override
public BinaryMessenger getMessenger() {
  return messenger;
}

/**
 * Resolves a stream id. A non-empty peerConnectionId restricts the search to
 * that connection's remote streams; otherwise all connections are scanned.
 * Local streams are the final fallback.
 */
MediaStream getStreamForId(String id, String peerConnectionId) {
  MediaStream stream = null;
  if (peerConnectionId.length() > 0) {
    PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
    if (pco != null) {
      stream = pco.remoteStreams.get(id);
    }
  } else {
    for (Entry entry : mPeerConnectionObservers
        .entrySet()) {
      PeerConnectionObserver pco = entry.getValue();
      stream = pco.remoteStreams.get(id);
      if (stream != null) {
        break;
      }
    }
  }
  if (stream == null) {
    stream = localStreams.get(id);
  }

  return stream;
}

/**
 * Resolves a track id: local tracks win; otherwise remote/transceiver tracks
 * are searched (restricted to peerConnectionId when it is non-null).
 */
public MediaStreamTrack getTrackForId(String trackId, String peerConnectionId) {
  LocalTrack localTrack = localTracks.get(trackId);
  MediaStreamTrack mediaStreamTrack = null;
  if (localTrack == null) {
    for (Entry entry : mPeerConnectionObservers.entrySet()) {
      if (peerConnectionId != null && entry.getKey().compareTo(peerConnectionId) != 0)
        continue;

      PeerConnectionObserver pco = entry.getValue();
      mediaStreamTrack =
          // Continuation of getTrackForId(): remote tracks first, then
          // transceiver-owned tracks.
          pco.remoteTracks.get(trackId);

      if (mediaStreamTrack == null) {
        mediaStreamTrack = pco.getTransceiversTrack(trackId);
      }

      if (mediaStreamTrack != null) {
        break;
      }
    }
  } else {
    mediaStreamTrack = localTrack.track;
  }

  return mediaStreamTrack;
}


/**
 * getUserMedia entry point: creates the backing local MediaStream and hands
 * constraint processing off to GetUserMediaImpl.
 */
public void getUserMedia(ConstraintsMap constraints, Result result) {
  String streamId = getNextStreamUUID();
  MediaStream mediaStream = mFactory.createLocalMediaStream(streamId);

  if (mediaStream == null) {
    // XXX The following does not follow the getUserMedia() algorithm
    // specified by
    // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
    // with respect to distinguishing the various causes of failure.
    resultError("getUserMediaFailed", "Failed to create new media stream", result);
    return;
  }

  getUserMediaImpl.getUserMedia(constraints, result, mediaStream);
}

/**
 * getDisplayMedia entry point (screen capture); mirrors getUserMedia().
 */
public void getDisplayMedia(ConstraintsMap constraints, Result result) {
  String streamId = getNextStreamUUID();
  MediaStream mediaStream = mFactory.createLocalMediaStream(streamId);

  if (mediaStream == null) {
    // XXX The following does not follow the getUserMedia() algorithm
    // specified by
    // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
    // with respect to distinguishing the various causes of failure.
    // Continuation of getDisplayMedia(): report failure and bail out.
    resultError("getDisplayMedia", "Failed to create new media stream", result);
    return;
  }

  getUserMediaImpl.getDisplayMedia(constraints, result, mediaStream);
}

/**
 * Enumerates capture devices (cameras, audio inputs) and audio outputs and
 * answers with a map of the form {"sources": [...]}.
 */
public void getSources(Result result) {
  ConstraintsArray array = new ConstraintsArray();
  // NOTE(review): 'names' is never read below — looks like dead code; confirm
  // before removing.
  String[] names = new String[Camera.getNumberOfCameras()];

  for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
    ConstraintsMap info = getCameraInfo(i);
    if (info != null) {
      array.pushMap(info);
    }
  }

  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
    // Pre-M has no AudioDeviceInfo API; advertise a single synthetic mic.
    ConstraintsMap audio = new ConstraintsMap();
    audio.putString("label", "Audio");
    audio.putString("deviceId", "audio-1");
    audio.putString("kind", "audioinput");
    audio.putString("groupId", "microphone");
    array.pushMap(audio);
  } else {
    android.media.AudioManager audioManager = ((android.media.AudioManager) context
        .getSystemService(Context.AUDIO_SERVICE));
    final AudioDeviceInfo[] devices =
        audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS);
    for (int i = 0; i < devices.length; i++) {
      AudioDeviceInfo device = devices[i];
      // Only expose mic-like inputs: built-in mic, BT SCO, wired headset.
      if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC
          || device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO
          || device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
        ConstraintsMap audio = new ConstraintsMap();
        audio.putString("label", AudioUtils.getAudioDeviceLabel(device));
        audio.putString("deviceId", AudioUtils.getAudioDeviceId(device));
        audio.putString("groupId", AudioUtils.getAudioGroupId(device));
        audio.putString("kind", "audioinput");
        array.pushMap(audio);
      }
    }
  }

  List audioOutputs = AudioSwitchManager.instance.availableAudioDevices();

  for (AudioDevice audioOutput : audioOutputs) {
    ConstraintsMap audioOutputMap = new ConstraintsMap();
    audioOutputMap.putString("label", audioOutput.getName());
    audioOutputMap.putString("deviceId", AudioDeviceKind.fromAudioDevice(audioOutput).typeName);
    audioOutputMap.putString("groupId", "" +
AudioDeviceKind.fromAudioDevice(audioOutput).typeName); + audioOutputMap.putString("kind", "audiooutput"); + array.pushMap(audioOutputMap); + } + + ConstraintsMap map = new ConstraintsMap(); + map.putArray("sources", array.toArrayList()); + + result.success(map.toMap()); + } + + private void createLocalMediaStream(Result result) { + String streamId = getNextStreamUUID(); + MediaStream mediaStream = mFactory.createLocalMediaStream(streamId); + localStreams.put(streamId, mediaStream); + + if (mediaStream == null) { + resultError("createLocalMediaStream", "Failed to create new media stream", result); + return; + } + Map resultMap = new HashMap<>(); + resultMap.put("streamId", mediaStream.getId()); + result.success(resultMap); + } + + public void trackDispose(final String trackId) { + LocalTrack track = localTracks.get(trackId); + if (track == null) { + Log.d(TAG, "trackDispose() track is null"); + return; + } + removeTrackForRendererById(trackId); + track.setEnabled(false); + if (track instanceof LocalVideoTrack) { + getUserMediaImpl.removeVideoCapturer(trackId); + } + localTracks.remove(trackId); + } + + public void mediaStreamTrackSetEnabled(final String id, final boolean enabled, String peerConnectionId) { + MediaStreamTrack track = getTrackForId(id, peerConnectionId); + + if (track == null) { + Log.d(TAG, "mediaStreamTrackSetEnabled() track is null"); + return; + } else if (track.enabled() == enabled) { + return; + } + track.setEnabled(enabled); + } + + public void mediaStreamTrackSetVolume(final String id, final double volume, String peerConnectionId) { + MediaStreamTrack track = getTrackForId(id, null); + if (track instanceof AudioTrack) { + Log.d(TAG, "setVolume(): " + id + "," + volume); + try { + ((AudioTrack) track).setVolume(volume); + } catch (Exception e) { + Log.e(TAG, "setVolume(): error", e); + } + } else { + Log.w(TAG, "setVolume(): track not found: " + id); + } + } + + public void mediaStreamAddTrack(final String streamId, final String trackId, 
    // Continuation of mediaStreamAddTrack(streamId, trackId, ...): attaches an
    // existing (local or remote) track to a local stream; answers null on success.
    Result result) {
  MediaStream mediaStream = localStreams.get(streamId);
  if (mediaStream != null) {
    MediaStreamTrack track = getTrackForId(trackId, null);//localTracks.get(trackId);
    if (track != null) {
      String kind = track.kind();
      if (kind.equals("audio")) {
        mediaStream.addTrack((AudioTrack) track);
        result.success(null);
      } else if (kind.equals("video")) {
        mediaStream.addTrack((VideoTrack) track);
        result.success(null);
      } else {
        resultError("mediaStreamAddTrack",
            "mediaStreamAddTrack() track [" + trackId + "] has unsupported type: " + kind,
            result);
      }
    } else {
      resultError("mediaStreamAddTrack",
          "mediaStreamAddTrack() track [" + trackId + "] is null", result);
    }
  } else {
    resultError("mediaStreamAddTrack",
        "mediaStreamAddTrack() stream [" + streamId + "] is null", result);
  }
}

/**
 * Detaches a LOCAL track (localTracks only) from a local stream; answers
 * null on success.
 */
public void mediaStreamRemoveTrack(final String streamId, final String trackId, Result result) {
  MediaStream mediaStream = localStreams.get(streamId);
  if (mediaStream != null) {
    LocalTrack track = localTracks.get(trackId);
    if (track != null) {
      String kind = track.kind();
      if (kind.equals("audio")) {
        mediaStream.removeTrack((AudioTrack) track.track);
        result.success(null);
      } else if (kind.equals("video")) {
        mediaStream.removeTrack((VideoTrack) track.track);
        result.success(null);
      } else {
        resultError("mediaStreamRemoveTrack",
            "mediaStreamRemoveTrack() track [" + trackId + "] has unsupported type: " + kind,
            result);
      }
    } else {
      resultError("mediaStreamRemoveTrack",
          "mediaStreamRemoveTrack() track [" + trackId + "] is null", result);
    }
  } else {
    resultError("mediaStreamRemoveTrack",
        "mediaStreamRemoveTrack() stream [" + streamId + "] is null", result);
  }
}

/** Disables a local track, forgets it, and detaches it from the given stream. */
public void mediaStreamTrackRelease(final String streamId, final String _trackId) {
  MediaStream stream = localStreams.get(streamId);
  if (stream == null) {
    Log.d(TAG, "mediaStreamTrackRelease() stream is null");
    return;
  }
  LocalTrack track =
      // Continuation of mediaStreamTrackRelease().
      localTracks.get(_trackId);
  if (track == null) {
    Log.d(TAG, "mediaStreamTrackRelease() track is null");
    return;
  }
  track.setEnabled(false); // should we do this?
  localTracks.remove(_trackId);
  if (track.kind().equals("audio")) {
    stream.removeTrack((AudioTrack) track.track);
  } else if (track.kind().equals("video")) {
    stream.removeTrack((VideoTrack) track.track);
    getUserMediaImpl.removeVideoCapturer(_trackId);
  }
}

/**
 * Describes camera `index` as a getSources() entry; returns null when the
 * camera info query fails.
 */
public ConstraintsMap getCameraInfo(int index) {
  CameraInfo info = new CameraInfo();

  try {
    Camera.getCameraInfo(index, info);
  } catch (Exception e) {
    Logging.e("CameraEnumerationAndroid", "getCameraInfo failed on index " + index, e);
    return null;
  }
  ConstraintsMap params = new ConstraintsMap();
  // CameraInfo.CAMERA_FACING_FRONT == 1.
  String facing = info.facing == 1 ? "front" : "back";
  params.putString("label",
      "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation);
  params.putString("deviceId", "" + index);
  params.putString("facing", facing);
  params.putString("kind", "videoinput");
  params.putString("groupId", "camera");
  return params;
}

/** Default offer/answer constraints: receive both audio and video. */
private MediaConstraints defaultConstraints() {
  MediaConstraints constraints = new MediaConstraints();
  // TODO video media
  constraints.mandatory.add(new KeyValuePair("OfferToReceiveAudio", "true"));
  constraints.mandatory.add(new KeyValuePair("OfferToReceiveVideo", "true"));
  constraints.optional.add(new KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  return constraints;
}

/** Applies a freshly parsed RTCConfiguration to an existing connection. */
public void peerConnectionSetConfiguration(ConstraintsMap configuration,
    PeerConnection peerConnection) {
  if (peerConnection == null) {
    Log.d(TAG, "peerConnectionSetConfiguration() peerConnection is null");
    return;
  }
  peerConnection.setConfiguration(parseRTCConfiguration(configuration));
}

/** Plan-B style addStream; answers the boolean returned by libwebrtc. */
public void peerConnectionAddStream(final String streamId, final String id, Result result) {
  MediaStream mediaStream = localStreams.get(streamId);
  if (mediaStream == null) {
    // Continuation of peerConnectionAddStream().
    Log.d(TAG, "peerConnectionAddStream() mediaStream is null");
    return;
  }
  PeerConnection peerConnection = getPeerConnection(id);
  if (peerConnection != null) {
    boolean res = peerConnection.addStream(mediaStream);
    // NOTE(review): this logs the Result object, not the boolean — probably
    // meant to log `res`; confirm.
    Log.d(TAG, "addStream" + result);
    result.success(res);
  } else {
    resultError("peerConnectionAddStream", "peerConnection is null", result);
  }
}

/** Plan-B style removeStream; answers null on success. */
public void peerConnectionRemoveStream(final String streamId, final String id, Result result) {
  MediaStream mediaStream = localStreams.get(streamId);
  if (mediaStream == null) {
    Log.d(TAG, "peerConnectionRemoveStream() mediaStream is null");
    return;
  }
  PeerConnection peerConnection = getPeerConnection(id);
  if (peerConnection != null) {
    peerConnection.removeStream(mediaStream);
    result.success(null);
  } else {
    resultError("peerConnectionRemoveStream", "peerConnection is null", result);
  }
}

/**
 * Creates an SDP offer and answers {"sdp": ..., "type": ...} on success.
 */
public void peerConnectionCreateOffer(
    String id,
    ConstraintsMap constraints,
    final Result result) {
  PeerConnection peerConnection = getPeerConnection(id);

  if (peerConnection != null) {
    peerConnection.createOffer(new SdpObserver() {
      @Override
      public void onCreateFailure(String s) {
        resultError("peerConnectionCreateOffer", "WEBRTC_CREATE_OFFER_ERROR: " + s, result);
      }

      @Override
      public void onCreateSuccess(final SessionDescription sdp) {
        ConstraintsMap params = new ConstraintsMap();
        params.putString("sdp", sdp.description);
        params.putString("type", sdp.type.canonicalForm());
        result.success(params.toMap());
      }

      // Set callbacks are unused for createOffer.
      @Override
      public void onSetFailure(String s) {
      }

      @Override
      public void onSetSuccess() {
      }
    }, parseMediaConstraints(constraints));
  } else {
    resultError("peerConnectionCreateOffer", "WEBRTC_CREATE_OFFER_ERROR", result);
  }
}

/**
 * Creates an SDP answer and answers {"sdp": ..., "type": ...} on success.
 */
public void peerConnectionCreateAnswer(
    String id,
    ConstraintsMap constraints,
    final Result result) {
  PeerConnection peerConnection = getPeerConnection(id);

  if (peerConnection != null) {
    // Continuation of peerConnectionCreateAnswer().
    peerConnection.createAnswer(new SdpObserver() {
      @Override
      public void onCreateFailure(String s) {
        resultError("peerConnectionCreateAnswer", "WEBRTC_CREATE_ANSWER_ERROR: " + s, result);
      }

      @Override
      public void onCreateSuccess(final SessionDescription sdp) {
        ConstraintsMap params = new ConstraintsMap();
        params.putString("sdp", sdp.description);
        params.putString("type", sdp.type.canonicalForm());
        result.success(params.toMap());
      }

      // Set callbacks are unused for createAnswer.
      @Override
      public void onSetFailure(String s) {
      }

      @Override
      public void onSetSuccess() {
      }
    }, parseMediaConstraints(constraints));
  } else {
    resultError("peerConnectionCreateAnswer", "peerConnection is null", result);
  }
}

/**
 * Applies a local session description; answers null once libwebrtc reports
 * the set succeeded.
 */
public void peerConnectionSetLocalDescription(ConstraintsMap sdpMap, final String id,
    final Result result) {
  PeerConnection peerConnection = getPeerConnection(id);
  if (peerConnection != null) {
    SessionDescription sdp = new SessionDescription(
        Type.fromCanonicalForm(sdpMap.getString("type")),
        sdpMap.getString("sdp")
    );

    peerConnection.setLocalDescription(new SdpObserver() {
      // Create callbacks are unused for setLocalDescription.
      @Override
      public void onCreateSuccess(final SessionDescription sdp) {
      }

      @Override
      public void onSetSuccess() {
        result.success(null);
      }

      @Override
      public void onCreateFailure(String s) {
      }

      @Override
      public void onSetFailure(String s) {
        resultError("peerConnectionSetLocalDescription",
            "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: " + s, result);
      }
    }, sdp);
  } else {
    resultError("peerConnectionSetLocalDescription",
        "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: peerConnection is null", result);
  }
}

/**
 * Applies a remote session description; answers null once libwebrtc reports
 * the set succeeded.
 */
public void peerConnectionSetRemoteDescription(final ConstraintsMap sdpMap, final String id,
    final Result result) {
  PeerConnection peerConnection = getPeerConnection(id);
  if (peerConnection != null) {
    SessionDescription sdp = new SessionDescription(
        Type.fromCanonicalForm(sdpMap.getString("type")),
        sdpMap.getString("sdp")
    );

peerConnection.setRemoteDescription(new SdpObserver() { + @Override + public void onCreateSuccess(final SessionDescription sdp) { + } + + @Override + public void onSetSuccess() { + result.success(null); + } + + @Override + public void onCreateFailure(String s) { + } + + @Override + public void onSetFailure(String s) { + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: " + s, result); + } + }, sdp); + } else { + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: peerConnection is null", result); + } + } + + public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final String id, + final Result result) { + boolean res = false; + PeerConnection peerConnection = getPeerConnection(id); + if (peerConnection != null) { + int sdpMLineIndex = 0; + if (!candidateMap.isNull("sdpMLineIndex")) { + sdpMLineIndex = candidateMap.getInt("sdpMLineIndex"); + } + IceCandidate candidate = new IceCandidate( + candidateMap.getString("sdpMid"), + sdpMLineIndex, + candidateMap.getString("candidate")); + res = peerConnection.addIceCandidate(candidate); + } else { + resultError("peerConnectionAddICECandidate", "peerConnection is null", result); + } + result.success(res); + } + + public void peerConnectionGetStats(String trackId, String id, final Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || pco.getPeerConnection() == null) { + resultError("peerConnectionGetStats", "peerConnection is null", result); + } else { + if(trackId == null || trackId.isEmpty()) { + pco.getStats(result); + } else { + pco.getStatsForTrack(trackId, result); + } + } + } + + public void restartIce(final String id) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "restartIce() peerConnection is null"); + } else { + pco.restartIce(); + } + } + + public void peerConnectionClose(final String id) 
{
  // Body of peerConnectionClose(id): close but do not dispose.
  PeerConnectionObserver pco = mPeerConnectionObservers.get(id);
  if (pco == null || pco.getPeerConnection() == null) {
    Log.d(TAG, "peerConnectionClose() peerConnection is null");
  } else {
    pco.close();
  }
}

/**
 * Disposes the connection registered under `id` and, when the last one is
 * gone, stops the shared AudioSwitchManager.
 */
public void peerConnectionDispose(final String id) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(id);
  if (pco != null) {
    if (peerConnectionDispose(pco)) {

      mPeerConnectionObservers.remove(id);
    }
  } else {
    Log.d(TAG, "peerConnectionDispose() peerConnectionObserver is null");
  }
  if (mPeerConnectionObservers.size() == 0) {
    AudioSwitchManager.instance.stop();
  }
}

/** Disposes the observer's connection; true when a dispose actually happened. */
public boolean peerConnectionDispose(final PeerConnectionObserver pco) {
  if (pco.getPeerConnection() == null) {
    Log.d(TAG, "peerConnectionDispose() peerConnection is null");
  } else {
    pco.dispose();
    return true;
  }
  return false;
}

/** Disposes a local stream by id and detaches it from any renderer. */
public void streamDispose(final String streamId) {
  MediaStream stream = localStreams.get(streamId);
  if (stream != null) {
    streamDispose(stream);
    localStreams.remove(streamId);
    removeStreamForRendererById(streamId);
  } else {
    Log.d(TAG, "streamDispose() mediaStream is null");
  }
}

/**
 * Removes and unregisters every track of the given stream.
 * NOTE(review): iterating stream.videoTracks/audioTracks while calling
 * stream.removeTrack() may modify the list being iterated — confirm the
 * libwebrtc binding tolerates this (ConcurrentModificationException risk).
 */
public void streamDispose(final MediaStream stream) {
  List videoTracks = stream.videoTracks;
  for (VideoTrack track : videoTracks) {
    localTracks.remove(track.id());
    getUserMediaImpl.removeVideoCapturer(track.id());
    stream.removeTrack(track);
  }
  List audioTracks = stream.audioTracks;
  for (AudioTrack track : audioTracks) {
    localTracks.remove(track.id());
    stream.removeTrack(track);
  }
}

/** Detaches a local stream from every renderer currently showing it. */
private void removeStreamForRendererById(String streamId) {
  for (int i = 0; i < renders.size(); i++) {
    FlutterRTCVideoRenderer renderer = renders.valueAt(i);
    if (renderer.checkMediaStream(streamId, "local")) {
      renderer.setStream(null, "");
    }
  }
}

/** Detaches a local track from every renderer currently showing it. */
private void removeTrackForRendererById(String trackId) {
  for (int i = 0; i < renders.size(); i++) {
    FlutterRTCVideoRenderer renderer =
        // Continuation of removeTrackForRendererById().
        renders.valueAt(i);
    if (renderer.checkVideoTrack(trackId, "local")) {
      renderer.setStream(null, null);
    }
  }
}

/** Opens a DataChannel on the given connection; the observer answers. */
public void createDataChannel(final String peerConnectionId, String label, ConstraintsMap config,
    Result result) {
  // Forward to PeerConnectionObserver which deals with DataChannels
  // because DataChannel is owned by PeerConnection.
  PeerConnectionObserver pco
      = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    Log.d(TAG, "createDataChannel() peerConnection is null");
  } else {
    pco.createDataChannel(label, config, result);
  }
}

/** Sends a text or binary payload over an open DataChannel. */
public void dataChannelSend(String peerConnectionId, String dataChannelId, ByteBuffer bytebuffer,
    Boolean isBinary) {
  // Forward to PeerConnectionObserver which deals with DataChannels
  // because DataChannel is owned by PeerConnection.
  PeerConnectionObserver pco
      = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    Log.d(TAG, "dataChannelSend() peerConnection is null");
  } else {
    pco.dataChannelSend(dataChannelId, bytebuffer, isBinary);
  }
}

/** Answers the DataChannel's bufferedAmount, or an error when unavailable. */
public void dataChannelGetBufferedAmount(String peerConnectionId, String dataChannelId,
    Result result) {
  PeerConnectionObserver pco
      = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    Log.d(TAG, "dataChannelGetBufferedAmount() peerConnection is null");
    resultError("dataChannelGetBufferedAmount", "peerConnection is null", result);
  } else {
    pco.dataChannelGetBufferedAmount(dataChannelId, result);
  }
}

/** Closes a DataChannel (fire-and-forget). */
public void dataChannelClose(String peerConnectionId, String dataChannelId) {
  // Forward to PeerConnectionObserver which deals with DataChannels
  // because DataChannel is owned by PeerConnection.
  // Continuation of dataChannelClose().
  PeerConnectionObserver pco
      = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    Log.d(TAG, "dataChannelClose() peerConnection is null");
  } else {
    pco.dataChannelClose(dataChannelId);
  }
}

/** Updates the Activity reference used for permission prompts etc. */
public void setActivity(Activity activity) {
  this.activity = activity;
}

/** Unified-plan addTrack: attaches a local track to the given connection. */
public void addTrack(String peerConnectionId, String trackId, List streamIds, Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  LocalTrack track = localTracks.get(trackId);
  if (track == null) {
    resultError("addTrack", "track is null", result);
    return;
  }
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("addTrack", "peerConnection is null", result);
  } else {
    pco.addTrack(track.track, streamIds, result);
  }
}

/** Unified-plan removeTrack by RtpSender id. */
public void removeTrack(String peerConnectionId, String senderId, Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("removeTrack", "peerConnection is null", result);
  } else {
    pco.removeTrack(senderId, result);
  }
}

/** Adds a transceiver initialised from an existing local track. */
public void addTransceiver(String peerConnectionId, String trackId, Map transceiverInit,
    Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  LocalTrack track = localTracks.get(trackId);
  if (track == null) {
    resultError("addTransceiver", "track is null", result);
    return;
  }
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("addTransceiver", "peerConnection is null", result);
  } else {
    pco.addTransceiver(track.track, transceiverInit, result);
  }
}

/** Adds a transceiver for a media kind ("audio"/"video") without a track. */
public void addTransceiverOfType(String peerConnectionId, String mediaType, Map transceiverInit,
    Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
resultError("addTransceiverOfType", "peerConnection is null", result); + } else { + pco.addTransceiverOfType(mediaType, transceiverInit, result); + } + } + + public void rtpTransceiverSetDirection(String peerConnectionId, String direction, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverSetDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverSetDirection(direction, transceiverId, result); + } + } + + public void rtpTransceiverSetCodecPreferences(String peerConnectionId, String transceiverId, List> codecs, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("setCodecPreferences", "peerConnection is null", result); + } else { + pco.rtpTransceiverSetCodecPreferences(transceiverId, codecs, result); + } + } + + public void rtpTransceiverGetDirection(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverSetDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverGetDirection(transceiverId, result); + } + } + + public void rtpTransceiverGetCurrentDirection(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverSetDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverGetCurrentDirection(transceiverId, result); + } + } + + public void rtpTransceiverStop(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if 
      // Continuation of rtpTransceiverStop()'s null guard.
      (pco == null || pco.getPeerConnection() == null) {
    resultError("rtpTransceiverStop", "peerConnection is null", result);
  } else {
    pco.rtpTransceiverStop(transceiverId, result);
  }
}

/** Applies new RtpParameters (encodings etc.) to a sender. */
public void rtpSenderSetParameters(String peerConnectionId, String rtpSenderId, Map parameters,
    Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("rtpSenderSetParameters", "peerConnection is null", result);
  } else {
    pco.rtpSenderSetParameters(rtpSenderId, parameters, result);
  }
}

/** Answers the connection's current RtpSenders. */
public void getSenders(String peerConnectionId, Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("getSenders", "peerConnection is null", result);
  } else {
    pco.getSenders(result);
  }
}

/** Answers the connection's current RtpReceivers. */
public void getReceivers(String peerConnectionId, Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("getReceivers", "peerConnection is null", result);
  } else {
    pco.getReceivers(result);
  }
}

/** Answers the connection's current RtpTransceivers. */
public void getTransceivers(String peerConnectionId, Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("getTransceivers", "peerConnection is null", result);
  } else {
    pco.getTransceivers(result);
  }
}

/**
 * Sets (or, with replace, replaces) the track attached to a sender. An empty
 * trackId means "detach"; a non-empty trackId must name a known local track.
 */
public void rtpSenderSetTrack(String peerConnectionId, String rtpSenderId, String trackId,
    boolean replace, Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("rtpSenderSetTrack", "peerConnection is null", result);
  } else {
    MediaStreamTrack mediaStreamTrack = null;
    LocalTrack track = localTracks.get(trackId);
    if
        // Continuation of rtpSenderSetTrack(): a non-empty trackId must
        // resolve to a known local track; an empty one detaches the sender.
        (trackId.length() > 0) {
      if (track == null) {
        resultError("rtpSenderSetTrack", "track is null", result);
        return;
      }
    }

    if (track != null) {
      mediaStreamTrack = track.track;
    }
    pco.rtpSenderSetTrack(rtpSenderId, mediaStreamTrack, result, replace);
  }
}

/** Re-associates a sender with the given stream ids. */
public void rtpSenderSetStreams(String peerConnectionId, String rtpSenderId, List streamIds,
    Result result) {
  PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
  if (pco == null || pco.getPeerConnection() == null) {
    resultError("rtpSenderSetStreams", "peerConnection is null", result);
  } else {
    pco.rtpSenderSetStreams(rtpSenderId, streamIds, result);
  }
}


/**
 * Restarts the camera capturer, re-enabling only the local tracks that were
 * enabled before.
 */
public void reStartCamera() {
  if (null == getUserMediaImpl) {
    return;
  }
  getUserMediaImpl.reStartCamera(new GetUserMediaImpl.IsCameraEnabled() {
    @Override
    public boolean isEnabled(String id) {
      if (!localTracks.containsKey(id)) {
        return false;
      }
      return localTracks.get(id).enabled();
    }
  });
}

/**
 * Requests runtime permissions; invokes successCallback with the granted list
 * only when EVERY requested permission was granted, otherwise errorCallback
 * with the denied list.
 */
@RequiresApi(api = Build.VERSION_CODES.M)
void requestPermissions(
    final ArrayList permissions,
    final Callback successCallback,
    final Callback errorCallback) {
  PermissionUtils.Callback callback =
      (permissions_, grantResults) -> {
        List grantedPermissions = new ArrayList<>();
        List deniedPermissions = new ArrayList<>();

        for (int i = 0; i < permissions_.length; ++i) {
          String permission = permissions_[i];
          int grantResult = grantResults[i];

          if (grantResult == PackageManager.PERMISSION_GRANTED) {
            grantedPermissions.add(permission);
          } else {
            deniedPermissions.add(permission);
          }
        }

        // Success means that all requested permissions were granted.
        for (String p : permissions) {
          if (!grantedPermissions.contains(p)) {
            // According to step 6 of the getUserMedia() algorithm
            // "if the result is denied, jump to the step Permission
            // Failure."
            errorCallback.invoke(deniedPermissions);
            return;
          }
        }
        successCallback.invoke(grantedPermissions);
      };

  final Activity activity = getActivity();
  final Context context = getApplicationContext();
  PermissionUtils.requestPermissions(
      context,
      activity,
      permissions.toArray(new String[permissions.size()]), callback);
}
}
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java b/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java
new file mode 100644
index 0000000000..7bee5d0dc2
--- /dev/null
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java
@@ -0,0 +1,231 @@
package com.cloudwebrtc.webrtc;

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.CapturerObserver;
import org.webrtc.ThreadUtils;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;

import android.annotation.TargetApi;
import android.content.Context;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.view.Surface;
import android.view.WindowManager;
import android.app.Activity;
import android.hardware.display.DisplayManager;
import android.util.DisplayMetrics;
import android.hardware.display.VirtualDisplay;
import android.media.projection.MediaProjectionManager;
import android.os.Looper;
import android.os.Handler;
import android.os.Build;
import android.view.Display;

/**
 * A copy of ScreenCapturerAndroid that captures the screen content while
 * staying aware of the device orientation.
 */
@TargetApi(21)
public class OrientationAwareScreenCapturer implements VideoCapturer, VideoSink {
  private static final int DISPLAY_FLAGS =
      DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
  // DPI for VirtualDisplay, does not seem to matter for us.
  private static final int VIRTUAL_DISPLAY_DPI = 400;
  private final Intent mediaProjectionPermissionResultData;
  private final MediaProjection.Callback mediaProjectionCallback;
  // Current capture dimensions (swapped on orientation change).
  private int width;
  private int height;
  // Last dimensions applied via changeCaptureFormat(), to skip no-op updates.
  private int oldWidth;
  private int oldHeight;
  private VirtualDisplay virtualDisplay;
  private SurfaceTextureHelper surfaceTextureHelper;
  private CapturerObserver capturerObserver;
  private long numCapturedFrames = 0;
  private MediaProjection mediaProjection;
  private boolean isDisposed = false;
  private MediaProjectionManager mediaProjectionManager;
  private WindowManager windowManager;
  private boolean isPortrait;

  /**
   * Constructs a new Screen Capturer.
   *
   * @param mediaProjectionPermissionResultData the result data of MediaProjection permission
   *     activity; the calling app must validate that result code is Activity.RESULT_OK before
   *     calling this method.
   * @param mediaProjectionCallback MediaProjection callback to implement application specific
   *     logic in events such as when the user revokes a previously granted capture permission.
   **/
  public OrientationAwareScreenCapturer(Intent mediaProjectionPermissionResultData,
      MediaProjection.Callback mediaProjectionCallback) {
    this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
    this.mediaProjectionCallback = mediaProjectionCallback;
  }

  // VideoSink: called for every captured frame; re-checks orientation and
  // adjusts the capture format before forwarding the frame.
  public void onFrame(VideoFrame frame) {
    checkNotDisposed();
    this.isPortrait = isDeviceOrientationPortrait();
    final int max = Math.max(this.height, this.width);
    final int min = Math.min(this.height, this.width);
    if (this.isPortrait) {
      changeCaptureFormat(min, max, 15);
    } else {
      changeCaptureFormat(max, min, 15);
    }
    capturerObserver.onFrameCaptured(frame);
  }

  // True when the display is currently taller than wide.
  private boolean isDeviceOrientationPortrait() {
    final Display display = windowManager.getDefaultDisplay();
    final DisplayMetrics metrics = new DisplayMetrics();
    display.getRealMetrics(metrics);

    return metrics.heightPixels > metrics.widthPixels;
  }


  private void checkNotDisposed() {
    if (isDisposed) {
      throw new RuntimeException("capturer is disposed.");
    }
  }

  // VideoCapturer: stores collaborators and grabs the system services used
  // later by startCapture()/orientation checks.
  public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
      final Context applicationContext, final CapturerObserver capturerObserver) {
    checkNotDisposed();
    if (capturerObserver == null) {
      throw new RuntimeException("capturerObserver not set.");
    }
    this.capturerObserver = capturerObserver;
    if (surfaceTextureHelper == null) {
      throw new RuntimeException("surfaceTextureHelper not set.");
    }
    this.surfaceTextureHelper = surfaceTextureHelper;

    this.windowManager = (WindowManager) applicationContext.getSystemService(
        Context.WINDOW_SERVICE);
    this.mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
        Context.MEDIA_PROJECTION_SERVICE);
  }

  // VideoCapturer: starts projection + virtual display; width/height are
  // swapped up-front when the device is currently in landscape.
  @Override
  public synchronized void startCapture(
      final int width, final int height, final int ignoredFramerate) {
    //checkNotDisposed();

    this.isPortrait = isDeviceOrientationPortrait();
    if
(this.isPortrait) { + this.width = width; + this.height = height; + } else { + this.height = width; + this.width = height; + } + + mediaProjection = mediaProjectionManager.getMediaProjection( + Activity.RESULT_OK, mediaProjectionPermissionResultData); + + // Let MediaProjection callback use the SurfaceTextureHelper thread. + mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler()); + + createVirtualDisplay(); + capturerObserver.onCapturerStarted(true); + surfaceTextureHelper.startListening(this); + } + + @Override + public synchronized void stopCapture() { + checkNotDisposed(); + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + surfaceTextureHelper.stopListening(); + capturerObserver.onCapturerStopped(); + if (virtualDisplay != null) { + virtualDisplay.release(); + virtualDisplay = null; + } + if (mediaProjection != null) { + // Unregister the callback before stopping, otherwise the callback recursively + // calls this method. + mediaProjection.unregisterCallback(mediaProjectionCallback); + mediaProjection.stop(); + mediaProjection = null; + } + } + }); + } + + @Override + public synchronized void dispose() { + isDisposed = true; + } + + /** + * Changes output video format. This method can be used to scale the output + * video, or to change orientation when the captured screen is rotated for example. 
+ * + * @param width new output video width + * @param height new output video height + * @param ignoredFramerate ignored + */ + @Override + public synchronized void changeCaptureFormat( + final int width, final int height, final int ignoredFramerate) { + checkNotDisposed(); + if (this.oldWidth != width || this.oldHeight != height) { + this.oldWidth = width; + this.oldHeight = height; + + if (oldHeight > oldWidth) { + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + if (virtualDisplay != null && surfaceTextureHelper != null) { + virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); + surfaceTextureHelper.setTextureSize(oldWidth, oldHeight); + virtualDisplay.resize(oldWidth, oldHeight, VIRTUAL_DISPLAY_DPI); + } + } + }); + } + + if (oldWidth > oldHeight) { + surfaceTextureHelper.setTextureSize(oldWidth, oldHeight); + virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); + final Handler handler = new Handler(Looper.getMainLooper()); + handler.postDelayed(new Runnable() { + @Override + public void run() { + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + if (virtualDisplay != null && surfaceTextureHelper != null) { + virtualDisplay.resize(oldWidth, oldHeight, VIRTUAL_DISPLAY_DPI); + } + } + }); + } + }, 700); + } + } + } + + private void createVirtualDisplay() { + surfaceTextureHelper.setTextureSize(width, height); + surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height); + virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height, + VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()), + null /* callback */, null /* callback handler */); + } + + @Override + public boolean isScreencast() { + return true; + } + + public long getNumCapturedFrames() { + return 
numCapturedFrames; + } +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 1981c5c965..9c36dce354 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -1,533 +1,1159 @@ package com.cloudwebrtc.webrtc; -import java.io.UnsupportedEncodingException; -import java.lang.ref.SoftReference; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import android.util.Base64; import android.util.Log; -import android.util.SparseArray; -import android.support.annotation.Nullable; +import androidx.annotation.Nullable; + +import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; +import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsArray; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import com.cloudwebrtc.webrtc.utils.Utils; + +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodChannel.Result; + +import java.lang.reflect.Field; +import java.math.BigInteger; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.UUID; import org.webrtc.AudioTrack; +import org.webrtc.CandidatePairChangeEvent; import org.webrtc.DataChannel; +import org.webrtc.DtmfSender; import org.webrtc.IceCandidate; import org.webrtc.MediaStream; import org.webrtc.MediaStreamTrack; import org.webrtc.PeerConnection; +import org.webrtc.RTCStats; +import org.webrtc.RTCStatsReport; +import org.webrtc.RtpCapabilities; +import org.webrtc.RtpParameters; import 
org.webrtc.RtpReceiver; -import org.webrtc.StatsObserver; -import org.webrtc.StatsReport; +import org.webrtc.RtpSender; +import org.webrtc.RtpTransceiver; import org.webrtc.VideoTrack; -import io.flutter.plugin.common.EventChannel; -import io.flutter.plugin.common.MethodChannel.Result; - class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.StreamHandler { - private final static String TAG = FlutterWebRTCPlugin.TAG; + private final static String TAG = FlutterWebRTCPlugin.TAG; + private final Map dataChannels = new HashMap<>(); + private final BinaryMessenger messenger; + private final String id; + private PeerConnection peerConnection; + private final PeerConnection.RTCConfiguration configuration; + final Map remoteStreams = new HashMap<>(); + final Map remoteTracks = new HashMap<>(); + final Map transceivers = new HashMap<>(); + private final StateProvider stateProvider; + private final EventChannel eventChannel; + private EventChannel.EventSink eventSink; + + PeerConnectionObserver(PeerConnection.RTCConfiguration configuration, StateProvider stateProvider, BinaryMessenger messenger, String id) { + this.configuration = configuration; + this.stateProvider = stateProvider; + this.messenger = messenger; + this.id = id; + + eventChannel = new EventChannel(messenger, "FlutterWebRTC/peerConnectionEvent" + id); + eventChannel.setStreamHandler(this); + } + + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + + @Override + public void onListen(Object o, EventChannel.EventSink sink) { + eventSink = new AnyThreadSink(sink); + } + + @Override + public void onCancel(Object o) { + eventSink = null; + } + + public PeerConnection getPeerConnection() { + return peerConnection; + } + + void setPeerConnection(PeerConnection peerConnection) { + this.peerConnection = peerConnection; + } + + void restartIce() { + 
peerConnection.restartIce(); + } + + void close() { + peerConnection.close(); + remoteStreams.clear(); + remoteTracks.clear(); + dataChannels.clear(); + } + + void dispose() { + this.close(); + peerConnection.dispose(); + eventChannel.setStreamHandler(null); + } + + void createDataChannel(String label, ConstraintsMap config, Result result) { + DataChannel.Init init = new DataChannel.Init(); + if (config != null) { + if (config.hasKey("id")) { + init.id = config.getInt("id"); + } + if (config.hasKey("ordered")) { + init.ordered = config.getBoolean("ordered"); + } + if (config.hasKey("maxRetransmits")) { + init.maxRetransmits = config.getInt("maxRetransmits"); + } + if (config.hasKey("protocol")) { + init.protocol = config.getString("protocol"); + } + if (config.hasKey("negotiated")) { + init.negotiated = config.getBoolean("negotiated"); + } + } + DataChannel dataChannel = peerConnection.createDataChannel(label, init); + // XXX RTP data channels are not defined by the WebRTC standard, have + // been deprecated in Chromium, and Google have decided (in 2015) to no + // longer support them (in the face of multiple reported issues of + // breakages). 
+ String flutterId = getNextDataChannelUUID(); + if (dataChannel != null) { + dataChannels.put(flutterId, dataChannel); + registerDataChannelObserver(flutterId, dataChannel); + + ConstraintsMap params = new ConstraintsMap(); + params.putInt("id", dataChannel.id()); + params.putString("label", dataChannel.label()); + params.putString("flutterId", flutterId); + result.success(params.toMap()); + } else { + resultError("createDataChannel", "Can't create data-channel for id: " + init.id, result); + } + } + + void dataChannelClose(String dataChannelId) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + dataChannel.close(); + dataChannels.remove(dataChannelId); + } else { + Log.d(TAG, "dataChannelClose() dataChannel is null"); + } + } + + void dataChannelSend(String dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); + dataChannel.send(buffer); + } else { + Log.d(TAG, "dataChannelSend() dataChannel is null"); + } + } + + void dataChannelGetBufferedAmount(String dataChannelId, Result result) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + ConstraintsMap params = new ConstraintsMap(); + params.putLong("bufferedAmount", dataChannel.bufferedAmount()); + result.success(params.toMap()); + } else { + Log.d(TAG, "dataChannelGetBufferedAmount() dataChannel is null"); + resultError("dataChannelGetBufferedAmount", "DataChannel is null", result); + } + } + + RtpTransceiver getRtpTransceiverById(String id) { + RtpTransceiver transceiver = transceivers.get(id); + if (null == transceiver) { + List transceivers = peerConnection.getTransceivers(); + for (RtpTransceiver t : transceivers) { + if (id.equals(t.getMid())) { + transceiver = t; + } + } + } + return transceiver; + } + + RtpSender getRtpSenderById(String id) { + List senders 
= peerConnection.getSenders(); + for (RtpSender sender : senders) { + if (id.equals(sender.id())) { + return sender; + } + } + return null; + } + + RtpReceiver getRtpReceiverById(String id) { + List receivers = peerConnection.getReceivers(); + for (RtpReceiver receiver : receivers) { + if (id.equals(receiver.id())) { + return receiver; + } + } + return null; + } + + void handleStatsReport(RTCStatsReport rtcStatsReport, Result result) { + Map reports = rtcStatsReport.getStatsMap(); + ConstraintsMap params = new ConstraintsMap(); + ConstraintsArray stats = new ConstraintsArray(); + + for (RTCStats report : reports.values()) { + ConstraintsMap report_map = new ConstraintsMap(); + + report_map.putString("id", report.getId()); + report_map.putString("type", report.getType()); + report_map.putDouble("timestamp", report.getTimestampUs()); + + Map values = report.getMembers(); + ConstraintsMap v_map = new ConstraintsMap(); + for (String key : values.keySet()) { + Object v = values.get(key); + if(v instanceof String) { + v_map.putString(key, (String)v); + } else if(v instanceof String[]) { + ConstraintsArray arr = new ConstraintsArray(); + for(String s : (String[])v) { + arr.pushString(s); + } + v_map.putArray(key, arr.toArrayList()); + } else if(v instanceof Integer) { + v_map.putInt(key, (Integer)v); + } else if(v instanceof Long) { + v_map.putLong(key, (Long)v); + } else if(v instanceof Double) { + v_map.putDouble(key, (Double)v); + } else if(v instanceof Boolean) { + v_map.putBoolean(key, (Boolean)v); + } else if(v instanceof BigInteger){ + v_map.putLong(key, ((BigInteger)v).longValue()); + } else if(v instanceof LinkedHashMap) { + ConstraintsMap m = new ConstraintsMap(); + for(Map.Entry entry : ((LinkedHashMap)v).entrySet()) { + Object value = entry.getValue(); + if(value instanceof String) { + m.putString(entry.getKey(), (String)value); + } else if(value instanceof Integer) { + m.putInt(entry.getKey(), (Integer)value); + } else if(value instanceof Long) { + 
m.putLong(entry.getKey(), (Long)value); + } else if(value instanceof Double) { + m.putDouble(entry.getKey(), (Double)value); + } else if(value instanceof Boolean) { + m.putBoolean(entry.getKey(), (Boolean)value); + } else if(value instanceof BigInteger) { + m.putLong(entry.getKey(), ((BigInteger)value).longValue()); + } else { + Log.d(TAG, "getStats() unknown type: " + value.getClass().getName() + " for [" + entry.getKey() + "] value: " + value); + } + } + v_map.putMap(key, m.toMap()); + } else { + Log.d(TAG, "getStats() unknown type: " + v.getClass().getName() + " for [" + key + "] value: " + v); + } + } + report_map.putMap("values", v_map.toMap()); + stats.pushMap(report_map); + } + + params.putArray("stats", stats.toArrayList()); + result.success(params.toMap()); + } - private final SparseArray dataChannels - = new SparseArray(); - private final String id; - private PeerConnection peerConnection; - final Map remoteStreams; - final Map remoteTracks; - private final FlutterWebRTCPlugin plugin; + void getStatsForTrack(String trackId, Result result) { + if (trackId == null || trackId.isEmpty()) { + resultError("peerConnectionGetStats", "MediaStreamTrack not found for id: " + trackId, result); + return; + } - EventChannel eventChannel; - EventChannel.EventSink eventSink; + RtpSender sender = null; + RtpReceiver receiver = null; + for (RtpSender s : peerConnection.getSenders()) { + if (s.track() != null && trackId.equals(s.track().id())) { + sender = s; + break; + } + } + for (RtpReceiver r : peerConnection.getReceivers()) { + if (r.track() != null && trackId.equals(r.track().id())) { + receiver = r; + break; + } + } + if (sender != null) { + peerConnection.getStats(sender, rtcStatsReport -> handleStatsReport(rtcStatsReport, result)); + } else if (receiver != null) { + peerConnection.getStats(receiver, rtcStatsReport -> handleStatsReport(rtcStatsReport, result)); + } else { + resultError("peerConnectionGetStats", "MediaStreamTrack not found for id: " + trackId, 
result); + } + } + + void getStats(final Result result) { + peerConnection.getStats( + rtcStatsReport -> handleStatsReport(rtcStatsReport, result)); + } + + @Override + public void onIceCandidate(final IceCandidate candidate) { + Log.d(TAG, "onIceCandidate"); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onCandidate"); + params.putMap("candidate", candidateToMap(candidate)); + sendEvent(params); + } + + @Override + public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) { + Log.d(TAG, "onSelectedCandidatePairChanged"); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onSelectedCandidatePairChanged"); + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("lastDataReceivedMs", event.lastDataReceivedMs); + candidateParams.putMap("local", candidateToMap(event.local)); + candidateParams.putMap("remote", candidateToMap(event.remote)); + candidateParams.putString("reason", event.reason); + params.putMap("candidate", candidateParams.toMap()); + sendEvent(params); + } + + @Override + public void onIceCandidatesRemoved(final IceCandidate[] candidates) { + Log.d(TAG, "onIceCandidatesRemoved"); + } + + @Override + public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "iceConnectionState"); + params.putString("state", Utils.iceConnectionStateString(iceConnectionState)); + sendEvent(params); + } + + @Override + public void onStandardizedIceConnectionChange(PeerConnection.IceConnectionState newState) { + + } + + @Override + public void onIceConnectionReceivingChange(boolean var1) { + } + + @Override + public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) { + Log.d(TAG, "onIceGatheringChange" + iceGatheringState.name()); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "iceGatheringState"); + 
params.putString("state", Utils.iceGatheringStateString(iceGatheringState)); + sendEvent(params); + } + + private String getUIDForStream(MediaStream mediaStream) { + for (Iterator> i + = remoteStreams.entrySet().iterator(); + i.hasNext(); ) { + Map.Entry e = i.next(); + if (e.getValue().equals(mediaStream)) { + return e.getKey(); + } + } + return null; + } + + @Override + public void onAddStream(MediaStream mediaStream) { + String streamUID = null; + String streamId = mediaStream.getId(); + // The native WebRTC implementation has a special concept of a default + // MediaStream instance with the label default that the implementation + // reuses. + if ("default".equals(streamId)) { + for (Map.Entry e + : remoteStreams.entrySet()) { + if (e.getValue().equals(mediaStream)) { + streamUID = e.getKey(); + break; + } + } + } - /* - Map event = new HashMap<>(); - event.put("event", "onSomeEvent"); - event.put("param1", 111); - event.put("width", 176); - event.put("height", 144); - nativeToDartEventSink.success(event); - */ + if (streamUID == null) { + streamUID = stateProvider.getNextStreamUUID(); + remoteStreams.put(streamId, mediaStream); + } - /** - * The StringBuilder cache utilized by {@link #statsToJSON} in - * order to minimize the number of allocations of StringBuilder - * instances and, more importantly, the allocations of its char - * buffer in an attempt to improve performance. 
- */ - private SoftReference statsToJSONStringBuilder - = new SoftReference(null); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onAddStream"); + params.putString("streamId", streamId); + params.putString("ownerTag", id); - PeerConnectionObserver(FlutterWebRTCPlugin plugin, String id) { - this.plugin = plugin; - this.id = id; - this.remoteStreams = new HashMap(); - this.remoteTracks = new HashMap(); + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + for (int i = 0; i < mediaStream.videoTracks.size(); i++) { + VideoTrack track = mediaStream.videoTracks.get(i); + String trackId = track.id(); - this.eventChannel = - new EventChannel( - plugin.registrar().messenger(), - "cloudwebrtc.com/WebRTC/peerConnectoinEvent" + id); - eventChannel.setStreamHandler(this); - this.eventSink = null; - } + remoteTracks.put(trackId, track); - @Override - public void onListen(Object o, EventChannel.EventSink sink) { - eventSink = sink; + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", "Video"); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + videoTracks.pushMap(trackInfo); + } + for (int i = 0; i < mediaStream.audioTracks.size(); i++) { + AudioTrack track = mediaStream.audioTracks.get(i); + String trackId = track.id(); + + remoteTracks.put(trackId, track); + + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", "Audio"); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + audioTracks.pushMap(trackInfo); } + params.putArray("audioTracks", 
audioTracks.toArrayList()); + params.putArray("videoTracks", videoTracks.toArrayList()); - @Override - public void onCancel(Object o) { - eventSink = null; + sendEvent(params); + } + + void sendEvent(ConstraintsMap event) { + if (eventSink != null) { + eventSink.success(event.toMap()); } + } + + @Override + public void onRemoveStream(MediaStream mediaStream) { - PeerConnection getPeerConnection() { - return peerConnection; + String streamId = mediaStream.getId(); + + for (VideoTrack track : mediaStream.videoTracks) { + this.remoteTracks.remove(track.id()); + } + for (AudioTrack track : mediaStream.audioTracks) { + this.remoteTracks.remove(track.id()); } - void setPeerConnection(PeerConnection peerConnection) { - this.peerConnection = peerConnection; + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onRemoveStream"); + params.putString("streamId", streamId); + sendEvent(params); + } + + @Override + public void onTrack(RtpTransceiver transceiver) { + } + + @Override + public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { + Log.d(TAG, "onAddTrack"); + // for plan-b + for (MediaStream stream : mediaStreams) { + String streamId = stream.getId(); + MediaStreamTrack track = receiver.track(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onAddTrack"); + params.putString("streamId", streamId); + params.putString("ownerTag", id); + params.putString("trackId", track.id()); + + String trackId = track.id(); + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", track.kind()); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + params.putMap("track", trackInfo.toMap()); + sendEvent(params); + + if ("audio".equals(track.kind())) { + AudioSwitchManager.instance.start(); + } } - void close() { 
- eventChannel.setStreamHandler(null); - peerConnection.close(); - peerConnection.dispose(); - remoteStreams.clear(); - remoteTracks.clear(); - dataChannels.clear(); + // For unified-plan + ConstraintsMap params = new ConstraintsMap(); + ConstraintsArray streams = new ConstraintsArray(); + for (int i = 0; i < mediaStreams.length; i++) { + MediaStream stream = mediaStreams[i]; + streams.pushMap(new ConstraintsMap(mediaStreamToMap(stream))); } - void createDataChannel(String label, ConstraintsMap config, Result result) { - DataChannel.Init init = new DataChannel.Init(); - if (config != null) { - if (config.hasKey("id")) { - init.id = config.getInt("id"); - } - if (config.hasKey("ordered")) { - init.ordered = config.getBoolean("ordered"); - } - if (config.hasKey("maxRetransmitTime")) { - init.maxRetransmitTimeMs = config.getInt("maxRetransmitTime"); - } - if (config.hasKey("maxRetransmits")) { - init.maxRetransmits = config.getInt("maxRetransmits"); - } - if (config.hasKey("protocol")) { - init.protocol = config.getString("protocol"); - } - if (config.hasKey("negotiated")) { - init.negotiated = config.getBoolean("negotiated"); - } - } - DataChannel dataChannel = peerConnection.createDataChannel(label, init); - // XXX RTP data channels are not defined by the WebRTC standard, have - // been deprecated in Chromium, and Google have decided (in 2015) to no - // longer support them (in the face of multiple reported issues of - // breakages). 
- int dataChannelId = init.id; - if (-1 != dataChannelId) { - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); + params.putString("event", "onTrack"); + params.putArray("streams", streams.toArrayList()); + params.putMap("track", mediaTrackToMap(receiver.track())); + params.putMap("receiver", rtpReceiverToMap(receiver)); + + if (this.configuration.sdpSemantics == PeerConnection.SdpSemantics.UNIFIED_PLAN) { + List transceivers = peerConnection.getTransceivers(); + for (RtpTransceiver transceiver : transceivers) { + if (transceiver.getReceiver() != null && receiver.id().equals(transceiver.getReceiver().id())) { + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = stateProvider.getNextStreamUUID(); + this.transceivers.put(transceiverId,transceiver); + } + params.putMap("transceiver", transceiverToMap(transceiverId, transceiver)); } + } + } + sendEvent(params); + } + + @Override + public void onRemoveTrack(RtpReceiver rtpReceiver) { + Log.d(TAG, "onRemoveTrack"); + + MediaStreamTrack track = rtpReceiver.track(); + String trackId = track.id(); + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", track.kind()); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onRemoveTrack"); + params.putString("trackId", track.id()); + params.putMap("track", trackInfo.toMap()); + sendEvent(params); + } + + @Override + public void onDataChannel(DataChannel dataChannel) { + String flutterId = getNextDataChannelUUID(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "didOpenDataChannel"); + params.putInt("id", dataChannel.id()); + params.putString("label", 
dataChannel.label()); + params.putString("flutterId", flutterId); + + dataChannels.put(flutterId, dataChannel); + registerDataChannelObserver(flutterId, dataChannel); + + sendEvent(params); + } + + private void registerDataChannelObserver(String dcId, DataChannel dataChannel) { + // DataChannel.registerObserver implementation does not allow to + // unregister, so the observer is registered here and is never + // unregistered + dataChannel.registerObserver( + new DataChannelObserver(messenger, id, dcId, dataChannel)); + } + + @Override + public void onRenegotiationNeeded() { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onRenegotiationNeeded"); + sendEvent(params); + } + + @Override + public void onSignalingChange(PeerConnection.SignalingState signalingState) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "signalingState"); + params.putString("state", Utils.signalingStateString(signalingState)); + sendEvent(params); + } + + @Override + public void onConnectionChange(PeerConnection.PeerConnectionState connectionState) { + Log.d(TAG, "onConnectionChange" + connectionState.name()); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "peerConnectionState"); + params.putString("state", Utils.connectionStateString(connectionState)); + sendEvent(params); + } + + @Nullable + private String transceiverDirectionString(RtpTransceiver.RtpTransceiverDirection direction) { + switch (direction) { + case SEND_RECV: + return "sendrecv"; + case SEND_ONLY: + return "sendonly"; + case RECV_ONLY: + return "recvonly"; + case INACTIVE: + return "inactive"; + case STOPPED: + return "stopped"; + } + return null; + } + + private RtpTransceiver.RtpTransceiverDirection stringToTransceiverDirection(String direction) { + switch (direction) { + case "sendrecv": + return RtpTransceiver.RtpTransceiverDirection.SEND_RECV; + case "sendonly": + return RtpTransceiver.RtpTransceiverDirection.SEND_ONLY; + case 
"recvonly": + return RtpTransceiver.RtpTransceiverDirection.RECV_ONLY; + case "inactive": + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + case "stopped": + return RtpTransceiver.RtpTransceiverDirection.STOPPED; + } + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + } + + private MediaStreamTrack.MediaType stringToMediaType(String mediaType) { + MediaStreamTrack.MediaType type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if (mediaType.equals("audio")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + else if (mediaType.equals("video")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + return type; + } + + private RtpParameters.Encoding mapToEncoding(Map parameters) { + RtpParameters.Encoding encoding = new RtpParameters.Encoding((String) parameters.get("rid"), true, 1.0); + + if (parameters.get("active") != null) { + encoding.active = (Boolean) parameters.get("active"); + } - result.success(null); + if (parameters.get("ssrc") != null) { + encoding.ssrc = ((Integer) parameters.get("ssrc")).longValue(); } - void dataChannelClose(int dataChannelId) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - dataChannel.close(); - dataChannels.remove(dataChannelId); - } else { - Log.d(TAG, "dataChannelClose() dataChannel is null"); - } + if (parameters.get("minBitrate") != null) { + encoding.minBitrateBps = (Integer) parameters.get("minBitrate"); } - void dataChannelSend(int dataChannelId, String data, String type) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - byte[] byteArray; - if (type.equals("text")) { - try { - byteArray = data.getBytes("UTF-8"); - } catch (UnsupportedEncodingException e) { - Log.d(TAG, "Could not encode text string as UTF-8."); - return; - } - } else if (type.equals("binary")) { - byteArray = Base64.decode(data, Base64.NO_WRAP); - } else { - Log.e(TAG, "Unsupported data type: " + type); - return; - } - ByteBuffer byteBuffer = 
ByteBuffer.wrap(byteArray); - DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, type.equals("binary")); - dataChannel.send(buffer); - } else { - Log.d(TAG, "dataChannelSend() dataChannel is null"); - } + if (parameters.get("maxBitrate") != null) { + encoding.maxBitrateBps = (Integer) parameters.get("maxBitrate"); } - void getStats(String trackId, final Result result) { - MediaStreamTrack track = null; - if (trackId == null - || trackId.isEmpty() - || (track = plugin.localTracks.get(trackId)) != null - || (track = remoteTracks.get(trackId)) != null) { - peerConnection.getStats( - new StatsObserver() { - @Override - public void onComplete(StatsReport[] reports) { - - final int reportCount = reports.length; - ConstraintsMap params = new ConstraintsMap(); - ConstraintsArray stats = new ConstraintsArray(); - - for (int i = 0; i < reportCount; ++i) { - StatsReport report = reports[i]; - ConstraintsMap report_map = new ConstraintsMap(); - - report_map.putString("id", report.id); - report_map.putString("type", report.type); - report_map.putDouble("timestamp", report.timestamp); - - StatsReport.Value[] values = report.values; - ConstraintsMap v_map = new ConstraintsMap(); - final int valueCount = values.length; - for (int j = 0; j < valueCount; ++j) { - StatsReport.Value v = values[j]; - v_map.putString(v.name, v.value); - } - - report_map.putMap("values", v_map.toMap()); - stats.pushMap(report_map); - } - - params.putArray("stats", stats.toArrayList()); - result.success(params.toMap()); - } - }, - track); - } else { - Log.e(TAG, "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId); - result.error("peerConnectionGetStats", - "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId, - null); - } + if (parameters.get("maxFramerate") != null) { + encoding.maxFramerate = (Integer) parameters.get("maxFramerate"); } - @Override - public void onIceCandidate(final IceCandidate candidate) { - Log.d(TAG, "onIceCandidate"); - 
ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onCandidate"); - ConstraintsMap candidateParams = new ConstraintsMap(); - candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); - candidateParams.putString("sdpMid", candidate.sdpMid); - candidateParams.putString("candidate", candidate.sdp); - params.putMap("candidate", candidateParams.toMap()); - sendEvent(params); + if (parameters.get("numTemporalLayers") != null) { + encoding.numTemporalLayers = (Integer) parameters.get("numTemporalLayers"); } - @Override - public void onIceCandidatesRemoved(final IceCandidate[] candidates) { - Log.d(TAG, "onIceCandidatesRemoved"); + if (parameters.get("scaleResolutionDownBy") != null) { + encoding.scaleResolutionDownBy = (Double) parameters.get("scaleResolutionDownBy"); } - @Override - public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "iceConnectionState"); - params.putString("state", iceConnectionStateString(iceConnectionState)); - sendEvent(params); + if (parameters.get("scalabilityMode") != null) { + encoding.scalabilityMode = (String) parameters.get("scalabilityMode"); } - @Override - public void onIceConnectionReceivingChange(boolean var1) { + return encoding; + } + + private RtpTransceiver.RtpTransceiverInit mapToRtpTransceiverInit(Map parameters) { + List streamIds = (List) parameters.get("streamIds"); + List> encodingsParams = (List>) parameters.get("sendEncodings"); + String direction = (String) parameters.get("direction"); + List sendEncodings = new ArrayList<>(); + RtpTransceiver.RtpTransceiverInit init = null; + + if (streamIds == null) { + streamIds = new ArrayList(); } - @Override - public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) { - Log.d(TAG, "onIceGatheringChange" + iceGatheringState.name()); - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", 
"iceGatheringState"); - params.putString("state", iceGatheringStateString(iceGatheringState)); - sendEvent(params); + if (direction == null) { + direction = "sendrecv"; } - private String getReactTagForStream(MediaStream mediaStream) { - for (Iterator> i - = remoteStreams.entrySet().iterator(); - i.hasNext();) { - Map.Entry e = i.next(); - if (e.getValue().equals(mediaStream)) { - return e.getKey(); - } - } - return null; - } - - @Override - public void onAddStream(MediaStream mediaStream) { - String streamReactTag = null; - String streamId = mediaStream.label(); - // The native WebRTC implementation has a special concept of a default - // MediaStream instance with the label default that the implementation - // reuses. - if ("default".equals(streamId)) { - for (Map.Entry e - : remoteStreams.entrySet()) { - if (e.getValue().equals(mediaStream)) { - streamReactTag = e.getKey(); - break; - } - } + if (encodingsParams != null) { + for (int i = 0; i < encodingsParams.size(); i++) { + Map params = encodingsParams.get(i); + sendEncodings.add(mapToEncoding(params)); + } + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction), streamIds, sendEncodings); + } else { + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction), streamIds); + } + return init; + } + + private RtpParameters updateRtpParameters(RtpParameters parameters, Map newParameters) { + // new + final List> encodings = (List>) newParameters.get("encodings"); + // current + final List nativeEncodings = parameters.encodings; + + String degradationPreference = (String) newParameters.get("degradationPreference"); + if (degradationPreference != null) { + parameters.degradationPreference = RtpParameters.DegradationPreference.valueOf(degradationPreference.toUpperCase().replace("-", "_")); + } + + for (Map encoding : encodings) { + RtpParameters.Encoding currentParams = null; + String rid = (String) encoding.get("rid"); + + // find by rid + if (rid != null) { 
+ for (RtpParameters.Encoding x : nativeEncodings) { + if (rid.equals(x.rid)) { + currentParams = x; + break; + } } + } - if (streamReactTag == null){ - streamReactTag = plugin.getNextStreamUUID(); - remoteStreams.put(streamId, mediaStream); + // fall back to index + if (currentParams == null) { + int idx = encodings.indexOf(encoding); + if (idx < nativeEncodings.size()) { + currentParams = nativeEncodings.get(idx); } + } + + if (currentParams != null) { + Boolean active = (Boolean) encoding.get("active"); + if (active != null) currentParams.active = active; + Integer maxBitrate = (Integer) encoding.get("maxBitrate"); + if (maxBitrate != null) currentParams.maxBitrateBps = maxBitrate; + Integer minBitrate = (Integer) encoding.get("minBitrate"); + if (minBitrate != null) currentParams.minBitrateBps = minBitrate; + Integer maxFramerate = (Integer) encoding.get("maxFramerate"); + if (maxFramerate != null) currentParams.maxFramerate = maxFramerate; + Integer numTemporalLayers = (Integer) encoding.get("numTemporalLayers"); + if (numTemporalLayers != null) currentParams.numTemporalLayers = numTemporalLayers; + Double scaleResolutionDownBy = (Double) encoding.get("scaleResolutionDownBy"); + if (scaleResolutionDownBy != null) + currentParams.scaleResolutionDownBy = scaleResolutionDownBy; + } + } - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onAddStream"); - params.putString("streamId", streamId); + return parameters; + } - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); + private Map rtpParametersToMap(RtpParameters rtpParameters) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("transactionId", rtpParameters.transactionId); + if(rtpParameters.degradationPreference != null) { + info.putString("degradationPreference", rtpParameters.degradationPreference.name().toLowerCase().replace("_", "-")); + } + ConstraintsMap rtcp = new ConstraintsMap(); + 
rtcp.putString("cname", rtpParameters.getRtcp().getCname()); + rtcp.putBoolean("reducedSize", rtpParameters.getRtcp().getReducedSize()); + info.putMap("rtcp", rtcp.toMap()); + + ConstraintsArray headerExtensions = new ConstraintsArray(); + for (RtpParameters.HeaderExtension extension : rtpParameters.getHeaderExtensions()) { + ConstraintsMap map = new ConstraintsMap(); + map.putString("uri", extension.getUri()); + map.putInt("id", extension.getId()); + map.putBoolean("encrypted", extension.getEncrypted()); + headerExtensions.pushMap(map); + } + info.putArray("headerExtensions", headerExtensions.toArrayList()); + + ConstraintsArray encodings = new ConstraintsArray(); + for (RtpParameters.Encoding encoding : rtpParameters.encodings) { + ConstraintsMap map = new ConstraintsMap(); + map.putBoolean("active", encoding.active); + if (encoding.rid != null) { + map.putString("rid", encoding.rid); + } + if (encoding.maxBitrateBps != null) { + map.putInt("maxBitrate", encoding.maxBitrateBps); + } + if (encoding.minBitrateBps != null) { + map.putInt("minBitrate", encoding.minBitrateBps); + } + if (encoding.maxFramerate != null) { + map.putInt("maxFramerate", encoding.maxFramerate); + } + if (encoding.numTemporalLayers != null) { + map.putInt("numTemporalLayers", encoding.numTemporalLayers); + } + if (encoding.scaleResolutionDownBy != null) { + map.putDouble("scaleResolutionDownBy", encoding.scaleResolutionDownBy); + } + if (encoding.ssrc != null) { + map.putLong("ssrc", encoding.ssrc); + } + encodings.pushMap(map); + } + info.putArray("encodings", encodings.toArrayList()); + + ConstraintsArray codecs = new ConstraintsArray(); + for (RtpParameters.Codec codec : rtpParameters.codecs) { + ConstraintsMap map = new ConstraintsMap(); + map.putString("name", codec.name); + map.putInt("payloadType", codec.payloadType); + map.putInt("clockRate", codec.clockRate); + if (codec.numChannels != null) { + map.putInt("numChannels", codec.numChannels); + } + map.putMap("parameters", new 
HashMap(codec.parameters)); + try { + Field field = codec.getClass().getDeclaredField("kind"); + field.setAccessible(true); + if (field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO)) { + map.putString("kind", "audio"); + } else if (field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO)) { + map.putString("kind", "video"); + } + } catch (NoSuchFieldException e1) { + e1.printStackTrace(); + } catch (IllegalArgumentException e1) { + e1.printStackTrace(); + } catch (IllegalAccessException e1) { + e1.printStackTrace(); + } + codecs.pushMap(map); + } - for (int i = 0; i < mediaStream.videoTracks.size(); i++) { - VideoTrack track = mediaStream.videoTracks.get(i); - String trackId = track.id(); + info.putArray("codecs", codecs.toArrayList()); + return info.toMap(); + } - remoteTracks.put(trackId, track); + @Nullable + private Map mediaStreamToMap(MediaStream stream) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("streamId", stream.getId()); + params.putString("ownerTag", id); + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", "Video"); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - videoTracks.pushMap(trackInfo); - } - for (int i = 0; i < mediaStream.audioTracks.size(); i++) { - AudioTrack track = mediaStream.audioTracks.get(i); - String trackId = track.id(); - - remoteTracks.put(trackId, track); - - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", "Audio"); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - 
trackInfo.putBoolean("remote", true); - audioTracks.pushMap(trackInfo); - } - params.putArray("audioTracks", audioTracks.toArrayList()); - params.putArray("videoTracks", videoTracks.toArrayList()); + for (MediaStreamTrack track : stream.audioTracks) { + audioTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); + } - sendEvent(params); + for (MediaStreamTrack track : stream.videoTracks) { + videoTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); } + params.putArray("audioTracks", audioTracks.toArrayList()); + params.putArray("videoTracks", videoTracks.toArrayList()); + return params.toMap(); + } + + @Nullable + private Map mediaTrackToMap(MediaStreamTrack track) { + ConstraintsMap info = new ConstraintsMap(); + if (track != null) { + info.putString("id", track.id()); + info.putString("label", track.getClass() == VideoTrack.class ? "video" : "audio"); + info.putString("kind", track.kind()); + info.putBoolean("enabled", track.enabled()); + info.putString("readyState", track.state().toString()); + } + return info.toMap(); + } + + private Map dtmfSenderToMap(DtmfSender dtmfSender, String id) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("dtmfSenderId", id); + if (dtmfSender != null) { + info.putInt("interToneGap", dtmfSender.interToneGap()); + info.putInt("duration", dtmfSender.duration()); + } + return info.toMap(); + } + + private Map rtpSenderToMap(RtpSender sender) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("senderId", sender.id()); + info.putBoolean("ownsTrack", true); + info.putMap("dtmfSender", dtmfSenderToMap(sender.dtmf(), sender.id())); + info.putMap("rtpParameters", rtpParametersToMap(sender.getParameters())); + info.putMap("track", mediaTrackToMap(sender.track())); + return info.toMap(); + } + + private Map rtpReceiverToMap(RtpReceiver receiver) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("receiverId", receiver.id()); + info.putMap("rtpParameters", 
rtpParametersToMap(receiver.getParameters())); + info.putMap("track", mediaTrackToMap(receiver.track())); + return info.toMap(); + } + + Map transceiverToMap(String transceiverId, RtpTransceiver transceiver) { + ConstraintsMap info = new ConstraintsMap(); + info.putString("transceiverId", transceiverId); + if (transceiver.getMid() == null) { + info.putString("mid", ""); + } else { + info.putString("mid", transceiver.getMid()); + } + info.putString("direction", transceiverDirectionString(transceiver.getDirection())); + info.putMap("sender", rtpSenderToMap(transceiver.getSender())); + info.putMap("receiver", rtpReceiverToMap(transceiver.getReceiver())); + return info.toMap(); + } + + Map candidateToMap(IceCandidate candidate) { + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); + candidateParams.putString("sdpMid", candidate.sdpMid); + candidateParams.putString("candidate", candidate.sdp); + return candidateParams.toMap(); + } + + public void addTrack(MediaStreamTrack track, List streamIds, Result result) { + RtpSender sender = peerConnection.addTrack(track, streamIds); + result.success(rtpSenderToMap(sender)); + } + + public void removeTrack(String senderId, Result result) { + RtpSender sender = getRtpSenderById(senderId); + if (sender == null) { + resultError("removeTrack", "sender is null", result); + return; + } + boolean res = peerConnection.removeTrack(sender); + Map params = new HashMap<>(); + params.put("result", res); + result.success(params); + } + + public void addTransceiver(MediaStreamTrack track, Map transceiverInit, Result result) { + RtpTransceiver transceiver; + if (transceiverInit != null) { + transceiver = peerConnection.addTransceiver(track, mapToRtpTransceiverInit(transceiverInit)); + } else { + transceiver = peerConnection.addTransceiver(track); + } + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = 
stateProvider.getNextStreamUUID(); + } + transceivers.put(transceiverId, transceiver); + result.success(transceiverToMap(transceiverId, transceiver)); + } + + public void addTransceiverOfType(String mediaType, Map transceiverInit, Result result) { + RtpTransceiver transceiver; + if (transceiverInit != null) { + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType), mapToRtpTransceiverInit(transceiverInit)); + } else { + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType)); + } + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = stateProvider.getNextStreamUUID(); + } + transceivers.put(transceiverId, transceiver); + result.success(transceiverToMap(transceiverId, transceiver)); + } + + public void rtpTransceiverSetDirection(String direction, String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverSetDirection", "transceiver is null", result); + return; + } + transceiver.setDirection(stringToTransceiverDirection(direction)); + result.success(null); + } + + public void rtpTransceiverSetCodecPreferences(String transceiverId, List> codecs, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverSetCodecPreferences", "transceiver is null", result); + return; + } + List preferedCodecs = new ArrayList<>(); + for(Map codec : codecs) { + RtpCapabilities.CodecCapability codecCapability = new RtpCapabilities.CodecCapability(); + String mimeType = (String) codec.get("mimeType"); + List mimeTypeParts = Arrays.asList(mimeType.split("/")); + codecCapability.name = mimeTypeParts.get(1); + codecCapability.kind = stringToMediaType(mimeTypeParts.get(0)); + codecCapability.mimeType = mimeType; + codecCapability.clockRate = (int) codec.get("clockRate"); + if(codec.get("numChannels") != null) + 
codecCapability.numChannels = (int) codec.get("numChannels"); + if(codec.get("sdpFmtpLine") != null && codec.get("sdpFmtpLine") != "") { + String sdpFmtpLine = (String) codec.get("sdpFmtpLine"); + codecCapability.parameters = new HashMap<>(); + String[] parameters = sdpFmtpLine.split(";"); + for(String parameter : parameters) { + if(parameter.contains("=")) { + List parameterParts = Arrays.asList(parameter.split("=")); + codecCapability.parameters.put(parameterParts.get(0), parameterParts.get(1)); + } else { + codecCapability.parameters.put("", parameter); + } + } + } else { + codecCapability.parameters = new HashMap<>(); + } + preferedCodecs.add(codecCapability); + } + transceiver.setCodecPreferences(preferedCodecs); + result.success(null); + } + + public void rtpTransceiverGetDirection(String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverGetDirection", "transceiver is null", result); + return; + } + ConstraintsMap params = new ConstraintsMap(); + params.putString("result", transceiverDirectionString(transceiver.getDirection())); + result.success(params.toMap()); + } + + public void rtpTransceiverGetCurrentDirection(String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverGetCurrentDirection", "transceiver is null", result); + return; + } + RtpTransceiver.RtpTransceiverDirection direction = transceiver.getCurrentDirection(); + if (direction == null) { + result.success(null); + } else { + ConstraintsMap params = new ConstraintsMap(); + params.putString("result", transceiverDirectionString(direction)); + result.success(params.toMap()); + } + } - void sendEvent(ConstraintsMap event) { - if(eventSink != null ) - eventSink.success(event.toMap()); + public void rtpTransceiverStop(String transceiverId, Result result) { + RtpTransceiver transceiver = 
getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverStop", "transceiver is null", result); + return; + } + transceiver.stop(); + result.success(null); + } + + public void rtpSenderSetParameters(String rtpSenderId, Map parameters, Result result) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetParameters", "sender is null", result); + return; + } + final RtpParameters updatedParameters = updateRtpParameters(sender.getParameters(), parameters); + final Boolean success = sender.setParameters(updatedParameters); + ConstraintsMap params = new ConstraintsMap(); + params.putBoolean("result", success); + result.success(params.toMap()); + } + + public void rtpSenderSetTrack(String rtpSenderId, MediaStreamTrack track, Result result, boolean replace) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetTrack", "sender is null", result); + return; + } + sender.setTrack(track, false); + result.success(null); + } + + public void rtpSenderSetStreams(String rtpSenderId, List streamIds, Result result) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetStream", "sender is null", result); + return; + } + sender.setStreams(streamIds); + result.success(null); + } + + public void getSenders(Result result) { + List senders = peerConnection.getSenders(); + ConstraintsArray sendersParams = new ConstraintsArray(); + for (RtpSender sender : senders) { + sendersParams.pushMap(new ConstraintsMap(rtpSenderToMap(sender))); + } + ConstraintsMap params = new ConstraintsMap(); + params.putArray("senders", sendersParams.toArrayList()); + result.success(params.toMap()); + } + + public void getReceivers(Result result) { + List receivers = peerConnection.getReceivers(); + ConstraintsArray receiversParams = new ConstraintsArray(); + for (RtpReceiver receiver : receivers) { + 
receiversParams.pushMap(new ConstraintsMap(rtpReceiverToMap(receiver))); } + ConstraintsMap params = new ConstraintsMap(); + params.putArray("receivers", receiversParams.toArrayList()); + result.success(params.toMap()); + } + + public void getTransceivers(Result result) { + List transceivers = peerConnection.getTransceivers(); + ConstraintsArray transceiversParams = new ConstraintsArray(); + for (RtpTransceiver transceiver : transceivers) { + String transceiverId = transceiver.getMid(); + if (null == transceiverId) { + transceiverId = stateProvider.getNextStreamUUID(); + this.transceivers.put(transceiverId,transceiver); + } + transceiversParams.pushMap(new ConstraintsMap(transceiverToMap(transceiverId, transceiver))); + } + ConstraintsMap params = new ConstraintsMap(); + params.putArray("transceivers", transceiversParams.toArrayList()); + result.success(params.toMap()); + } + + protected MediaStreamTrack getTransceiversTrack(String trackId) { + if (this.configuration.sdpSemantics != PeerConnection.SdpSemantics.UNIFIED_PLAN) { + return null; + } + MediaStreamTrack track = null; + List transceivers = peerConnection.getTransceivers(); + for (RtpTransceiver transceiver : transceivers) { + RtpReceiver receiver = transceiver.getReceiver(); + if (receiver != null) { + if (receiver.track() != null && receiver.track().id().equals(trackId)) { + track = receiver.track(); + break; + } + } + } + return track; + } - @Override - public void onRemoveStream(MediaStream mediaStream) { + public String getNextDataChannelUUID() { + String uuid; - String streamId = mediaStream.label(); + do { + uuid = UUID.randomUUID().toString(); + } while (dataChannels.get(uuid) != null); - for (VideoTrack track : mediaStream.videoTracks) { - this.remoteTracks.remove(track.id()); - } - for (AudioTrack track : mediaStream.audioTracks) { - this.remoteTracks.remove(track.id()); - } + return uuid; + } - this.remoteStreams.remove(streamId); - ConstraintsMap params = new ConstraintsMap(); - 
params.putString("event", "onRemoveStream"); - params.putString("streamId", streamId); - sendEvent(params); - } - - @Override - public void onAddTrack(MediaStream mediaStream,MediaStreamTrack track){ - Log.d(TAG, "onAddTrack"); - - String streamId = mediaStream.label(); - String streamReactTag = streamId; - - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onAddTrack"); - params.putString("streamId", streamId); - params.putString("trackId", track.id()); - - String trackId = track.id(); - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", track.kind()); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - - params.putMap("track", trackInfo.toMap()); - - sendEvent(params); - } - - @Override - public void onRemoveTrack(MediaStream mediaStream,MediaStreamTrack track){ - Log.d(TAG, "onRemoveTrack"); - String streamId = mediaStream.label(); - String streamReactTag = streamId; - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onRemoveTrack"); - params.putString("streamId", streamId); - params.putString("trackId", track.id()); - - String trackId = track.id(); - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", track.kind()); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - - params.putMap("track", trackInfo.toMap()); - - sendEvent(params); - } - - - @Override - public void onDataChannel(DataChannel dataChannel) { - // XXX Unfortunately, the Java WebRTC API doesn't expose the id - // of the underlying C++/native DataChannel (even though the - // WebRTC standard defines the DataChannel.id 
property). As a - // workaround, generated an id which will surely not clash with - // the ids of the remotely-opened (and standard-compliant - // locally-opened) DataChannels. - int dataChannelId = -1; - // The RTCDataChannel.id space is limited to unsigned short by - // the standard: - // https://www.w3.org/TR/webrtc/#dom-datachannel-id. - // Additionally, 65535 is reserved due to SCTP INIT and - // INIT-ACK chunks only allowing a maximum of 65535 streams to - // be negotiated (as defined by the WebRTC Data Channel - // Establishment Protocol). - for (int i = 65536; i <= Integer.MAX_VALUE; ++i) { - if (null == dataChannels.get(i, null)) { - dataChannelId = i; - break; - } - } - if (-1 == dataChannelId) { - return; - } - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "didOpenDataChannel"); - params.putInt("id", dataChannelId); - params.putString("label", dataChannel.label()); - - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); - - sendEvent(params); - } - - private void registerDataChannelObserver(int dcId, DataChannel dataChannel) { - // DataChannel.registerObserver implementation does not allow to - // unregister, so the observer is registered here and is never - // unregistered - dataChannel.registerObserver( - new DataChannelObserver(plugin, id, dcId, dataChannel)); - } - - @Override - public void onRenegotiationNeeded() { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onRenegotiationNeeded"); - sendEvent(params); - } - - @Override - public void onSignalingChange(PeerConnection.SignalingState signalingState) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "signalingState"); - params.putString("state", signalingStateString(signalingState)); - sendEvent(params); - } - - @Override - public void onAddRtpReceiver(final RtpReceiver receiver, final MediaStream[] mediaStreams) { - Log.d(TAG, "onAddRtpReceiver"); - } - - 
@Nullable - private String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { - switch (iceConnectionState) { - case NEW: - return "new"; - case CHECKING: - return "checking"; - case CONNECTED: - return "connected"; - case COMPLETED: - return "completed"; - case FAILED: - return "failed"; - case DISCONNECTED: - return "disconnected"; - case CLOSED: - return "closed"; - } - return null; - } - - @Nullable - private String iceGatheringStateString(PeerConnection.IceGatheringState iceGatheringState) { - switch (iceGatheringState) { - case NEW: - return "new"; - case GATHERING: - return "gathering"; - case COMPLETE: - return "complete"; - } - return null; - } - - @Nullable - private String signalingStateString(PeerConnection.SignalingState signalingState) { - switch (signalingState) { - case STABLE: - return "stable"; - case HAVE_LOCAL_OFFER: - return "have-local-offer"; - case HAVE_LOCAL_PRANSWER: - return "have-local-pranswer"; - case HAVE_REMOTE_OFFER: - return "have-remote-offer"; - case HAVE_REMOTE_PRANSWER: - return "have-remote-pranswer"; - case CLOSED: - return "closed"; - } - return null; - } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt b/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt new file mode 100644 index 0000000000..7ad366d387 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt @@ -0,0 +1,237 @@ +package com.cloudwebrtc.webrtc + +import org.webrtc.* +import java.util.concurrent.Callable +import java.util.concurrent.ExecutorService +import java.util.concurrent.Executors + +/* +Copyright 2017, Lyo Kato (Original Author) +Copyright 2017-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + */ +internal class SimulcastVideoEncoderFactoryWrapper( + sharedContext: EglBase.Context?, + enableIntelVp8Encoder: Boolean, + enableH264HighProfile: Boolean +) : VideoEncoderFactory { + + /** + * Factory that prioritizes software encoder. + * + * When the selected codec can't be handled by the software encoder, + * it uses the hardware encoder as a fallback. However, this class is + * primarily used to address an issue in libwebrtc, and does not have + * purposeful usecase itself. + * + * To use simulcast in libwebrtc, SimulcastEncoderAdapter is used. + * SimulcastEncoderAdapter takes in a primary and fallback encoder. + * If HardwareVideoEncoderFactory and SoftwareVideoEncoderFactory are + * passed in directly as primary and fallback, when H.264 is used, + * libwebrtc will crash. + * + * This is because SoftwareVideoEncoderFactory does not handle H.264, + * so [SoftwareVideoEncoderFactory.createEncoder] returns null, and + * the libwebrtc side does not handle nulls, regardless of whether the + * fallback is actually used or not. + * + * To avoid nulls, we simply pass responsibility over to the HardwareVideoEncoderFactory. + * This results in HardwareVideoEncoderFactory being both the primary and fallback, + * but there aren't any specific problems in doing so. + */ + private class FallbackFactory(private val hardwareVideoEncoderFactory: VideoEncoderFactory) : + VideoEncoderFactory { + + private val softwareVideoEncoderFactory: VideoEncoderFactory = SoftwareVideoEncoderFactory() + + override fun createEncoder(info: VideoCodecInfo): VideoEncoder? 
{ + val softwareEncoder = softwareVideoEncoderFactory.createEncoder(info) + val hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info) + return if (hardwareEncoder != null && softwareEncoder != null) { + VideoEncoderFallback(hardwareEncoder, softwareEncoder) + } else { + softwareEncoder ?: hardwareEncoder + } + } + + override fun getSupportedCodecs(): Array { + val supportedCodecInfos: MutableList = mutableListOf() + supportedCodecInfos.addAll(softwareVideoEncoderFactory.supportedCodecs) + supportedCodecInfos.addAll(hardwareVideoEncoderFactory.supportedCodecs) + return supportedCodecInfos.toTypedArray() + } + + } + + /** + * Wraps each stream encoder and performs the following: + * - Starts up a single thread + * - When the width/height from [initEncode] doesn't match the frame buffer's, + * scales the frame prior to encoding. + * - Always calls the encoder on the thread. + */ + private class StreamEncoderWrapper(private val encoder: VideoEncoder) : VideoEncoder { + + val executor: ExecutorService = Executors.newSingleThreadExecutor() + var streamSettings: VideoEncoder.Settings? = null + + override fun initEncode( + settings: VideoEncoder.Settings, + callback: VideoEncoder.Callback? 
+ ): VideoCodecStatus { + streamSettings = settings + val future = executor.submit(Callable { + // LKLog.i { + // """initEncode() thread=${Thread.currentThread().name} [${Thread.currentThread().id}] + // | encoder=${encoder.implementationName} + // | streamSettings: + // | numberOfCores=${settings.numberOfCores} + // | width=${settings.width} + // | height=${settings.height} + // | startBitrate=${settings.startBitrate} + // | maxFramerate=${settings.maxFramerate} + // | automaticResizeOn=${settings.automaticResizeOn} + // | numberOfSimulcastStreams=${settings.numberOfSimulcastStreams} + // | lossNotification=${settings.capabilities.lossNotification} + // """.trimMargin() + // } + return@Callable encoder.initEncode(settings, callback) + }) + return future.get() + } + + override fun release(): VideoCodecStatus { + val future = executor.submit(Callable { return@Callable encoder.release() }) + return future.get() + } + + override fun encode( + frame: VideoFrame, + encodeInfo: VideoEncoder.EncodeInfo? + ): VideoCodecStatus { + val future = executor.submit(Callable { + //LKLog.d { "encode() buffer=${frame.buffer}, thread=${Thread.currentThread().name} " + + // "[${Thread.currentThread().id}]" } + if (streamSettings == null) { + return@Callable encoder.encode(frame, encodeInfo) + } else if (frame.buffer.width == streamSettings!!.width) { + return@Callable encoder.encode(frame, encodeInfo) + } else { + // The incoming buffer is different than the streamSettings received in initEncode() + // Need to scale. + val originalBuffer = frame.buffer + // TODO: Do we need to handle when the scale factor is weird? 
+ val adaptedBuffer = originalBuffer.cropAndScale( + 0, 0, originalBuffer.width, originalBuffer.height, + streamSettings!!.width, streamSettings!!.height + ) + val adaptedFrame = VideoFrame(adaptedBuffer, frame.rotation, frame.timestampNs) + val result = encoder.encode(adaptedFrame, encodeInfo) + adaptedBuffer.release() + return@Callable result + } + }) + return future.get() + } + + override fun setRateAllocation( + allocation: VideoEncoder.BitrateAllocation?, + frameRate: Int + ): VideoCodecStatus { + val future = executor.submit(Callable { + return@Callable encoder.setRateAllocation( + allocation, + frameRate + ) + }) + return future.get() + } + + override fun getScalingSettings(): VideoEncoder.ScalingSettings { + val future = executor.submit(Callable { return@Callable encoder.scalingSettings }) + return future.get() + } + + override fun getImplementationName(): String { + val future = executor.submit(Callable { return@Callable encoder.implementationName }) + return future.get() + } + + override fun createNative(webrtcEnvRef: Long): Long { + val future = executor.submit(Callable { return@Callable encoder.createNative(webrtcEnvRef) }) + return future.get() + } + + override fun isHardwareEncoder(): Boolean { + val future = executor.submit(Callable { return@Callable encoder.isHardwareEncoder }) + return future.get() + } + + override fun setRates(rcParameters: VideoEncoder.RateControlParameters?): VideoCodecStatus { + val future = executor.submit(Callable { return@Callable encoder.setRates(rcParameters) }) + return future.get() + } + + override fun getResolutionBitrateLimits(): Array { + val future = executor.submit(Callable { return@Callable encoder.resolutionBitrateLimits }) + return future.get() + } + + override fun getEncoderInfo(): VideoEncoder.EncoderInfo { + val future = executor.submit(Callable { return@Callable encoder.encoderInfo }) + return future.get() + } + } + + private class StreamEncoderWrapperFactory(private val factory: VideoEncoderFactory) : + 
VideoEncoderFactory { + override fun createEncoder(videoCodecInfo: VideoCodecInfo?): VideoEncoder? { + val encoder = factory.createEncoder(videoCodecInfo) + if (encoder == null) { + return null + } + if (encoder is WrappedNativeVideoEncoder) { + return encoder + } + return StreamEncoderWrapper(encoder) + } + + override fun getSupportedCodecs(): Array { + return factory.supportedCodecs + } + } + + + private val primary: VideoEncoderFactory + private val fallback: VideoEncoderFactory + private val native: SimulcastVideoEncoderFactory + + init { + val hardwareVideoEncoderFactory = HardwareVideoEncoderFactory( + sharedContext, enableIntelVp8Encoder, enableH264HighProfile + ) + primary = StreamEncoderWrapperFactory(hardwareVideoEncoderFactory) + fallback = StreamEncoderWrapperFactory(FallbackFactory(primary)) + native = SimulcastVideoEncoderFactory(primary, fallback) + } + + override fun createEncoder(info: VideoCodecInfo?): VideoEncoder? { + return native.createEncoder(info) + } + + override fun getSupportedCodecs(): Array { + return native.supportedCodecs + } + +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java b/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java new file mode 100644 index 0000000000..0471f21983 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java @@ -0,0 +1,42 @@ +package com.cloudwebrtc.webrtc; + +import android.app.Activity; +import android.content.Context; + +import androidx.annotation.Nullable; +import java.util.Map; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnectionFactory; + +import io.flutter.plugin.common.BinaryMessenger; + +/** + * Provides interested components with access to the current application state. + * + * It is encouraged to use this class instead of a component directly. 
+ */ +public interface StateProvider { + + boolean putLocalStream(String streamId, MediaStream stream); + + boolean putLocalTrack(String trackId, LocalTrack track); + + LocalTrack getLocalTrack(String trackId); + + String getNextStreamUUID(); + + String getNextTrackUUID(); + + PeerConnectionFactory getPeerConnectionFactory(); + + PeerConnectionObserver getPeerConnectionObserver(String peerConnectionId); + + @Nullable + Activity getActivity(); + + @Nullable + Context getApplicationContext(); + + BinaryMessenger getMessenger(); +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java index e286b2d63a..faa783a351 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java @@ -1,190 +1,46 @@ package com.cloudwebrtc.webrtc; -import android.content.Context; -import android.content.res.Resources.NotFoundException; -import android.graphics.Point; import android.graphics.SurfaceTexture; -import android.opengl.GLES20; -import android.os.Handler; -import android.os.HandlerThread; - -import java.util.concurrent.CountDownLatch; +import android.view.Surface; import org.webrtc.EglBase; +import org.webrtc.EglRenderer; import org.webrtc.GlRectDrawer; -import org.webrtc.GlUtil; -import org.webrtc.Logging; import org.webrtc.RendererCommon; import org.webrtc.ThreadUtils; -import org.webrtc.VideoRenderer; +import org.webrtc.VideoFrame; + +import java.util.concurrent.CountDownLatch; + +import io.flutter.view.TextureRegistry; -import java.nio.ByteBuffer; /** - * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceTexture. + * Display the video stream on a Surface. * renderFrame() is asynchronous to avoid blocking the calling thread. 
- * This class is thread safe and handles access from potentially four different threads: - * Interaction from the main app in init, release, setMirror, and setScalingtype. + * This class is thread safe and handles access from potentially three different threads: + * Interaction from the main app in init, release and setMirror. * Interaction from C++ rtc::VideoSinkInterface in renderFrame. - * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed. - * Interaction with the layout framework in onMeasure and onSizeChanged. + * Interaction from SurfaceHolder lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed. */ -public class SurfaceTextureRenderer implements VideoRenderer.Callbacks { - private static final String TAG = "SurfaceTextureRenderer"; - - private final SurfaceTexture texture; - // Dedicated render thread. - private HandlerThread renderThread; - // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized - // on |handlerLock|. - private final Object handlerLock = new Object(); - private Handler renderThreadHandler; - - // EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed - // from the render thread. - private EglBase eglBase; - private final YuvUploader yuvUploader = new YuvUploader(); - private RendererCommon.GlDrawer drawer; - // Texture ids for YUV frames. Allocated on first arrival of a YUV frame. - private int[] yuvTextures = null; - - // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|. - private final Object frameLock = new Object(); - private VideoRenderer.I420Frame pendingFrame; - - // These variables are synchronized on |layoutLock|. +public class SurfaceTextureRenderer extends EglRenderer { + // Callback for reporting renderer events. Read-only after initilization so no lock required. 
+ private RendererCommon.RendererEvents rendererEvents; private final Object layoutLock = new Object(); - - // |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in - // onLayout() and surfaceChanged() respectively. - private final Point layoutSize = new Point(); - // TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple - // layout and surface size. - private final Point surfaceSize = new Point(); - // |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed(). - private boolean isSurfaceCreated; - // Last rendered frame dimensions, or 0 if no frame has been rendered yet. - private int frameWidth; - private int frameHeight; + private boolean isRenderingPaused; + private boolean isFirstFrameRendered; + private int rotatedFrameWidth; + private int rotatedFrameHeight; private int frameRotation; - // If true, mirrors the video stream horizontally. - private boolean mirror; - // Callback for reporting renderer events. - private RendererCommon.RendererEvents rendererEvents; - - // These variables are synchronized on |statisticsLock|. - private final Object statisticsLock = new Object(); - // Total number of video frames received in renderFrame() call. - private int framesReceived; - // Number of video frames dropped by renderFrame() because previous frame has not been rendered - // yet. - private int framesDropped; - // Number of rendered video frames. - private int framesRendered; - // Time in ns when the first video frame was rendered. - private long firstFrameTimeNs; - // Time in ns spent in renderFrameOnRenderThread() function. - private long renderTimeNs; - /** - * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This - * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies. + * In order to render something, you must first call init(). 
*/ - public static class YuvUploader { - // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width. - // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader - // that handles stride and compare performance with intermediate copy. - private ByteBuffer copyBuffer; - private int[] yuvTextures; - - /** - * Upload |planes| into OpenGL textures, taking stride into consideration. - * - * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively. - */ - public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) { - final int[] planeWidths = new int[] {width, width / 2, width / 2}; - final int[] planeHeights = new int[] {height, height / 2, height / 2}; - // Make a first pass to see if we need a temporary copy buffer. - int copyCapacityNeeded = 0; - for (int i = 0; i < 3; ++i) { - if (strides[i] > planeWidths[i]) { - copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]); - } - } - // Allocate copy buffer if necessary. - if (copyCapacityNeeded > 0 - && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) { - copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded); - } - // Make sure YUV textures are allocated. - if (yuvTextures == null) { - yuvTextures = new int[3]; - for (int i = 0; i < 3; i++) { - yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D); - } - } - // Upload each plane. - for (int i = 0; i < 3; ++i) { - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); - // GLES only accepts packed data, i.e. stride == planeWidth. - final ByteBuffer packedByteBuffer; - if (strides[i] == planeWidths[i]) { - // Input is packed already. 
- packedByteBuffer = planes[i]; - } else { - VideoRenderer.nativeCopyPlane( - planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]); - packedByteBuffer = copyBuffer; - } - GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i], - planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer); - } - return yuvTextures; - } - - /** - * Releases cached resources. Uploader can still be used and the resources will be reallocated - * on first use. - */ - public void release() { - copyBuffer = null; - if (yuvTextures != null) { - GLES20.glDeleteTextures(3, yuvTextures, 0); - yuvTextures = null; - } - } + public SurfaceTextureRenderer(String name) { + super(name); } - - // Runnable for posting frames to render thread. - private final Runnable renderFrameRunnable = new Runnable() { - @Override public void run() { - renderFrameOnRenderThread(); - } - }; - // Runnable for clearing Surface to black. - private final Runnable makeBlackRunnable = new Runnable() { - @Override public void run() { - makeBlack(); - } - }; - - public SurfaceTextureRenderer(Context context, SurfaceTexture texture) { - this.texture = texture; - synchronized (layoutLock) { - isSurfaceCreated = true; - } - tryCreateEglSurface(); - } - - /** - * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to - * reinitialize the renderer after a previous init()/release() cycle. - */ - public void init( - EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) { + public void init(final EglBase.Context sharedContext, + RendererCommon.RendererEvents rendererEvents) { init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer()); } @@ -194,369 +50,116 @@ public void init( * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous * init()/release() cycle. 
*/ - public void init( - final EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents, - final int[] configAttributes, RendererCommon.GlDrawer drawer) { - synchronized (handlerLock) { - if (renderThreadHandler != null) { - throw new IllegalStateException(getResourceName() + "Already initialized"); - } - Logging.d(TAG, getResourceName() + "Initializing."); - this.rendererEvents = rendererEvents; - this.drawer = drawer; - renderThread = new HandlerThread(TAG); - renderThread.start(); - renderThreadHandler = new Handler(renderThread.getLooper()); - // Create EGL context on the newly created render thread. It should be possibly to create the - // context on this thread and make it current on the render thread, but this causes failure on - // some Marvel based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350. - ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() { - @Override - public void run() { - eglBase = EglBase.create(sharedContext, configAttributes); - } - }); - } - tryCreateEglSurface(); - } - - /** - * Create and make an EGLSurface current if both init() and surfaceCreated() have been called. - */ - public void tryCreateEglSurface() { - // |renderThreadHandler| is only created after |eglBase| is created in init(), so the - // following code will only execute if eglBase != null. - runOnRenderThread(new Runnable() { - @Override - public void run() { - synchronized (layoutLock) { - if (texture != null && eglBase != null && isSurfaceCreated && !eglBase.hasSurface()) { - eglBase.createSurface(texture); - eglBase.makeCurrent(); - // Necessary for YUV frames with odd width. - GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); - } - } - - // XXX by Saúl Ibarra Corretgé : Until an actual frame - // is available to render, draw black; otherwise, this SurfaceView will - // appear transparent. 
- makeBlack(); - } - }); - } - - /** - * Block until any pending frame is returned and all GL resources released, even if an interrupt - * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function - * should be called before the Activity is destroyed and the EGLContext is still valid. If you - * don't call this function, the GL resources might leak. - */ - public void release() { - final CountDownLatch eglCleanupBarrier = new CountDownLatch(1); - synchronized (handlerLock) { - if (renderThreadHandler == null) { - Logging.d(TAG, getResourceName() + "Already released"); - return; - } - // Release EGL and GL resources on render thread. - // TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted - // when the EGL context is lost. It might be dangerous to delete them manually in - // Activity.onDestroy(). - renderThreadHandler.postAtFrontOfQueue(new Runnable() { - @Override public void run() { - drawer.release(); - drawer = null; - if (yuvTextures != null) { - GLES20.glDeleteTextures(3, yuvTextures, 0); - yuvTextures = null; - } - // Clear last rendered image to black. - makeBlack(); - eglBase.release(); - eglBase = null; - eglCleanupBarrier.countDown(); - } - }); - // Don't accept any more frames or messages to the render thread. - renderThreadHandler = null; - } - // Make sure the EGL/GL cleanup posted above is executed. - ThreadUtils.awaitUninterruptibly(eglCleanupBarrier); - renderThread.quit(); - synchronized (frameLock) { - if (pendingFrame != null) { - VideoRenderer.renderFrameDone(pendingFrame); - pendingFrame = null; - } - } - // The |renderThread| cleanup is not safe to cancel and we need to wait until it's done. - ThreadUtils.joinUninterruptibly(renderThread); - renderThread = null; - // Reset statistics and event reporting. 
+ public void init(final EglBase.Context sharedContext, + RendererCommon.RendererEvents rendererEvents, final int[] configAttributes, + RendererCommon.GlDrawer drawer) { + ThreadUtils.checkIsOnMainThread(); + this.rendererEvents = rendererEvents; synchronized (layoutLock) { - frameWidth = 0; - frameHeight = 0; - frameRotation = 0; - rendererEvents = null; + isFirstFrameRendered = false; + rotatedFrameWidth = 0; + rotatedFrameHeight = 0; + frameRotation = -1; } - resetStatistics(); + super.init(sharedContext, configAttributes, drawer); } - - /** - * Reset statistics. This will reset the logged statistics in logStatistics(), and - * RendererEvents.onFirstFrameRendered() will be called for the next frame. - */ - public void resetStatistics() { - synchronized (statisticsLock) { - framesReceived = 0; - framesDropped = 0; - framesRendered = 0; - firstFrameTimeNs = 0; - renderTimeNs = 0; - } + @Override + public void init(final EglBase.Context sharedContext, final int[] configAttributes, + RendererCommon.GlDrawer drawer) { + init(sharedContext, null /* rendererEvents */, configAttributes, drawer); } - /** - * Set if the video stream should be mirrored or not. + * Limit render framerate. + * + * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps + * reduction. */ - public void setMirror(final boolean mirror) { + @Override + public void setFpsReduction(float fps) { synchronized (layoutLock) { - this.mirror = mirror; + isRenderingPaused = fps == 0f; } + super.setFpsReduction(fps); } - - // VideoRenderer.Callbacks interface. 
@Override - public void renderFrame(VideoRenderer.I420Frame frame) { - synchronized (statisticsLock) { - ++framesReceived; - } - synchronized (handlerLock) { - if (renderThreadHandler == null) { - Logging.d(TAG, getResourceName() - + "Dropping frame - Not initialized or already released."); - VideoRenderer.renderFrameDone(frame); - return; - } - synchronized (frameLock) { - if (pendingFrame != null) { - // Drop old frame. - synchronized (statisticsLock) { - ++framesDropped; - } - VideoRenderer.renderFrameDone(pendingFrame); - } - pendingFrame = frame; - renderThreadHandler.post(renderFrameRunnable); - } - } - } - - public void surfaceDestroyed() { - Logging.d(TAG, getResourceName() + "Surface destroyed."); + public void disableFpsReduction() { synchronized (layoutLock) { - isSurfaceCreated = false; - surfaceSize.x = 0; - surfaceSize.y = 0; + isRenderingPaused = false; } - runOnRenderThread(new Runnable() { - @Override - public void run() { - if (eglBase != null) { - eglBase.detachCurrent(); - eglBase.releaseSurface(); - } - } - }); + super.disableFpsReduction(); } - - public void surfaceChanged(int width, int height) { - Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height); + @Override + public void pauseVideo() { synchronized (layoutLock) { - surfaceSize.x = width; - surfaceSize.y = height; - layoutSize.x = width; - layoutSize.y = height; + isRenderingPaused = true; } - // Might have a pending frame waiting for a surface of correct size. - runOnRenderThread(renderFrameRunnable); + super.pauseVideo(); } - - /** - * Private helper function to post tasks safely. - */ - private void runOnRenderThread(Runnable runnable) { - synchronized (handlerLock) { - if (renderThreadHandler != null) { - renderThreadHandler.post(runnable); - } + // VideoSink interface. 
+ @Override + public void onFrame(VideoFrame frame) { + if(surface == null) { + producer.setSize(frame.getRotatedWidth(),frame.getRotatedHeight()); + surface = producer.getSurface(); + createEglSurface(surface); } + updateFrameDimensionsAndReportEvents(frame); + super.onFrame(frame); } - private String getResourceName() { - try { - return "SurfaceTextureRenderer: "; - } catch (NotFoundException e) { - return ""; - } - } + private Surface surface = null; - private void makeBlack() { - if (Thread.currentThread() != renderThread) { - throw new IllegalStateException(getResourceName() + "Wrong thread."); - } - if (eglBase != null && eglBase.hasSurface()) { - GLES20.glClearColor(0, 0, 0, 0); - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - eglBase.swapBuffers(); - } - } + private TextureRegistry.SurfaceProducer producer; - /** - * Requests new layout if necessary. Returns true if layout and surface size are consistent. - */ - private boolean checkConsistentLayout() { - if (Thread.currentThread() != renderThread) { - throw new IllegalStateException(getResourceName() + "Wrong thread."); - } - synchronized (layoutLock) { - return surfaceSize.equals(layoutSize); - } - } + public void surfaceCreated(final TextureRegistry.SurfaceProducer producer) { + ThreadUtils.checkIsOnMainThread(); + this.producer = producer; + this.producer.setCallback( + new TextureRegistry.SurfaceProducer.Callback() { + @Override + public void onSurfaceAvailable() { + // Do surface initialization here, and draw the current frame. + } - /** - * Renders and releases |pendingFrame|. - */ - private void renderFrameOnRenderThread() { - if (Thread.currentThread() != renderThread) { - throw new IllegalStateException(getResourceName() + "Wrong thread."); - } - // Fetch and render |pendingFrame|. 
- final VideoRenderer.I420Frame frame; - synchronized (frameLock) { - if (pendingFrame == null) { - return; - } - frame = pendingFrame; - pendingFrame = null; - } - updateFrameDimensionsAndReportEvents(frame); - if (eglBase == null || !eglBase.hasSurface()) { - Logging.d(TAG, getResourceName() + "No surface to draw on"); - VideoRenderer.renderFrameDone(frame); - return; - } - if (!checkConsistentLayout()) { - // Output intermediate black frames while the layout is updated. - //makeBlack(); - //VideoRenderer.renderFrameDone(frame); - return; - } - // After a surface size change, the EGLSurface might still have a buffer of the old size in the - // pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet - // changed. Such a buffer will be rendered incorrectly, so flush it with a black frame. - synchronized (layoutLock) { - if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) { - makeBlack(); - } - } - - final long startTimeNs = System.nanoTime(); - final float[] texMatrix; - synchronized (layoutLock) { - final float[] rotatedSamplingMatrix = - RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree); - final float[] layoutMatrix = RendererCommon.getLayoutMatrix( - mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y); - texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix); - } - - // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's - // a workaround for bug 5147. Performance will be slightly worse. - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - if (frame.yuvFrame) { - // Make sure YUV textures are allocated. 
- if (yuvTextures == null) { - yuvTextures = new int[3]; - for (int i = 0; i < 3; i++) { - yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D); - } - } - yuvTextures = yuvUploader.uploadYuvData( - frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes); - drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), - 0, 0, surfaceSize.x, surfaceSize.y); - } else { - drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), - 0, 0, surfaceSize.x, surfaceSize.y); - } - - eglBase.swapBuffers(); - VideoRenderer.renderFrameDone(frame); - synchronized (statisticsLock) { - if (framesRendered == 0) { - firstFrameTimeNs = startTimeNs; - synchronized (layoutLock) { - Logging.d(TAG, getResourceName() + "Reporting first rendered frame."); - if (rendererEvents != null) { - rendererEvents.onFirstFrameRendered(); - } - } - } - ++framesRendered; - renderTimeNs += (System.nanoTime() - startTimeNs); - if (framesRendered % 300 == 0) { - logStatistics(); - } - } + @Override + public void onSurfaceCleanup() { + surfaceDestroyed(); + } + } + ); } - // Return current frame aspect ratio, taking rotation into account. - private float frameAspectRatio() { - synchronized (layoutLock) { - if (frameWidth == 0 || frameHeight == 0) { - return 0.0f; - } - return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight - : (float) frameHeight / frameWidth; - } + public void surfaceDestroyed() { + ThreadUtils.checkIsOnMainThread(); + final CountDownLatch completionLatch = new CountDownLatch(1); + releaseEglSurface(completionLatch::countDown); + ThreadUtils.awaitUninterruptibly(completionLatch); + surface = null; } // Update frame dimensions and report any changes to |rendererEvents|. 
- private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) { + private void updateFrameDimensionsAndReportEvents(VideoFrame frame) { synchronized (layoutLock) { - if (frameWidth != frame.width || frameHeight != frame.height - || frameRotation != frame.rotationDegree) { - Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to " - + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree); + if (isRenderingPaused) { + return; + } + if (!isFirstFrameRendered) { + isFirstFrameRendered = true; if (rendererEvents != null) { - rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree); - } - frameWidth = frame.width; - frameHeight = frame.height; - frameRotation = frame.rotationDegree; - if(frameRotation == 90 || frameRotation == 270) { - texture.setDefaultBufferSize(frameHeight, frameWidth); - surfaceChanged(frameHeight, frameWidth); - }else { - texture.setDefaultBufferSize(frameWidth, frameHeight); - surfaceChanged(frameWidth, frameHeight); + rendererEvents.onFirstFrameRendered(); } } - } - } - - private void logStatistics() { - synchronized (statisticsLock) { - Logging.d(TAG, getResourceName() + "Frames received: " - + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered); - if (framesReceived > 0 && framesRendered > 0) { - final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs; - Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) + - " ms. 
FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs); - Logging.d(TAG, getResourceName() + "Average render time: " - + (int) (renderTimeNs / (1000 * framesRendered)) + " us."); + if (rotatedFrameWidth != frame.getRotatedWidth() + || rotatedFrameHeight != frame.getRotatedHeight() + || frameRotation != frame.getRotation()) { + if (rendererEvents != null) { + rendererEvents.onFrameResolutionChanged( + frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotation()); + } + rotatedFrameWidth = frame.getRotatedWidth(); + rotatedFrameHeight = frame.getRotatedHeight(); + producer.setSize(rotatedFrameWidth, rotatedFrameHeight); + frameRotation = frame.getRotation(); } } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java new file mode 100644 index 0000000000..df6a7cd7c8 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java @@ -0,0 +1,40 @@ +package com.cloudwebrtc.webrtc.audio; + +import androidx.annotation.Nullable; + +import com.twilio.audioswitch.AudioDevice; + +public enum AudioDeviceKind { + BLUETOOTH("bluetooth", AudioDevice.BluetoothHeadset.class), + WIRED_HEADSET("wired-headset", AudioDevice.WiredHeadset.class), + SPEAKER("speaker", AudioDevice.Speakerphone.class), + EARPIECE("earpiece", AudioDevice.Earpiece.class); + + public final String typeName; + public final Class audioDeviceClass; + + AudioDeviceKind(String typeName, Class audioDeviceClass) { + this.typeName = typeName; + this.audioDeviceClass = audioDeviceClass; + } + + @Nullable + public static AudioDeviceKind fromAudioDevice(AudioDevice audioDevice) { + for (AudioDeviceKind kind : values()) { + if (kind.audioDeviceClass.equals(audioDevice.getClass())) { + return kind; + } + } + return null; + } + + @Nullable + public static AudioDeviceKind fromTypeName(String typeName) { + for (AudioDeviceKind kind : values()) { + if 
(kind.typeName.equals(typeName)) { + return kind; + } + } + return null; + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingAdapter.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingAdapter.java new file mode 100644 index 0000000000..b91409cb7b --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingAdapter.java @@ -0,0 +1,59 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.ExternalAudioProcessingFactory; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +public class AudioProcessingAdapter implements ExternalAudioProcessingFactory.AudioProcessing { + public interface ExternalAudioFrameProcessing { + void initialize(int sampleRateHz, int numChannels); + + void reset(int newRate); + + void process(int numBands, int numFrames, ByteBuffer buffer); + } + + public AudioProcessingAdapter() {} + List audioProcessors = new ArrayList<>(); + + public void addProcessor(ExternalAudioFrameProcessing audioProcessor) { + synchronized (audioProcessors) { + audioProcessors.add(audioProcessor); + } + } + + public void removeProcessor(ExternalAudioFrameProcessing audioProcessor) { + synchronized (audioProcessors) { + audioProcessors.remove(audioProcessor); + } + } + + @Override + public void initialize(int sampleRateHz, int numChannels) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.initialize(sampleRateHz, numChannels); + } + } + } + + @Override + public void reset(int newRate) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.reset(newRate); + } + } + } + + @Override + public void process(int numBands, int numFrames, ByteBuffer buffer) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.process(numBands, numFrames, 
buffer); + } + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingController.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingController.java new file mode 100644 index 0000000000..a84589b600 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioProcessingController.java @@ -0,0 +1,24 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.ExternalAudioProcessingFactory; + +public class AudioProcessingController { + /** + * This is the audio processing module that will be applied to the audio stream after it is captured from the microphone. + * This is useful for adding echo cancellation, noise suppression, etc. + */ + public final AudioProcessingAdapter capturePostProcessing = new AudioProcessingAdapter(); + /** + * This is the audio processing module that will be applied to the audio stream before it is rendered to the speaker. + */ + public final AudioProcessingAdapter renderPreProcessing = new AudioProcessingAdapter(); + + public ExternalAudioProcessingFactory externalAudioProcessingFactory; + + public AudioProcessingController() { + this.externalAudioProcessingFactory = new ExternalAudioProcessingFactory(); + this.externalAudioProcessingFactory.setCapturePostProcessing(capturePostProcessing); + this.externalAudioProcessingFactory.setRenderPreProcessing(renderPreProcessing); + } + +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java new file mode 100644 index 0000000000..a2da4c088e --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java @@ -0,0 +1,410 @@ +package com.cloudwebrtc.webrtc.audio; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.media.AudioAttributes; +import android.media.AudioManager; +import android.os.Build; +import android.os.Handler; +import android.os.Looper; + 
+import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +import com.twilio.audioswitch.AudioDevice; +import com.twilio.audioswitch.AudioSwitch; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import kotlin.Unit; +import kotlin.jvm.functions.Function2; + +public class AudioSwitchManager { + + public static final String TAG = "AudioSwitchManager"; + + @SuppressLint("StaticFieldLeak") + public static AudioSwitchManager instance; + @NonNull + private final Context context; + @NonNull + private final AudioManager audioManager; + + public boolean loggingEnabled; + private boolean isActive = false; + @NonNull + public Function2< + ? super List, + ? super AudioDevice, + Unit> audioDeviceChangeListener = (devices, currentDevice) -> null; + + @NonNull + public AudioManager.OnAudioFocusChangeListener audioFocusChangeListener = (i -> { + }); + + @NonNull + public List> preferredDeviceList; + + // AudioSwitch is not threadsafe, so all calls should be done on the main thread. + private final Handler handler = new Handler(Looper.getMainLooper()); + + @Nullable + private AudioSwitch audioSwitch; + + /** + * When true, AudioSwitchManager will request audio focus on start and abandon on stop. + *
+ * Defaults to true. + */ + private boolean manageAudioFocus = true; + + /** + * The audio focus mode to use while started. + *
+ * Defaults to AudioManager.AUDIOFOCUS_GAIN. + */ + private int focusMode = AudioManager.AUDIOFOCUS_GAIN; + + /** + * The audio mode to use while started. + *
+ * Defaults to AudioManager.MODE_NORMAL. + */ + private int audioMode = AudioManager.MODE_IN_COMMUNICATION; + + /** + * The audio stream type to use when requesting audio focus on pre-O devices. + *
+ * Defaults to AudioManager.STREAM_VOICE_CALL. + *
+ * Refer to this compatibility table + * to ensure that your values match between android versions. + *
+ * Note: Manual audio routing may not work appropriately when using non-default values. + */ + private int audioStreamType = AudioManager.STREAM_VOICE_CALL; + + /** + * The audio attribute usage type to use when requesting audio focus on devices O and beyond. + *
+ * Defaults to AudioAttributes.USAGE_VOICE_COMMUNICATION. + *
+ * Refer to this compatibility table + * to ensure that your values match between android versions. + *
+ * Note: Manual audio routing may not work appropriately when using non-default values. + */ + private int audioAttributeUsageType = AudioAttributes.USAGE_VOICE_COMMUNICATION; + + /** + * The audio attribute content type to use when requesting audio focus on devices O and beyond. + *
+ * Defaults to AudioAttributes.CONTENT_TYPE_SPEECH. + *
+ * Refer to this compatibility table + * to ensure that your values match between android versions. + *
+ * Note: Manual audio routing may not work appropriately when using non-default values. + */ + private int audioAttributeContentType = AudioAttributes.CONTENT_TYPE_SPEECH; + + /** + * On certain Android devices, audio routing does not function properly and bluetooth microphones will not work + * unless audio mode is set to [AudioManager.MODE_IN_COMMUNICATION] or [AudioManager.MODE_IN_CALL]. + * + * AudioSwitchManager by default will not handle audio routing in those cases to avoid audio issues. + * + * If this set to true, AudioSwitchManager will attempt to do audio routing, though behavior is undefined. + */ + private boolean forceHandleAudioRouting = false; + + public AudioSwitchManager(@NonNull Context context) { + this.context = context; + this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); + + preferredDeviceList = new ArrayList<>(); + preferredDeviceList.add(AudioDevice.BluetoothHeadset.class); + preferredDeviceList.add(AudioDevice.WiredHeadset.class); + preferredDeviceList.add(AudioDevice.Speakerphone.class); + preferredDeviceList.add(AudioDevice.Earpiece.class); + initAudioSwitch(); + } + + private void initAudioSwitch() { + if (audioSwitch == null) { + handler.removeCallbacksAndMessages(null); + handler.postAtFrontOfQueue(() -> { + audioSwitch = new AudioSwitch( + context, + loggingEnabled, + audioFocusChangeListener, + preferredDeviceList + ); + audioSwitch.setManageAudioFocus(manageAudioFocus); + audioSwitch.setFocusMode(focusMode); + audioSwitch.setAudioMode(audioMode); + audioSwitch.setAudioStreamType(audioStreamType); + audioSwitch.setAudioAttributeContentType(audioAttributeContentType); + audioSwitch.setAudioAttributeUsageType(audioAttributeUsageType); + audioSwitch.setForceHandleAudioRouting(forceHandleAudioRouting); + audioSwitch.start(audioDeviceChangeListener); + }); + } + } + + public void start() { + if (audioSwitch != null) { + handler.removeCallbacksAndMessages(null); + handler.postAtFrontOfQueue(() -> { + if 
(!isActive) { + Objects.requireNonNull(audioSwitch).activate(); + isActive = true; + } + }); + } + } + + public void stop() { + if (audioSwitch != null) { + handler.removeCallbacksAndMessages(null); + handler.postAtFrontOfQueue(() -> { + if (isActive) { + Objects.requireNonNull(audioSwitch).deactivate(); + isActive = false; + } + }); + } + } + + public void setMicrophoneMute(boolean mute) { + audioManager.setMicrophoneMute(mute); + } + + @Nullable + public AudioDevice selectedAudioDevice() { + return Objects.requireNonNull(audioSwitch).getSelectedAudioDevice(); + } + + @NonNull + public List availableAudioDevices() { + return Objects.requireNonNull(audioSwitch).getAvailableAudioDevices(); + } + + public void selectAudioOutput(@NonNull Class audioDeviceClass) { + handler.post(() -> { + List devices = availableAudioDevices(); + AudioDevice audioDevice = null; + for (AudioDevice device : devices) { + if (device.getClass().equals(audioDeviceClass)) { + audioDevice = device; + break; + } + } + if (audioDevice != null) { + Objects.requireNonNull(audioSwitch).selectDevice(audioDevice); + } + }); + } + + private void updatePreferredDeviceList(boolean speakerOn) { + preferredDeviceList = new ArrayList<>(); + preferredDeviceList.add(AudioDevice.BluetoothHeadset.class); + preferredDeviceList.add(AudioDevice.WiredHeadset.class); + if (speakerOn) { + preferredDeviceList.add(AudioDevice.Speakerphone.class); + preferredDeviceList.add(AudioDevice.Earpiece.class); + } else { + preferredDeviceList.add(AudioDevice.Earpiece.class); + preferredDeviceList.add(AudioDevice.Speakerphone.class); + } + handler.post(() -> { + Objects.requireNonNull(audioSwitch).setPreferredDeviceList(preferredDeviceList); + }); + } + + public void enableSpeakerphone(boolean enable) { + updatePreferredDeviceList(enable); + if (enable) { + selectAudioOutput(AudioDevice.Speakerphone.class); + } else { + List devices = availableAudioDevices(); + AudioDevice audioDevice = null; + for (AudioDevice device : devices) 
{ + if (device.getClass().equals(AudioDevice.BluetoothHeadset.class)) { + audioDevice = device; + break; + } else if (device.getClass().equals(AudioDevice.WiredHeadset.class)) { + audioDevice = device; + break; + } else if (device.getClass().equals(AudioDevice.Earpiece.class)) { + audioDevice = device; + break; + } + } + if (audioDevice != null) { + selectAudioOutput(audioDevice.getClass()); + } else { + handler.post(() -> { + Objects.requireNonNull(audioSwitch).selectDevice(null); + }); + } + } + } + + public void enableSpeakerButPreferBluetooth() { + List devices = availableAudioDevices(); + AudioDevice audioDevice = null; + for (AudioDevice device : devices) { + if (device.getClass().equals(AudioDevice.BluetoothHeadset.class)) { + audioDevice = device; + break; + } else if (device.getClass().equals(AudioDevice.WiredHeadset.class)) { + audioDevice = device; + break; + } + } + + if (audioDevice == null) { + selectAudioOutput(AudioDevice.Speakerphone.class); + } else { + selectAudioOutput(audioDevice.getClass()); + } + } + + public void selectAudioOutput(@Nullable AudioDeviceKind kind) { + if (kind != null) { + selectAudioOutput(kind.audioDeviceClass); + } + } + + public void setAudioConfiguration(Map configuration) { + if (configuration == null) { + return; + } + + Boolean manageAudioFocus = null; + if (configuration.get("manageAudioFocus") instanceof Boolean) { + manageAudioFocus = (Boolean) configuration.get("manageAudioFocus"); + } + setManageAudioFocus(manageAudioFocus); + + String audioMode = null; + if (configuration.get("androidAudioMode") instanceof String) { + audioMode = (String) configuration.get("androidAudioMode"); + } + setAudioMode(audioMode); + + String focusMode = null; + if (configuration.get("androidAudioFocusMode") instanceof String) { + focusMode = (String) configuration.get("androidAudioFocusMode"); + } + setFocusMode(focusMode); + + String streamType = null; + if (configuration.get("androidAudioStreamType") instanceof String) { + streamType 
= (String) configuration.get("androidAudioStreamType"); + } + setAudioStreamType(streamType); + + String usageType = null; + if (configuration.get("androidAudioAttributesUsageType") instanceof String) { + usageType = (String) configuration.get("androidAudioAttributesUsageType"); + } + setAudioAttributesUsageType(usageType); + + String contentType = null; + if (configuration.get("androidAudioAttributesContentType") instanceof String) { + contentType = (String) configuration.get("androidAudioAttributesContentType"); + } + setAudioAttributesContentType(contentType); + + Boolean forceHandleAudioRouting = null; + if (configuration.get("forceHandleAudioRouting") instanceof Boolean) { + forceHandleAudioRouting = (Boolean) configuration.get("forceHandleAudioRouting"); + } + setForceHandleAudioRouting(forceHandleAudioRouting); + } + + public void setManageAudioFocus(@Nullable Boolean manage) { + if (manage != null && audioSwitch != null) { + this.manageAudioFocus = manage; + Objects.requireNonNull(audioSwitch).setManageAudioFocus(this.manageAudioFocus); + } + } + + public void setAudioMode(@Nullable String audioModeString) { + Integer audioMode = AudioUtils.getAudioModeForString(audioModeString); + + if (audioMode == null) { + return; + } + this.audioMode = audioMode; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setAudioMode(audioMode); + } + } + + public void setFocusMode(@Nullable String focusModeString) { + Integer focusMode = AudioUtils.getFocusModeForString(focusModeString); + + if (focusMode == null) { + return; + } + this.focusMode = focusMode; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setFocusMode(focusMode); + } + } + + public void setAudioStreamType(@Nullable String streamTypeString) { + Integer streamType = AudioUtils.getStreamTypeForString(streamTypeString); + + if (streamType == null) { + return; + } + this.audioStreamType = streamType; + if (audioSwitch != null) { + 
Objects.requireNonNull(audioSwitch).setAudioStreamType(this.audioStreamType); + } + } + + public void setAudioAttributesUsageType(@Nullable String usageTypeString) { + Integer usageType = AudioUtils.getAudioAttributesUsageTypeForString(usageTypeString); + + if (usageType == null) { + return; + } + this.audioAttributeUsageType = usageType; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setAudioAttributeUsageType(this.audioAttributeUsageType); + } + } + + public void setAudioAttributesContentType(@Nullable String contentTypeString) { + Integer contentType = AudioUtils.getAudioAttributesContentTypeFromString(contentTypeString); + + if (contentType == null) { + return; + } + this.audioAttributeContentType = contentType; + if (audioSwitch != null) { + Objects.requireNonNull(audioSwitch).setAudioAttributeContentType(this.audioAttributeContentType); + } + } + + public void setForceHandleAudioRouting(@Nullable Boolean force) { + if (force != null && audioSwitch != null) { + this.forceHandleAudioRouting = force; + Objects.requireNonNull(audioSwitch).setForceHandleAudioRouting(this.forceHandleAudioRouting); + } + } + + public void clearCommunicationDevice() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + audioManager.clearCommunicationDevice(); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java new file mode 100644 index 0000000000..13dd4ba233 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java @@ -0,0 +1,264 @@ +package com.cloudwebrtc.webrtc.audio; + +import android.media.AudioAttributes; +import android.media.AudioDeviceInfo; +import android.media.AudioManager; +import android.os.Build; +import android.util.Log; + +import androidx.annotation.Nullable; + +public class AudioUtils { + + private static final String TAG = "AudioUtils"; + + @Nullable + public static Integer getAudioModeForString(@Nullable 
String audioModeString) { + if (audioModeString == null) { + return null; + } + + Integer audioMode = null; + switch (audioModeString) { + case "normal": + audioMode = AudioManager.MODE_NORMAL; + break; + case "callScreening": + audioMode = AudioManager.MODE_CALL_SCREENING; + break; + case "inCall": + audioMode = AudioManager.MODE_IN_CALL; + break; + case "inCommunication": + audioMode = AudioManager.MODE_IN_COMMUNICATION; + break; + case "ringtone": + audioMode = AudioManager.MODE_RINGTONE; + break; + default: + Log.w(TAG, "Unknown audio mode: " + audioModeString); + break; + } + + return audioMode; + } + + @Nullable + public static Integer getFocusModeForString(@Nullable String focusModeString) { + if (focusModeString == null) { + return null; + } + + Integer focusMode = null; + switch (focusModeString) { + case "gain": + focusMode = AudioManager.AUDIOFOCUS_GAIN; + break; + case "gainTransient": + focusMode = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT; + break; + case "gainTransientExclusive": + focusMode = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE; + break; + case "gainTransientMayDuck": + focusMode = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; + break; + case "loss": + focusMode = AudioManager.AUDIOFOCUS_LOSS; + break; + default: + Log.w(TAG, "Unknown audio focus mode: " + focusModeString); + break; + } + + return focusMode; + } + + @Nullable + public static Integer getStreamTypeForString(@Nullable String streamTypeString) { + if (streamTypeString == null) { + return null; + } + + Integer streamType = null; + switch (streamTypeString) { + case "accessibility": + streamType = AudioManager.STREAM_ACCESSIBILITY; + break; + case "alarm": + streamType = AudioManager.STREAM_ALARM; + break; + case "dtmf": + streamType = AudioManager.STREAM_DTMF; + break; + case "music": + streamType = AudioManager.STREAM_MUSIC; + break; + case "notification": + streamType = AudioManager.STREAM_NOTIFICATION; + break; + case "ring": + streamType = AudioManager.STREAM_RING; + 
break; + case "system": + streamType = AudioManager.STREAM_SYSTEM; + break; + case "voiceCall": + streamType = AudioManager.STREAM_VOICE_CALL; + break; + default: + Log.w(TAG, "Unknown audio stream type: " + streamTypeString); + break; + } + + return streamType; + } + + @Nullable + public static Integer getAudioAttributesUsageTypeForString(@Nullable String usageTypeString) { + + if (usageTypeString == null) { + return null; + } + + Integer usageType = null; + switch (usageTypeString) { + case "alarm": + usageType = AudioAttributes.USAGE_ALARM; + break; + case "assistanceAccessibility": + usageType = AudioAttributes.USAGE_ASSISTANCE_ACCESSIBILITY; + break; + case "assistanceNavigationGuidance": + usageType = AudioAttributes.USAGE_ASSISTANCE_NAVIGATION_GUIDANCE; + break; + case "assistanceSonification": + usageType = AudioAttributes.USAGE_ASSISTANCE_SONIFICATION; + break; + case "assistant": + usageType = AudioAttributes.USAGE_ASSISTANT; + break; + case "game": + usageType = AudioAttributes.USAGE_GAME; + break; + case "media": + usageType = AudioAttributes.USAGE_MEDIA; + break; + case "notification": + usageType = AudioAttributes.USAGE_NOTIFICATION; + break; + case "notificationEvent": + usageType = AudioAttributes.USAGE_NOTIFICATION_EVENT; + break; + case "notificationRingtone": + usageType = AudioAttributes.USAGE_NOTIFICATION_RINGTONE; + break; + case "unknown": + usageType = AudioAttributes.USAGE_UNKNOWN; + break; + case "voiceCommunication": + usageType = AudioAttributes.USAGE_VOICE_COMMUNICATION; + break; + case "voiceCommunicationSignalling": + usageType = AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING; + break; + default: + Log.w(TAG, "Unknown audio attributes usage type: " + usageTypeString); + break; + } + + return usageType; + } + + @Nullable + public static Integer getAudioAttributesContentTypeFromString(@Nullable String contentTypeString) { + + if (contentTypeString == null) { + return null; + } + + Integer contentType = null; + switch 
(contentTypeString) { + case "movie": + contentType = AudioAttributes.CONTENT_TYPE_MOVIE; + break; + case "music": + contentType = AudioAttributes.CONTENT_TYPE_MUSIC; + break; + case "sonification": + contentType = AudioAttributes.CONTENT_TYPE_SONIFICATION; + break; + case "speech": + contentType = AudioAttributes.CONTENT_TYPE_SPEECH; + break; + case "unknown": + contentType = AudioAttributes.CONTENT_TYPE_UNKNOWN; + break; + default: + Log.w(TAG, "Unknown audio attributes content type:" + contentTypeString); + break; + } + + return contentType; + + } + + static public String getAudioDeviceId(AudioDeviceInfo device) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + return "audio-1"; + } else { + + String address = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? "" : device.getAddress(); + String deviceId = "" + device.getId(); + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { + deviceId = "microphone-" + address; + } + if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + deviceId = "wired-headset"; + } + if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { + deviceId = "bluetooth"; + } + return deviceId; + } + } + + static public String getAudioGroupId(AudioDeviceInfo device) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + return "microphone"; + } else { + String groupId = "" + device.getType(); + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { + groupId = "microphone"; + } + if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + groupId = "wired-headset"; + } + if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { + groupId = "bluetooth"; + } + return groupId; + } + } + + static public String getAudioDeviceLabel(AudioDeviceInfo device) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + return "Audio"; + } else { + String address = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? 
"" : device.getAddress(); + String label = device.getProductName().toString(); + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { + label = "Built-in Microphone (" + address + ")"; + } + + if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + label = "Wired Headset Microphone"; + } + + if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { + label = device.getProductName().toString(); + } + return label; + } + } +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/LocalAudioTrack.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/LocalAudioTrack.java new file mode 100644 index 0000000000..165d15459f --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/LocalAudioTrack.java @@ -0,0 +1,73 @@ +package com.cloudwebrtc.webrtc.audio; + +import android.media.AudioFormat; +import android.os.SystemClock; + +import com.cloudwebrtc.webrtc.LocalTrack; + +import org.webrtc.AudioTrack; +import org.webrtc.AudioTrackSink; +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +/** + * LocalAudioTrack represents an audio track that is sourced from local audio capture. + */ +public class LocalAudioTrack + extends LocalTrack implements JavaAudioDeviceModule.SamplesReadyCallback { + public LocalAudioTrack(AudioTrack audioTrack) { + super(audioTrack); + } + + final List sinks = new ArrayList<>(); + + /** + * Add a sink to receive audio data from this track. + */ + public void addSink(AudioTrackSink sink) { + synchronized (sinks) { + sinks.add(sink); + } + } + + /** + * Remove a sink for this track. 
+ */ + public void removeSink(AudioTrackSink sink) { + synchronized (sinks) { + sinks.remove(sink); + } + } + + private int getBytesPerSample(int audioFormat) { + switch (audioFormat) { + case AudioFormat.ENCODING_PCM_8BIT: + return 1; + case AudioFormat.ENCODING_PCM_16BIT: + case AudioFormat.ENCODING_IEC61937: + case AudioFormat.ENCODING_DEFAULT: + return 2; + case AudioFormat.ENCODING_PCM_FLOAT: + return 4; + default: + throw new IllegalArgumentException("Bad audio format " + audioFormat); + } + } + + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + int bitsPerSample = getBytesPerSample(audioSamples.getAudioFormat()) * 8; + int numFrames = audioSamples.getSampleRate() / 100; + long timestamp = SystemClock.elapsedRealtime(); + synchronized (sinks) { + for (AudioTrackSink sink : sinks) { + ByteBuffer byteBuffer = ByteBuffer.wrap(audioSamples.getData()); + sink.onData(byteBuffer, bitsPerSample, audioSamples.getSampleRate(), + audioSamples.getChannelCount(), numFrames, timestamp); + } + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java new file mode 100644 index 0000000000..25195104fc --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java @@ -0,0 +1,32 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.util.ArrayList; +import java.util.List; + +public class PlaybackSamplesReadyCallbackAdapter + implements JavaAudioDeviceModule.PlaybackSamplesReadyCallback { + public PlaybackSamplesReadyCallbackAdapter() {} + + List callbacks = new ArrayList<>(); + + public void addCallback(JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.add(callback); + } + } + + public void 
removeCallback(JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.remove(callback); + } + } + + @Override + public void onWebRtcAudioTrackSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + for (JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback : callbacks) { + callback.onWebRtcAudioTrackSamplesReady(audioSamples); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/RecordSamplesReadyCallbackAdapter.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/RecordSamplesReadyCallbackAdapter.java new file mode 100644 index 0000000000..959062a930 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/RecordSamplesReadyCallbackAdapter.java @@ -0,0 +1,34 @@ +package com.cloudwebrtc.webrtc.audio; + +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.util.ArrayList; +import java.util.List; + +public class RecordSamplesReadyCallbackAdapter + implements JavaAudioDeviceModule.SamplesReadyCallback { + public RecordSamplesReadyCallbackAdapter() {} + + List callbacks = new ArrayList<>(); + + public void addCallback(JavaAudioDeviceModule.SamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.add(callback); + } + } + + public void removeCallback(JavaAudioDeviceModule.SamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.remove(callback); + } + } + + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + synchronized (callbacks) { + for (JavaAudioDeviceModule.SamplesReadyCallback callback : callbacks) { + callback.onWebRtcAudioRecordSamplesReady(audioSamples); + } + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioChannel.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioChannel.java new file mode 100644 index 0000000000..5b9a033476 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioChannel.java 
@@ -0,0 +1,6 @@ +package com.cloudwebrtc.webrtc.record; + +public enum AudioChannel { + INPUT, + OUTPUT +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java new file mode 100644 index 0000000000..ddc4d1ff2f --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java @@ -0,0 +1,34 @@ +package com.cloudwebrtc.webrtc.record; + +import android.annotation.SuppressLint; + +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; +import org.webrtc.audio.JavaAudioDeviceModule.AudioSamples; + +import java.util.HashMap; + +/** JavaAudioDeviceModule allows attaching samples callback only on building + * We don't want to instantiate VideoFileRenderer and codecs at this step + * It's simple dummy class, it does nothing until samples are necessary */ +@SuppressWarnings("WeakerAccess") +public class AudioSamplesInterceptor implements SamplesReadyCallback { + + @SuppressLint("UseSparseArrays") + protected final HashMap callbacks = new HashMap<>(); + + @Override + public void onWebRtcAudioRecordSamplesReady(AudioSamples audioSamples) { + for (SamplesReadyCallback callback : callbacks.values()) { + callback.onWebRtcAudioRecordSamplesReady(audioSamples); + } + } + + public void attachCallback(Integer id, SamplesReadyCallback callback) throws Exception { + callbacks.put(id, callback); + } + + public void detachCallback(Integer id) { + callbacks.remove(id); + } + +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioTrackInterceptor.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioTrackInterceptor.java new file mode 100644 index 0000000000..4f71082100 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioTrackInterceptor.java @@ -0,0 +1,112 @@ +package com.cloudwebrtc.webrtc.record; + +import android.annotation.TargetApi; 
+import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.os.Build; + +import org.webrtc.audio.JavaAudioDeviceModule.AudioSamples; +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; + +import java.nio.ByteBuffer; + +import androidx.annotation.NonNull; + +/** + * Wrapper around audio track + * Intercepts write calls and passes it to callback + * **/ +public final class AudioTrackInterceptor extends AudioTrack { + final public AudioTrack originalTrack; + final private SamplesReadyCallback callback; + + public AudioTrackInterceptor(@NonNull AudioTrack originalTrack, @NonNull SamplesReadyCallback callback) { + // That just random params, we don't care about object that will be created + super( + AudioManager.STREAM_VOICE_CALL, + 44200, + AudioFormat.CHANNEL_OUT_MONO, + AudioFormat.ENCODING_PCM_16BIT, + 128, + AudioTrack.MODE_STREAM + ); + this.originalTrack = originalTrack; + this.callback = callback; + } + + @Override + public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) { + callback.onWebRtcAudioRecordSamplesReady(new AudioSamples( + originalTrack.getAudioFormat(), + originalTrack.getChannelCount(), + originalTrack.getSampleRate(), + audioData + )); + return originalTrack.write(audioData, offsetInBytes, sizeInBytes); + } + + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + @Override + public int write(@NonNull ByteBuffer audioData, int sizeInBytes, int writeMode) { + byte[] trimmed = new byte[sizeInBytes]; + int position = audioData.position(); + audioData.get(trimmed, 0, sizeInBytes); + audioData.position(position); + callback.onWebRtcAudioRecordSamplesReady(new AudioSamples( + originalTrack.getAudioFormat(), + originalTrack.getChannelCount(), + originalTrack.getSampleRate(), + trimmed + )); + return originalTrack.write(audioData, sizeInBytes, writeMode); + } + + /** + * Override all required calls to mimic original track + * 
https://webrtc.googlesource.com/src/+/master/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java + * **/ + + @Override + public int getPlayState() { + return originalTrack.getPlayState(); + } + + @Override + public void play() throws IllegalStateException { + originalTrack.play(); + } + + @Override + public void stop() throws IllegalStateException { + originalTrack.stop(); + } + + @TargetApi(Build.VERSION_CODES.N) + @Override + public int getUnderrunCount() { + return originalTrack.getUnderrunCount(); + } + + @TargetApi(Build.VERSION_CODES.N) + @Override + public int getBufferCapacityInFrames() { + return originalTrack.getBufferCapacityInFrames(); + } + + @TargetApi(Build.VERSION_CODES.M) + @Override + public int getBufferSizeInFrames() { + return originalTrack.getBufferSizeInFrames(); + } + + @Override + public void release() { + originalTrack.release(); + } + + @Override + public int getPlaybackHeadPosition() { + return originalTrack.getPlaybackHeadPosition(); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java new file mode 100644 index 0000000000..fb48c68a15 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java @@ -0,0 +1,128 @@ +package com.cloudwebrtc.webrtc.record; + +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.ImageFormat; +import android.graphics.Matrix; +import android.graphics.Rect; +import android.graphics.YuvImage; +import android.os.Handler; +import android.os.Looper; + +import org.webrtc.VideoFrame; +import org.webrtc.VideoSink; +import org.webrtc.VideoTrack; +import org.webrtc.YuvHelper; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; + +import io.flutter.plugin.common.MethodChannel; + +public class FrameCapturer implements VideoSink { + private final 
VideoTrack videoTrack; + private File file; + private final MethodChannel.Result callback; + private boolean gotFrame = false; + + public FrameCapturer(VideoTrack track, File file, MethodChannel.Result callback) { + videoTrack = track; + this.file = file; + this.callback = callback; + track.addSink(this); + } + + @Override + public void onFrame(VideoFrame videoFrame) { + if (gotFrame) + return; + gotFrame = true; + videoFrame.retain(); + VideoFrame.Buffer buffer = videoFrame.getBuffer(); + VideoFrame.I420Buffer i420Buffer = buffer.toI420(); + ByteBuffer y = i420Buffer.getDataY(); + ByteBuffer u = i420Buffer.getDataU(); + ByteBuffer v = i420Buffer.getDataV(); + int width = i420Buffer.getWidth(); + int height = i420Buffer.getHeight(); + int[] strides = new int[] { + i420Buffer.getStrideY(), + i420Buffer.getStrideU(), + i420Buffer.getStrideV() + }; + final int chromaWidth = (width + 1) / 2; + final int chromaHeight = (height + 1) / 2; + final int minSize = width * height + chromaWidth * chromaHeight * 2; + + ByteBuffer yuvBuffer = ByteBuffer.allocateDirect(minSize); + // NV21 is the same as NV12, only that V and U are stored in the reverse oder + // NV21 (YYYYYYYYY:VUVU) + // NV12 (YYYYYYYYY:UVUV) + // Therefore we can use the NV12 helper, but swap the U and V input buffers + YuvHelper.I420ToNV12(y, strides[0], v, strides[2], u, strides[1], yuvBuffer, width, height); + + // For some reason the ByteBuffer may have leading 0. We remove them as + // otherwise the + // image will be shifted + byte[] cleanedArray = Arrays.copyOfRange(yuvBuffer.array(), yuvBuffer.arrayOffset(), minSize); + + YuvImage yuvImage = new YuvImage( + cleanedArray, + ImageFormat.NV21, + width, + height, + // We omit the strides here. If they were included, the resulting image would + // have its colors offset. 
+ null); + i420Buffer.release(); + videoFrame.release(); + new Handler(Looper.getMainLooper()).post(() -> { + videoTrack.removeSink(this); + }); + try { + if (!file.exists()) { + //noinspection ResultOfMethodCallIgnored + file.getParentFile().mkdirs(); + //noinspection ResultOfMethodCallIgnored + file.createNewFile(); + } + } catch (IOException io) { + callback.error("IOException", io.getLocalizedMessage(), io); + return; + } + try (FileOutputStream outputStream = new FileOutputStream(file)) { + yuvImage.compressToJpeg( + new Rect(0, 0, width, height), + 100, + outputStream + ); + switch (videoFrame.getRotation()) { + case 0: + break; + case 90: + case 180: + case 270: + Bitmap original = BitmapFactory.decodeFile(file.toString()); + Matrix matrix = new Matrix(); + matrix.postRotate(videoFrame.getRotation()); + Bitmap rotated = Bitmap.createBitmap(original, 0, 0, original.getWidth(), original.getHeight(), matrix, true); + FileOutputStream rotatedOutputStream = new FileOutputStream(file); + rotated.compress(Bitmap.CompressFormat.JPEG, 100, rotatedOutputStream); + break; + default: + // Rotation is checked to always be 0, 90, 180 or 270 by VideoFrame + throw new RuntimeException("Invalid rotation"); + } + callback.success(null); + } catch (IOException io) { + callback.error("IOException", io.getLocalizedMessage(), io); + } catch (IllegalArgumentException iae) { + callback.error("IllegalArgumentException", iae.getLocalizedMessage(), iae); + } finally { + file = null; + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java new file mode 100644 index 0000000000..f1c45357bc --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java @@ -0,0 +1,67 @@ +package com.cloudwebrtc.webrtc.record; + +import androidx.annotation.Nullable; +import android.util.Log; + +import com.cloudwebrtc.webrtc.utils.EglUtils; + +import 
org.webrtc.VideoTrack; + +import java.io.File; + +public class MediaRecorderImpl { + + private final Integer id; + private final VideoTrack videoTrack; + private final AudioSamplesInterceptor audioInterceptor; + private VideoFileRenderer videoFileRenderer; + private boolean isRunning = false; + private File recordFile; + + public MediaRecorderImpl(Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioSamplesInterceptor audioInterceptor) { + this.id = id; + this.videoTrack = videoTrack; + this.audioInterceptor = audioInterceptor; + } + + public void startRecording(File file) throws Exception { + recordFile = file; + if (isRunning) + return; + isRunning = true; + //noinspection ResultOfMethodCallIgnored + file.getParentFile().mkdirs(); + if (videoTrack != null) { + videoFileRenderer = new VideoFileRenderer( + file.getAbsolutePath(), + EglUtils.getRootEglBaseContext(), + audioInterceptor != null + ); + videoTrack.addSink(videoFileRenderer); + if (audioInterceptor != null) + audioInterceptor.attachCallback(id, videoFileRenderer); + } else { + Log.e(TAG, "Video track is null"); + if (audioInterceptor != null) { + //TODO(rostopira): audio only recording + throw new Exception("Audio-only recording not implemented yet"); + } + } + } + + public File getRecordFile() { return recordFile; } + + public void stopRecording() { + isRunning = false; + if (audioInterceptor != null) + audioInterceptor.detachCallback(id); + if (videoTrack != null && videoFileRenderer != null) { + videoTrack.removeSink(videoFileRenderer); + videoFileRenderer.release(); + videoFileRenderer = null; + } + } + + private static final String TAG = "MediaRecorderImpl"; + +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/OutputAudioSamplesInterceptor.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/OutputAudioSamplesInterceptor.java new file mode 100644 index 0000000000..7628a096be --- /dev/null +++ 
b/android/src/main/java/com/cloudwebrtc/webrtc/record/OutputAudioSamplesInterceptor.java @@ -0,0 +1,27 @@ +package com.cloudwebrtc.webrtc.record; + +import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.audio.WebRtcAudioTrackUtils; + +public class OutputAudioSamplesInterceptor extends AudioSamplesInterceptor { + private final JavaAudioDeviceModule audioDeviceModule; + + public OutputAudioSamplesInterceptor(JavaAudioDeviceModule audioDeviceModule) { + super(); + this.audioDeviceModule = audioDeviceModule; + } + + @Override + public void attachCallback(Integer id, JavaAudioDeviceModule.SamplesReadyCallback callback) throws Exception { + if (callbacks.isEmpty()) + WebRtcAudioTrackUtils.attachOutputCallback(this, audioDeviceModule); + super.attachCallback(id, callback); + } + + @Override + public void detachCallback(Integer id) { + super.detachCallback(id); + if (callbacks.isEmpty()) + WebRtcAudioTrackUtils.detachOutputCallback(audioDeviceModule); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java new file mode 100644 index 0000000000..030c36bf16 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java @@ -0,0 +1,325 @@ +// Modifications by Signify, Copyright 2025, Signify Holding - SPDX-License-Identifier: MIT + +package com.cloudwebrtc.webrtc.record; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.os.Handler; +import android.os.HandlerThread; +import android.util.Log; +import android.view.Surface; + +import org.webrtc.EglBase; +import org.webrtc.GlRectDrawer; +import org.webrtc.VideoFrame; +import org.webrtc.VideoFrameDrawer; +import org.webrtc.VideoSink; +import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; + +import 
java.io.IOException; +import java.nio.ByteBuffer; +import java.util.concurrent.CountDownLatch; + +class VideoFileRenderer implements VideoSink, SamplesReadyCallback { + private static final String TAG = "VideoFileRenderer"; + private final HandlerThread renderThread; + private final Handler renderThreadHandler; + private final HandlerThread audioThread; + private final Handler audioThreadHandler; + private int outputFileWidth = -1; + private int outputFileHeight = -1; + private ByteBuffer[] encoderOutputBuffers; + private ByteBuffer[] audioInputBuffers; + private ByteBuffer[] audioOutputBuffers; + private EglBase eglBase; + private final EglBase.Context sharedContext; + private VideoFrameDrawer frameDrawer; + + // TODO: these ought to be configurable as well + private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding + private static final int FRAME_RATE = 30; // 30fps + private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames + + private final MediaMuxer mediaMuxer; + private MediaCodec encoder; + private final MediaCodec.BufferInfo bufferInfo; + private MediaCodec.BufferInfo audioBufferInfo; + private int trackIndex = -1; + private int audioTrackIndex; + private boolean isRunning = true; + private GlRectDrawer drawer; + private Surface surface; + private MediaCodec audioEncoder; + + VideoFileRenderer(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException { + renderThread = new HandlerThread(TAG + "RenderThread"); + renderThread.start(); + renderThreadHandler = new Handler(renderThread.getLooper()); + if (withAudio) { + audioThread = new HandlerThread(TAG + "AudioThread"); + audioThread.start(); + audioThreadHandler = new Handler(audioThread.getLooper()); + } else { + audioThread = null; + audioThreadHandler = null; + } + bufferInfo = new MediaCodec.BufferInfo(); + this.sharedContext = sharedContext; + + // Create a MediaMuxer. 
We can't add the video track and start() the muxer here, + // because our MediaFormat doesn't have the Magic Goodies. These can only be + // obtained from the encoder after it has started processing data. + mediaMuxer = new MediaMuxer(outputFile, + MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + + audioTrackIndex = withAudio ? -1 : 0; + } + + private void initVideoEncoder() { + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight); + + // Set some properties. Failing to specify some of these can cause the MediaCodec + // configure() call to throw an unhelpful exception. + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, + MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000); + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); + + // Create a MediaCodec encoder, and configure it with our format. Get a Surface + // we can use for input and wrap it with a class that handles the EGL work. 
+ try { + encoder = MediaCodec.createEncoderByType(MIME_TYPE); + encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + renderThreadHandler.post(() -> { + eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE); + surface = encoder.createInputSurface(); + eglBase.createSurface(surface); + eglBase.makeCurrent(); + drawer = new GlRectDrawer(); + }); + } catch (Exception e) { + Log.wtf(TAG, e); + } + } + + @Override + public void onFrame(VideoFrame frame) { + frame.retain(); + if (outputFileWidth == -1) { + outputFileWidth = frame.getRotatedWidth(); + outputFileHeight = frame.getRotatedHeight(); + initVideoEncoder(); + } + renderThreadHandler.post(() -> renderFrameOnRenderThread(frame)); + } + + private void renderFrameOnRenderThread(VideoFrame frame) { + if (frameDrawer == null) { + frameDrawer = new VideoFrameDrawer(); + } + frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight); + frame.release(); + drainEncoder(); + eglBase.swapBuffers(); + } + + /** + * Release all resources. All already posted frames will be rendered first. + */ + // Start Signify modification + void release() { + isRunning = false; + CountDownLatch latch = new CountDownLatch(audioThreadHandler != null ? 
2 : 1); + if (audioThreadHandler != null) { + audioThreadHandler.post(() -> { + try{ + if (audioEncoder != null) { + audioEncoder.stop(); + audioEncoder.release(); + } + audioThread.quit(); + } finally { + latch.countDown(); + } + }); + } + + renderThreadHandler.post(() -> { + try { + if (encoder != null) { + encoder.stop(); + encoder.release(); + } + eglBase.release(); + if (muxerStarted) { + mediaMuxer.stop(); + mediaMuxer.release(); + muxerStarted = false; + } + renderThread.quit(); + } finally { + latch.countDown(); + } + }); + + try { + latch.await(); + } catch (InterruptedException e) { + Log.e(TAG, "Release interrupted", e); + Thread.currentThread().interrupt(); + } + } + // End Signify modification + + private boolean encoderStarted = false; + private volatile boolean muxerStarted = false; + private long videoFrameStart = 0; + + private void drainEncoder() { + if (!encoderStarted) { + encoder.start(); + encoderOutputBuffers = encoder.getOutputBuffers(); + encoderStarted = true; + return; + } + while (true) { + int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + break; + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + // not expected for an encoder + encoderOutputBuffers = encoder.getOutputBuffers(); + Log.e(TAG, "encoder output buffers changed"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // not expected for an encoder + MediaFormat newFormat = encoder.getOutputFormat(); + + Log.e(TAG, "encoder output format changed: " + newFormat); + trackIndex = mediaMuxer.addTrack(newFormat); + if (trackIndex != -1 && !muxerStarted) { + mediaMuxer.start(); + muxerStarted = true; + } + if (!muxerStarted) + break; + } else if (encoderStatus < 0) { + Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); + } else { // encoderStatus >= 0 + try { + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + if 
(encodedData == null) { + Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null"); + break; + } + // It's usually necessary to adjust the ByteBuffer values to match BufferInfo. + encodedData.position(bufferInfo.offset); + encodedData.limit(bufferInfo.offset + bufferInfo.size); + if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) { + videoFrameStart = bufferInfo.presentationTimeUs; + } + bufferInfo.presentationTimeUs -= videoFrameStart; + if (muxerStarted) + mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo); + isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0; + encoder.releaseOutputBuffer(encoderStatus, false); + if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + break; + } + } catch (Exception e) { + Log.wtf(TAG, e); + break; + } + } + } + } + + private long presTime = 0L; + + private void drainAudio() { + if (audioBufferInfo == null) + audioBufferInfo = new MediaCodec.BufferInfo(); + while (true) { + int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + break; + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + // not expected for an encoder + audioOutputBuffers = audioEncoder.getOutputBuffers(); + Log.w(TAG, "encoder output buffers changed"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // not expected for an encoder + MediaFormat newFormat = audioEncoder.getOutputFormat(); + + Log.w(TAG, "encoder output format changed: " + newFormat); + audioTrackIndex = mediaMuxer.addTrack(newFormat); + if (audioTrackIndex != -1 && !muxerStarted) { + mediaMuxer.start(); + muxerStarted = true; + } + if (!muxerStarted) + break; + } else if (encoderStatus < 0) { + Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); + } else { // encoderStatus >= 0 + try { + ByteBuffer encodedData = audioOutputBuffers[encoderStatus]; + if 
(encodedData == null) { + Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null"); + break; + } + // It's usually necessary to adjust the ByteBuffer values to match BufferInfo. + encodedData.position(audioBufferInfo.offset); + encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size); + if (muxerStarted) + mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo); + isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0; + audioEncoder.releaseOutputBuffer(encoderStatus, false); + if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + break; + } + } catch (Exception e) { + Log.wtf(TAG, e); + break; + } + } + } + } + + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + if (!isRunning) + return; + audioThreadHandler.post(() -> { + if (audioEncoder == null) try { + audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm"); + MediaFormat format = new MediaFormat(); + format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm"); + format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount()); + format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate()); + format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024); + format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC); + audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + audioEncoder.start(); + audioInputBuffers = audioEncoder.getInputBuffers(); + audioOutputBuffers = audioEncoder.getOutputBuffers(); + } catch (IOException exception) { + Log.wtf(TAG, exception); + } + int bufferIndex = audioEncoder.dequeueInputBuffer(0); + if (bufferIndex >= 0) { + ByteBuffer buffer = audioInputBuffers[bufferIndex]; + buffer.clear(); + byte[] data = audioSamples.getData(); + buffer.put(data); + audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0); + presTime += 
data.length * 125 / 12; // 1000000 microseconds / 48000hz / 2 bytes + } + drainAudio(); + }); + } + +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadResult.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadResult.java new file mode 100644 index 0000000000..e1de0e46c1 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadResult.java @@ -0,0 +1,38 @@ +package com.cloudwebrtc.webrtc.utils; + +import android.os.Looper; +import android.os.Handler; + +import io.flutter.plugin.common.MethodChannel; + +public final class AnyThreadResult implements MethodChannel.Result { + final private MethodChannel.Result result; + final private Handler handler = new Handler(Looper.getMainLooper()); + + public AnyThreadResult(MethodChannel.Result result) { + this.result = result; + } + + @Override + public void success(Object o) { + post(()->result.success(o)); + } + + @Override + public void error(String s, String s1, Object o) { + post(()->result.error(s, s1, o)); + } + + @Override + public void notImplemented() { + post(result::notImplemented); + } + + private void post(Runnable r) { + if(Looper.getMainLooper() == Looper.myLooper()){ + r.run(); + }else{ + handler.post(r); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadSink.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadSink.java new file mode 100644 index 0000000000..ad0bce2fb4 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadSink.java @@ -0,0 +1,38 @@ +package com.cloudwebrtc.webrtc.utils; + +import android.os.Handler; +import android.os.Looper; + +import io.flutter.plugin.common.EventChannel; + +public final class AnyThreadSink implements EventChannel.EventSink { + final private EventChannel.EventSink eventSink; + final private Handler handler = new Handler(Looper.getMainLooper()); + + public AnyThreadSink(EventChannel.EventSink eventSink) { + this.eventSink = eventSink; + } + + 
@Override + public void success(Object o) { + post(()->eventSink.success(o)); + } + + @Override + public void error(String s, String s1, Object o) { + post(()->eventSink.error(s, s1, o)); + } + + @Override + public void endOfStream() { + post(eventSink::endOfStream); + } + + private void post(Runnable r) { + if(Looper.getMainLooper() == Looper.myLooper()){ + r.run(); + }else{ + handler.post(r); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Callback.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/Callback.java index e9005d6b9e..a74409a3df 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Callback.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/Callback.java @@ -2,5 +2,5 @@ public interface Callback { - public void invoke(Object... args); + void invoke(Object... args); } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsArray.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsArray.java index c994c74adb..c9dd0089df 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsArray.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsArray.java @@ -39,6 +39,10 @@ public String getString(int index){ return (String) mArray.get(index); } + public Byte[] getByte(int index){ + return (Byte[]) mArray.get(index); + } + public ConstraintsArray getArray(int index){ return new ConstraintsArray((ArrayList)mArray.get(index)); } @@ -64,6 +68,8 @@ public ObjectType getType(int index) { return ObjectType.Array; } else if (object instanceof Map) { return ObjectType.Map; + } else if (object instanceof Byte) { + return ObjectType.Byte; } return ObjectType.Null; } @@ -96,6 +102,10 @@ public void pushArray(ConstraintsArray array){ mArray.add(array.toArrayList()); } + public void pushByte(byte[] value){ + mArray.add(value); + } + public void pushMap(ConstraintsMap map){ mArray.add(map.toMap()); } diff --git 
a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java index 8320ca03f7..5a6d8b6d3b 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java @@ -48,7 +48,11 @@ public String getString(String name){ } public ConstraintsMap getMap(String name){ - return new ConstraintsMap((Map)mMap.get(name)); + Object value = mMap.get(name); + if (value == null) { + return null; + } + return new ConstraintsMap((Map) value); } public ObjectType getType(String name) { @@ -65,8 +69,10 @@ public ObjectType getType(String name) { return ObjectType.Map; } else if (value instanceof ArrayList) { return ObjectType.Array; + } else if (value instanceof Byte) { + return ObjectType.Byte; } else { - throw new IllegalArgumentException("Invalid value " + value.toString() + " for key " + name + + throw new IllegalArgumentException("Invalid value " + value + " for key " + name + "contained in ConstraintsMap"); } } @@ -83,10 +89,18 @@ public void putInt(String key, int value) { mMap.put(key, value); } + public void putLong(String key, long value) { + mMap.put(key, value); + } + public void putString(String key, String value) { mMap.put(key, value); } + public void putByte(String key, byte[] value) { + mMap.put(key, value); + } + public void putNull(String key) { mMap.put(key, null); } @@ -104,10 +118,21 @@ public void putArray(String key, ArrayList value) { } public ConstraintsArray getArray(String name){ - return new ConstraintsArray((ArrayList)mMap.get(name)); + Object value = mMap.get(name); + if (value == null) { + return null; + } + return new ConstraintsArray((ArrayList) value); } public ArrayList getListArray(String name){ return (ArrayList) mMap.get(name); } + + @Override + public String toString() { + return "ConstraintsMap{" + + "mMap=" + mMap + + '}'; + } } diff --git 
a/android/src/main/java/com/cloudwebrtc/webrtc/utils/EglUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/EglUtils.java index a5f95b6cd6..8291e97fc0 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/EglUtils.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/EglUtils.java @@ -1,17 +1,14 @@ package com.cloudwebrtc.webrtc.utils; -import android.util.Log; +import android.os.Build; import org.webrtc.EglBase; -import org.webrtc.EglBase10; -import org.webrtc.EglBase14; public class EglUtils { /** * The root {@link EglBase} instance shared by the entire application for * the sake of reducing the utilization of system resources (such as EGL - * contexts). It selects between {@link EglBase10} and {@link EglBase14} - * by performing a runtime check. + * contexts). */ private static EglBase rootEglBase; @@ -21,43 +18,10 @@ public class EglUtils { */ public static synchronized EglBase getRootEglBase() { if (rootEglBase == null) { - // XXX EglBase14 will report that isEGL14Supported() but its - // getEglConfig() will fail with a RuntimeException with message - // "Unable to find any matching EGL config". Fall back to EglBase10 - // in the described scenario. - EglBase eglBase = null; - int[] configAttributes = EglBase.CONFIG_PLAIN; - RuntimeException cause = null; - - try { - if (EglBase14.isEGL14Supported()) { - eglBase - = new EglBase14( - /* sharedContext */ null, - configAttributes); - } - } catch (RuntimeException ex) { - // Fall back to EglBase10. - cause = ex; - } - - if (eglBase == null) { - try { - eglBase - = new EglBase10( - /* sharedContext */ null, - configAttributes); - } catch (RuntimeException ex) { - // Neither EglBase14, nor EglBase10 succeeded to initialize. 
- cause = ex; - } - } - - if (cause != null) { - Log.e(EglUtils.class.getName(), "Failed to create EglBase", cause); - } else { - rootEglBase = eglBase; - } + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) + rootEglBase = EglBase.createEgl10(EglBase.CONFIG_PLAIN); + else + rootEglBase = EglBase.create(); } return rootEglBase; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java new file mode 100644 index 0000000000..3ba4ae9824 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java @@ -0,0 +1,97 @@ +package com.cloudwebrtc.webrtc.utils; + +import android.util.Log; +import java.util.List; +import java.util.Map.Entry; +import org.webrtc.MediaConstraints; +import org.webrtc.MediaConstraints.KeyValuePair; + +public class MediaConstraintsUtils { + + static public final String TAG = "MediaConstraintsUtils"; + + /** + * Parses mandatory and optional "GUM" constraints described by a specific + * ConstraintsMap. + * + * @param constraints A ConstraintsMap which represents a JavaScript object specifying + * the constraints to be parsed into a + * MediaConstraints instance. + * @return A new MediaConstraints instance initialized with the mandatory and optional + * constraint keys and values specified by + * constraints. + */ + public static MediaConstraints parseMediaConstraints(ConstraintsMap constraints) { + MediaConstraints mediaConstraints = new MediaConstraints(); + + // TODO: change getUserMedia constraints format to support new syntax + // constraint format seems changed, and there is no mandatory any more. 
+ // and has a new syntax/attrs to specify resolution + // should change `parseConstraints()` according + // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints + if (constraints.hasKey("mandatory") + && constraints.getType("mandatory") == ObjectType.Map) { + parseConstraints(constraints.getMap("mandatory"), + mediaConstraints.mandatory); + } else { + Log.d(TAG, "mandatory constraints are not a map"); + } + + if (constraints.hasKey("optional") + && constraints.getType("optional") == ObjectType.Array) { + ConstraintsArray optional = constraints.getArray("optional"); + + for (int i = 0, size = optional.size(); i < size; i++) { + if (optional.getType(i) == ObjectType.Map) { + parseConstraints( + optional.getMap(i), + mediaConstraints.optional); + } + } + } else { + Log.d(TAG, "optional constraints are not an array"); + } + + return mediaConstraints; + } + + /** + * Parses a constraint set specified in the form of a JavaScript object into a specific + * List of MediaConstraints.KeyValuePairs. + * + * @param src The constraint set in the form of a JavaScript object to parse. + * @param dst The List of MediaConstraints.KeyValuePairs into which the + * specified src is to be parsed. + */ + private static void parseConstraints( + ConstraintsMap src, + List dst) { + + for (Entry entry : src.toMap().entrySet()) { + String key = entry.getKey(); + String value = getMapStrValue(src, entry.getKey()); + dst.add(new KeyValuePair(key, value)); + } + } + + private static String getMapStrValue(ConstraintsMap map, String key) { + if (!map.hasKey(key)) { + return null; + } + ObjectType type = map.getType(key); + switch (type) { + case Boolean: + return String.valueOf(map.getBoolean(key)); + case Number: + // Don't know how to distinguish between Int and Double from + // ReadableType.Number. 'getInt' will fail on double value, + // while 'getDouble' works for both. 
+ // return String.valueOf(map.getInt(key)); + return String.valueOf(map.getDouble(key)); + case String: + return map.getString(key); + default: + return null; + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ObjectType.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ObjectType.java index 26e6bb79c5..481603d775 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ObjectType.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ObjectType.java @@ -7,4 +7,5 @@ public enum ObjectType { String, Map, Array, + Byte } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java index 066dc6ebde..5e9c8f6033 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java @@ -3,294 +3,224 @@ import android.app.Activity; import android.app.Fragment; import android.app.FragmentTransaction; +import android.content.Context; import android.content.pm.PackageManager; import android.os.Build; +import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.os.ResultReceiver; -import android.support.v4.content.ContextCompat; - -import com.cloudwebrtc.webrtc.FlutterWebRTCPlugin; +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; +import androidx.core.app.ActivityCompat; import java.util.ArrayList; -/** - * Helper module for dealing with dynamic permissions, introduced in Android M - * (API level 23). - */ +/** Helper module for dealing with dynamic permissions, introduced in Android M (API level 23). */ public class PermissionUtils { - /** - * Constants for internal fields in the Bundle exchanged between - * the activity requesting the permissions and the auxiliary activity we - * spawn for this purpose. 
- */ - private static final String GRANT_RESULTS = "GRANT_RESULT"; - private static final String PERMISSIONS = "PERMISSION"; - private static final String REQUEST_CODE = "REQUEST_CODE"; - private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; - - /** - * Incrementing counter for permission requests. Each request must have a - * unique numeric code. - */ - private static int requestCode; - - - private static void maybeRequestPermissionsOnHostResume( - final FlutterWebRTCPlugin plugin, - final String[] permissions, - int[] grantResults, - final ResultReceiver resultReceiver, - int requestCode) { - - /* - if (!(context instanceof ReactContext)) { - // I do not know how to wait for an Activity here. - send(resultReceiver, requestCode, permissions, grantResults); - return; - } - - final Context reactContext = (Context) context; - reactContext.addLifecycleEventListener( - new LifecycleEventListener() { - @Override - public void onHostDestroy() { - } - - @Override - public void onHostPause() { - } - - @Override - public void onHostResume() { - reactContext.removeLifecycleEventListener(this); - requestPermissions(context, permissions, resultReceiver); - } - }); - */ + /** + * Constants for internal fields in the Bundle exchanged between the activity requesting + * the permissions and the auxiliary activity we spawn for this purpose. + */ + private static final String GRANT_RESULTS = "GRANT_RESULT"; + + private static final String PERMISSIONS = "PERMISSION"; + private static final String REQUEST_CODE = "REQUEST_CODE"; + private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; + + /** Incrementing counter for permission requests. Each request must have a unique numeric code. */ + private static int requestCode; + + private static void requestPermissions( + Context context, Activity activity, String[] permissions, ResultReceiver resultReceiver) { + // Ask the Context whether we have already been granted the requested + // permissions. 
+ int size = permissions.length; + int[] grantResults = new int[size]; + boolean permissionsGranted = true; + + for (int i = 0; i < size; ++i) { + int grantResult; + // No need to ask for permission on pre-Marshmallow + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) + grantResult = PackageManager.PERMISSION_GRANTED; + else if (activity != null){ + grantResult = activity.checkSelfPermission(permissions[i]); + } else { + grantResult = ActivityCompat.checkSelfPermission(context, permissions[i]); + } + + grantResults[i] = grantResult; + if (grantResult != PackageManager.PERMISSION_GRANTED) { + permissionsGranted = false; + } } - private static void requestPermissions( - FlutterWebRTCPlugin plugin, - String[] permissions, - ResultReceiver resultReceiver) { - // Ask the Context whether we have already been granted the requested - // permissions. - int size = permissions.length; - int[] grantResults = new int[size]; - boolean permissionsGranted = true; - - for (int i = 0; i < size; ++i) { - int grantResult - = ContextCompat.checkSelfPermission( - plugin.getContext(), - permissions[i]); - - grantResults[i] = grantResult; - if (grantResult != PackageManager.PERMISSION_GRANTED) { - permissionsGranted = false; - } - } - - // Obviously, if the requested permissions have already been granted, - // there is nothing to ask the user about. On the other hand, if there - // is no Activity or the runtime permissions are not supported, there is - // no way to ask the user to grant us the denied permissions. - int requestCode = ++PermissionUtils.requestCode; - - if (permissionsGranted - // Here we test for the target SDK version with which *the app* - // was compiled. If we use Build.VERSION.SDK_INT that would give - // us the API version of the device itself, not the version the - // app was compiled for. When compiled for API level < 23 we - // must still use old permissions model, regardless of the - // Android version on the device. 
- || Build.VERSION.SDK_INT < Build.VERSION_CODES.M - || plugin.getActivity().getApplicationInfo().targetSdkVersion - < Build.VERSION_CODES.M) { - send(resultReceiver, requestCode, permissions, grantResults); - return; - } - - Activity activity = plugin.getActivity(); + // Obviously, if the requested permissions have already been granted, + // there is nothing to ask the user about. On the other hand, if there + // is no Activity or the runtime permissions are not supported, there is + // no way to ask the user to grant us the denied permissions. + int requestCode = ++PermissionUtils.requestCode; + + if (permissionsGranted + // Here we test for the target SDK version with which *the app* + // was compiled. If we use Build.VERSION.SDK_INT that would give + // us the API version of the device itself, not the version the + // app was compiled for. When compiled for API level < 23 we + // must still use old permissions model, regardless of the + // Android version on the device. + || Build.VERSION.SDK_INT < Build.VERSION_CODES.M + || context.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.M) { + send(resultReceiver, requestCode, permissions, grantResults); + return; + } - // If a ReactContext does not have a current Activity, then wait for - // it to get a current Activity; otherwise, the user will not be asked - // about the denied permissions and getUserMedia will fail. 
- if (activity == null) { - maybeRequestPermissionsOnHostResume( - plugin, - permissions, - grantResults, - resultReceiver, - requestCode); - return; + Bundle args = new Bundle(); + args.putInt(REQUEST_CODE, requestCode); + args.putParcelable(RESULT_RECEIVER, resultReceiver); + args.putStringArray(PERMISSIONS, permissions); + + RequestPermissionsFragment fragment = new RequestPermissionsFragment(); + fragment.setArguments(args); + + if(activity != null){ + FragmentTransaction transaction = + activity + .getFragmentManager() + .beginTransaction() + .add(fragment, fragment.getClass().getName() + "-" + requestCode); + + try { + transaction.commit(); + } catch (IllegalStateException ise) { + // Context is a Plugin, just send result back. + send(resultReceiver, requestCode, permissions, grantResults); + } + } + } + + public static void requestPermissions( + final Context context, + final Activity activity, + final String[] permissions, + final Callback callback) { + requestPermissions( + context, + activity, + permissions, + new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult(int resultCode, Bundle resultData) { + callback.invoke( + resultData.getStringArray(PERMISSIONS), resultData.getIntArray(GRANT_RESULTS)); + } + }); + } + + private static void send( + ResultReceiver resultReceiver, int requestCode, String[] permissions, int[] grantResults) { + Bundle resultData = new Bundle(); + resultData.putStringArray(PERMISSIONS, permissions); + resultData.putIntArray(GRANT_RESULTS, grantResults); + + resultReceiver.send(requestCode, resultData); + } + + public interface Callback { + void invoke(String[] permissions, int[] grantResults); + } + + /** + * Helper activity for requesting permissions. Android only allows requesting permissions from an + * activity and the result is reported in the onRequestPermissionsResult method. 
Since + * this package is a library we create an auxiliary activity and communicate back the results + * using a ResultReceiver. + */ + @RequiresApi(api = VERSION_CODES.M) + public static class RequestPermissionsFragment extends Fragment { + private void checkSelfPermissions(boolean requestPermissions) { + // Figure out which of the requested permissions are actually denied + // because we do not want to ask about the granted permissions + // (which Android supports). + Bundle args = getArguments(); + String[] permissions = args.getStringArray(PERMISSIONS); + int size = permissions.length; + Activity activity = getActivity(); + int[] grantResults = new int[size]; + ArrayList deniedPermissions = new ArrayList<>(); + + for (int i = 0; i < size; ++i) { + String permission = permissions[i]; + int grantResult; + // No need to ask for permission on pre-Marshmallow + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) + grantResult = PackageManager.PERMISSION_GRANTED; + else grantResult = activity.checkSelfPermission(permission); + + grantResults[i] = grantResult; + if (grantResult != PackageManager.PERMISSION_GRANTED) { + deniedPermissions.add(permission); } + } + int requestCode = args.getInt(REQUEST_CODE, 0); - Bundle args = new Bundle(); - args.putInt(REQUEST_CODE, requestCode); - args.putParcelable(RESULT_RECEIVER, resultReceiver); - args.putStringArray(PERMISSIONS, permissions); - - RequestPermissionsFragment fragment = new RequestPermissionsFragment(); - fragment.setArguments(args); - fragment.setPlugin(plugin); - - FragmentTransaction transaction - = activity.getFragmentManager().beginTransaction().add( - fragment, - fragment.getClass().getName() + "-" + requestCode); - - try { - transaction.commit(); - } catch (IllegalStateException ise) { - // The Activity has likely already saved its state. 
- maybeRequestPermissionsOnHostResume( - plugin, - permissions, - grantResults, - resultReceiver, - requestCode); - } - } - - public static void requestPermissions( - final FlutterWebRTCPlugin plugin, - final String[] permissions, - final Callback callback) { + if (deniedPermissions.isEmpty() || !requestPermissions) { + // All permissions have already been granted or we cannot ask + // the user about the denied ones. + finish(); + send(args.getParcelable(RESULT_RECEIVER), requestCode, permissions, grantResults); + } else { + // Ask the user about the denied permissions. requestPermissions( - plugin, - permissions, - new ResultReceiver(new Handler(Looper.getMainLooper())) { - @Override - protected void onReceiveResult( - int resultCode, - Bundle resultData) { - callback.invoke( - resultData.getStringArray(PERMISSIONS), - resultData.getIntArray(GRANT_RESULTS)); - } - }); + deniedPermissions.toArray(new String[deniedPermissions.size()]), requestCode); + } } - private static void send( - ResultReceiver resultReceiver, - int requestCode, - String[] permissions, - int[] grantResults) { - Bundle resultData = new Bundle(); - resultData.putStringArray(PERMISSIONS, permissions); - resultData.putIntArray(GRANT_RESULTS, grantResults); + private void finish() { + Activity activity = getActivity(); - resultReceiver.send(requestCode, resultData); + if (activity != null) { + activity.getFragmentManager().beginTransaction().remove(this).commitAllowingStateLoss(); + } } - public interface Callback { - void invoke(String[] permissions, int[] grantResults); + @Override + public void onRequestPermissionsResult( + int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { + Bundle args = getArguments(); + + if (args.getInt(REQUEST_CODE, 0) != requestCode) { + return; + } + + // XXX The super's documentation says: It is possible that the + // permissions request interaction with the user is interrupted. 
In + // this case you will receive empty permissions and results arrays + // which should be treated as a cancellation. + if (permissions.length == 0 || grantResults.length == 0) { + // The getUserMedia algorithm does not define a way to cancel + // the invocation so we have to redo the permission request. + finish(); + PermissionUtils.requestPermissions( + getContext(), + getActivity(), + args.getStringArray(PERMISSIONS), + (ResultReceiver) args.getParcelable(RESULT_RECEIVER)); + } else { + // We did not ask for all requested permissions, just the denied + // ones. But when we send the result, we have to answer about + // all requested permissions. + checkSelfPermissions(/* requestPermissions */ false); + } } - /** - * Helper activity for requesting permissions. Android only allows - * requesting permissions from an activity and the result is reported in the - * onRequestPermissionsResult method. Since this package is a - * library we create an auxiliary activity and communicate back the results - * using a ResultReceiver. - */ - public static class RequestPermissionsFragment extends Fragment { - private FlutterWebRTCPlugin plugin; - - public void setPlugin(FlutterWebRTCPlugin plugin){ - this.plugin = plugin; - } - private void checkSelfPermissions(boolean requestPermissions) { - // Figure out which of the requested permissions are actually denied - // because we do not want to ask about the granted permissions - // (which Android supports). 
- Bundle args = getArguments(); - String[] permissions = args.getStringArray(PERMISSIONS); - int size = permissions.length; - Activity activity = getActivity(); - int[] grantResults = new int[size]; - ArrayList deniedPermissions = new ArrayList<>(); - - for (int i = 0; i < size; ++i) { - String permission = permissions[i]; - int grantResult = activity.checkSelfPermission(permission); - - grantResults[i] = grantResult; - if (grantResult != PackageManager.PERMISSION_GRANTED) { - deniedPermissions.add(permission); - } - } - - int requestCode = args.getInt(REQUEST_CODE, 0); - - if (deniedPermissions.isEmpty() || !requestPermissions) { - // All permissions have already been granted or we cannot ask - // the user about the denied ones. - finish(); - send( - (ResultReceiver) args.getParcelable(RESULT_RECEIVER), - requestCode, - permissions, - grantResults); - } else { - // Ask the user about the denied permissions. - requestPermissions( - deniedPermissions.toArray( - new String[deniedPermissions.size()]), - requestCode); - } - } - - private void finish() { - Activity activity = getActivity(); - - if (activity != null) { - activity.getFragmentManager().beginTransaction() - .remove(this) - .commitAllowingStateLoss(); - } - } - - @Override - public void onRequestPermissionsResult( - int requestCode, - String[] permissions, - int[] grantResults) { - Bundle args = getArguments(); + @Override + public void onResume() { + super.onResume(); - if (args.getInt(REQUEST_CODE, 0) != requestCode) { - return; - } - - // XXX The super's documentation says: It is possible that the - // permissions request interaction with the user is interrupted. In - // this case you will receive empty permissions and results arrays - // which should be treated as a cancellation. - if (permissions.length == 0 || grantResults.length == 0) { - // The getUserMedia algorithm does not define a way to cancel - // the invocation so we have to redo the permission request. 
- finish(); - PermissionUtils.requestPermissions( - plugin, - args.getStringArray(PERMISSIONS), - (ResultReceiver) args.getParcelable(RESULT_RECEIVER)); - } else { - // We did not ask for all requested permissions, just the denied - // ones. But when we send the result, we have to answer about - // all requested permissions. - checkSelfPermissions(/* requestPermissions */ false); - } - } - - @Override - public void onResume() { - super.onResume(); - - checkSelfPermissions(/* requestPermissions */ true); - } + checkSelfPermissions(/* requestPermissions */ true); } + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java new file mode 100644 index 0000000000..b990ca4e24 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java @@ -0,0 +1,80 @@ +package com.cloudwebrtc.webrtc.utils; + +import androidx.annotation.Nullable; + +import org.webrtc.PeerConnection; + +public class Utils { + + @Nullable + static public String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { + switch (iceConnectionState) { + case NEW: + return "new"; + case CHECKING: + return "checking"; + case CONNECTED: + return "connected"; + case COMPLETED: + return "completed"; + case FAILED: + return "failed"; + case DISCONNECTED: + return "disconnected"; + case CLOSED: + return "closed"; + } + return null; + } + + @Nullable + static public String iceGatheringStateString(PeerConnection.IceGatheringState iceGatheringState) { + switch (iceGatheringState) { + case NEW: + return "new"; + case GATHERING: + return "gathering"; + case COMPLETE: + return "complete"; + } + return null; + } + + @Nullable + static public String signalingStateString(PeerConnection.SignalingState signalingState) { + switch (signalingState) { + case STABLE: + return "stable"; + case HAVE_LOCAL_OFFER: + return "have-local-offer"; + case HAVE_LOCAL_PRANSWER: + return "have-local-pranswer"; + 
case HAVE_REMOTE_OFFER: + return "have-remote-offer"; + case HAVE_REMOTE_PRANSWER: + return "have-remote-pranswer"; + case CLOSED: + return "closed"; + } + return null; + } + + @Nullable + static public String connectionStateString(PeerConnection.PeerConnectionState connectionState) { + switch (connectionState) { + case NEW: + return "new"; + case CONNECTING: + return "connecting"; + case CONNECTED: + return "connected"; + case DISCONNECTED: + return "disconnected"; + case FAILED: + return "failed"; + case CLOSED: + return "closed"; + } + return null; + } +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/LocalVideoTrack.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/LocalVideoTrack.java new file mode 100644 index 0000000000..fde5a75f04 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/LocalVideoTrack.java @@ -0,0 +1,67 @@ +package com.cloudwebrtc.webrtc.video; + +import androidx.annotation.Nullable; + +import com.cloudwebrtc.webrtc.LocalTrack; + +import org.webrtc.VideoFrame; +import org.webrtc.VideoProcessor; +import org.webrtc.VideoSink; +import org.webrtc.VideoTrack; + +import java.util.ArrayList; +import java.util.List; + +public class LocalVideoTrack extends LocalTrack implements VideoProcessor { + public interface ExternalVideoFrameProcessing { + /** + * Process a video frame. + * @param frame + * @return The processed video frame. 
+ */ + public abstract VideoFrame onFrame(VideoFrame frame); + } + + public LocalVideoTrack(VideoTrack videoTrack) { + super(videoTrack); + } + + List processors = new ArrayList<>(); + + public void addProcessor(ExternalVideoFrameProcessing processor) { + synchronized (processors) { + processors.add(processor); + } + } + + public void removeProcessor(ExternalVideoFrameProcessing processor) { + synchronized (processors) { + processors.remove(processor); + } + } + + private VideoSink sink = null; + + @Override + public void setSink(@Nullable VideoSink videoSink) { + sink = videoSink; + } + + @Override + public void onCapturerStarted(boolean b) {} + + @Override + public void onCapturerStopped() {} + + @Override + public void onFrameCaptured(VideoFrame videoFrame) { + if (sink != null) { + synchronized (processors) { + for (ExternalVideoFrameProcessing processor : processors) { + videoFrame = processor.onFrame(videoFrame); + } + } + sink.onFrame(videoFrame); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/VideoCapturerInfo.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/VideoCapturerInfo.java new file mode 100644 index 0000000000..8d93e61578 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/VideoCapturerInfo.java @@ -0,0 +1,12 @@ +package com.cloudwebrtc.webrtc.video; + +import org.webrtc.VideoCapturer; + +public class VideoCapturerInfo { + public VideoCapturer capturer; + public int width; + public int height; + public int fps; + public boolean isScreenCapture = false; + public String cameraName; +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraRegionUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraRegionUtils.java new file mode 100644 index 0000000000..62581de564 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraRegionUtils.java @@ -0,0 +1,205 @@ +// Copyright 2013 The Flutter Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package com.cloudwebrtc.webrtc.video.camera; + +import android.annotation.TargetApi; +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.util.Size; +import androidx.annotation.NonNull; +import androidx.annotation.VisibleForTesting; +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import java.util.Arrays; + +/** + * Utility class offering functions to calculate values regarding the camera boundaries. + * + *

The functions are used to calculate focus and exposure settings. + */ +public final class CameraRegionUtils { + + @NonNull + public static Size getCameraBoundaries( + @NonNull CameraCharacteristics cameraCharacteristics, @NonNull CaptureRequest.Builder requestBuilder) { + if (SdkCapabilityChecker.supportsDistortionCorrection() + && supportsDistortionCorrection(cameraCharacteristics)) { + // Get the current distortion correction mode. + Integer distortionCorrectionMode = + requestBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE); + + // Return the correct boundaries depending on the mode. + android.graphics.Rect rect; + if (distortionCorrectionMode == null + || distortionCorrectionMode == CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) { + rect = getSensorInfoPreCorrectionActiveArraySize(cameraCharacteristics); + } else { + rect = getSensorInfoActiveArraySize(cameraCharacteristics); + } + + return SizeFactory.create(rect.width(), rect.height()); + } else { + // No distortion correction support. 
+ return getSensorInfoPixelArraySize(cameraCharacteristics); + } + } + + @TargetApi(Build.VERSION_CODES.P) + private static boolean supportsDistortionCorrection(CameraCharacteristics cameraCharacteristics) { + int[] availableDistortionCorrectionModes = getDistortionCorrectionAvailableModes(cameraCharacteristics); + if (availableDistortionCorrectionModes == null) { + availableDistortionCorrectionModes = new int[0]; + } + long nonOffModesSupported = + Arrays.stream(availableDistortionCorrectionModes) + .filter((value) -> value != CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) + .count(); + return nonOffModesSupported > 0; + } + + static public int[] getDistortionCorrectionAvailableModes(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + return cameraCharacteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES); + } + return null; + } + + public static Rect getSensorInfoActiveArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + } + + public static Size getSensorInfoPixelArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE); + } + + @NonNull + public static Rect getSensorInfoPreCorrectionActiveArraySize(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return cameraCharacteristics.get( + CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); + } + return getSensorInfoActiveArraySize(cameraCharacteristics); + } + + public static Integer getControlMaxRegionsAutoExposure(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE); + } + + /** + * Converts a point into a {@link MeteringRectangle} with the supplied coordinates as the center + * point. + * + *

Since the Camera API (due to cross-platform constraints) only accepts a point when + * configuring a specific focus or exposure area and Android requires a rectangle to configure + * these settings there is a need to convert the point into a rectangle. This method will create + * the required rectangle with an arbitrarily size that is a 10th of the current viewport and the + * coordinates as the center point. + * + * @param boundaries - The camera boundaries to calculate the metering rectangle for. + * @param x x - 1 >= coordinate >= 0. + * @param y y - 1 >= coordinate >= 0. + * @return The dimensions of the metering rectangle based on the supplied coordinates and + * boundaries. + */ + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. + int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1))); + int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1))); + // Determine the dimensions of the metering rectangle (10th of the viewport). + int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d); + int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d); + // Adjust target coordinate to represent top-left corner of metering rectangle. 
+ targetX -= targetWidth / 2; + targetY -= targetHeight / 2; + // Adjust target coordinate as to not fall out of bounds. + if (targetX < 0) { + targetX = 0; + } + if (targetY < 0) { + targetY = 0; + } + int maxTargetX = boundaries.getWidth() - 1 - targetWidth; + int maxTargetY = boundaries.getHeight() - 1 - targetHeight; + if (targetX > maxTargetX) { + targetX = maxTargetX; + } + if (targetY > maxTargetY) { + targetY = maxTargetY; + } + // Build the metering rectangle. + return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1); + } + + /** Factory class that assists in creating a {@link MeteringRectangle} instance. */ + static class MeteringRectangleFactory { + /** + * Creates a new instance of the {@link MeteringRectangle} class. + * + *

This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param x coordinate >= 0. + * @param y coordinate >= 0. + * @param width width >= 0. + * @param height height >= 0. + * @param meteringWeight weight between {@value MeteringRectangle#METERING_WEIGHT_MIN} and + * {@value MeteringRectangle#METERING_WEIGHT_MAX} inclusively. + * @return new instance of the {@link MeteringRectangle} class. + * @throws IllegalArgumentException if any of the parameters were negative. + */ + @VisibleForTesting + public static MeteringRectangle create( + int x, int y, int width, int height, int meteringWeight) { + return new MeteringRectangle(x, y, width, height, meteringWeight); + } + } + + /** Factory class that assists in creating a {@link Size} instance. */ + static class SizeFactory { + /** + * Creates a new instance of the {@link Size} class. + * + *

This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param width width >= 0. + * @param height height >= 0. + * @return new instance of the {@link Size} class. + */ + @VisibleForTesting + public static Size create(int width, int height) { + return new Size(width, height); + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraUtils.java new file mode 100644 index 0000000000..12802ce1d0 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/CameraUtils.java @@ -0,0 +1,730 @@ +package com.cloudwebrtc.webrtc.video.camera; + +import android.app.Activity; +import android.graphics.Rect; +import android.hardware.Camera; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.os.Handler; +import android.util.Log; +import android.util.Range; +import android.util.Size; +import android.view.Surface; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.cloudwebrtc.webrtc.GetUserMediaImpl; +import com.cloudwebrtc.webrtc.utils.AnyThreadResult; +import com.cloudwebrtc.webrtc.video.VideoCapturerInfo; + +import org.webrtc.Camera1Capturer; +import org.webrtc.Camera2Capturer; +import org.webrtc.CameraEnumerationAndroid; + +import java.lang.reflect.Field; +import java.util.List; + +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel; + +public class CameraUtils { + private static final String TAG = 
"CameraUtils"; + Activity activity; + private GetUserMediaImpl getUserMediaImpl; + private boolean isTorchOn = false; + private DeviceOrientationManager deviceOrientationManager; + public CameraUtils(GetUserMediaImpl getUserMediaImpl, Activity activity) { + this.getUserMediaImpl = getUserMediaImpl; + this.activity = activity; + this.deviceOrientationManager = new DeviceOrientationManager(activity, 0); + // commented out because you cannot register a reciever when the app is terminated + // because the activity is null? + // this causes the call to break if the app is terminated + // the manager seems to end up at handleOrientationChange which does not do + // anything at the moment so this should be ok + + // TODO: get a proper fix at some point + // this.deviceOrientationManager.start(); + } + + public void setFocusMode(MethodCall call, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setFocusMode", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + 
fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + switch (mode) { + case "locked": + // When locking the auto-focus the camera device should do a one-time focus and afterwards + // set the auto-focus to idle. This is accomplished by setting the CONTROL_AF_MODE to + // CONTROL_AF_MODE_AUTO. + captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO); + break; + case "auto": + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO); + break; + default: + break; + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + + //captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + if(!params.getSupportedFocusModes().isEmpty()) { + switch (mode) { + case "locked": + params.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED); + break; + case "auto": + params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); + break; + default: + break; + } + result.success(null); + return; + } + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void setFocusPoint(MethodCall call, Point focusPoint, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setFocusMode", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + 
// Most likely the upstream Camera2Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + MeteringRectangle focusRectangle = null; + Size cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder); + PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation(); + focusRectangle = + convertPointToMeteringRectangle(cameraBoundaries, focusPoint.x, focusPoint.y, orientation); + + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_REGIONS, + captureRequestBuilder == null ? null : new MeteringRectangle[] {focusRectangle}); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + params.setFocusAreas(null); + + result.success(null); + return; + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void setExposureMode(MethodCall call, AnyThreadResult result) {} + + public void setExposurePoint(MethodCall call,Point exposurePoint, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setExposurePoint", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setExposurePoint", "[setExposurePoint] Failed to get `" + 
e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + if(CameraRegionUtils.getControlMaxRegionsAutoExposure(cameraCharacteristics) <= 0) { + resultError("setExposurePoint", "[setExposurePoint] Camera does not support auto exposure", result); + return; + } + + MeteringRectangle exposureRectangle = null; + Size cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder); + PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation(); + exposureRectangle = + convertPointToMeteringRectangle(cameraBoundaries, exposurePoint.x, exposurePoint.y, orientation); + if (exposureRectangle != null) { + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {exposureRectangle}); + } else { + MeteringRectangle[] defaultRegions = captureRequestBuilder.get(CaptureRequest.CONTROL_AE_REGIONS); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, defaultRegions); + } + + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] 
Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + params.setFocusAreas(null); + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void hasTorch(String trackId, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("hasTorch", "Video capturer not found for id: " + trackId, result); + return; + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && info.capturer instanceof Camera2Capturer) { + CameraManager manager; + CameraDevice cameraDevice; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + boolean flashIsAvailable; + try { + CameraCharacteristics characteristics = + manager.getCameraCharacteristics(cameraDevice.getId()); + flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(flashIsAvailable); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) 
getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + List supportedModes = params.getSupportedFlashModes(); + + result.success( + supportedModes != null && supportedModes.contains(Camera.Parameters.FLASH_MODE_TORCH)); + return; + } + + resultError("hasTorch", "[TORCH] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setZoom(String trackId, double zoomLevel, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setZoom", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) 
getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + final double maxZoomLevel = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); + + final double desiredZoomLevel = Math.max(1.0, Math.min(zoomLevel, maxZoomLevel)); + + float ratio = 1.0f / (float)desiredZoomLevel; + + if (rect != null) { + int croppedWidth = rect.width() - Math.round((float) rect.width() * ratio); + int croppedHeight = rect.height() - Math.round((float) rect.height() * ratio); + final Rect desiredRegion = new Rect(croppedWidth / 2, croppedHeight / 2, rect.width() - croppedWidth / 2, rect.height() - croppedHeight / 2); + captureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, desiredRegion); + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + if(params.isZoomSupported()) { + int maxZoom = params.getMaxZoom(); + double desiredZoom = Math.max(0, Math.min(zoomLevel, maxZoom)); + params.setZoom((int)desiredZoom); + result.success(null); + return; + } + } + resultError("setZoom", "[ZOOM] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setTorch(String trackId, boolean torch, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setTorch", "Video capturer not found for id: " + trackId, result); + return; + } + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + CameraManager manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + 
e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(null); + isTorchOn = torch; + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + torch ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + camera.setParameters(params); + + result.success(null); + isTorchOn = torch; + return; + } + resultError("setTorch", "[TORCH] Video capturer not compatible", result); + } + + + private class NoSuchFieldWithNameException extends NoSuchFieldException { + + String className; + String fieldName; + + NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { + super(e.getMessage()); + this.className = className; + this.fieldName = fieldName; + } + } + static private void resultError(String method, String error, MethodChannel.Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + private Object getPrivateProperty(Class klass, Object object, String fieldName) + throws NoSuchFieldWithNameException { + try { + Field field = klass.getDeclaredField(fieldName); + field.setAccessible(true); + return field.get(object); + } catch (NoSuchFieldException e) { + throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); + } catch (IllegalAccessException e) { + // Should never happen since we are calling `setAccessible(true)` + throw new RuntimeException(e); + } + } + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. 
+ int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1))); + int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1))); + // Determine the dimensions of the metering rectangle (10th of the viewport). + int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d); + int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d); + // Adjust target coordinate to represent top-left corner of metering rectangle. + targetX -= targetWidth / 2; + targetY -= targetHeight / 2; + // Adjust target coordinate as to not fall out of bounds. + if (targetX < 0) { + targetX = 0; + } + if (targetY < 0) { + targetY = 0; + } + int maxTargetX = boundaries.getWidth() - 1 - targetWidth; + int maxTargetY = boundaries.getHeight() - 1 - targetHeight; + if (targetX > maxTargetX) { + targetX = maxTargetX; + } + if (targetY > maxTargetY) { + targetY = maxTargetY; + } + // Build the metering rectangle. + return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1); + } + + static class MeteringRectangleFactory { + public static MeteringRectangle create( + int x, int y, int width, int height, int meteringWeight) { + return new MeteringRectangle(x, y, width, height, meteringWeight); + } + } +} + diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/DeviceOrientationManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/DeviceOrientationManager.java new file mode 100644 index 0000000000..c533291893 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/DeviceOrientationManager.java @@ -0,0 +1,188 @@ +package com.cloudwebrtc.webrtc.video.camera; + +import android.app.Activity; +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.content.res.Configuration; +import android.view.Display; +import android.view.Surface; +import 
android.view.WindowManager; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation; + +/** + * Support class to help to determine the media orientation based on the orientation of the device. + */ +public class DeviceOrientationManager { + + private static final IntentFilter orientationIntentFilter = + new IntentFilter(Intent.ACTION_CONFIGURATION_CHANGED); + + private final Activity activity; + private final int sensorOrientation; + private PlatformChannel.DeviceOrientation lastOrientation; + private BroadcastReceiver broadcastReceiver; + + /** Factory method to create a device orientation manager. */ + @NonNull + public static DeviceOrientationManager create( + @NonNull Activity activity, + int sensorOrientation) { + return new DeviceOrientationManager(activity, sensorOrientation); + } + + DeviceOrientationManager( + @NonNull Activity activity, + int sensorOrientation) { + this.activity = activity; + this.sensorOrientation = sensorOrientation; + } + + public void start() { + if (broadcastReceiver != null) { + return; + } + broadcastReceiver = + new BroadcastReceiver() { + @Override + public void onReceive(Context context, Intent intent) { + handleUIOrientationChange(); + } + }; + activity.registerReceiver(broadcastReceiver, orientationIntentFilter); + broadcastReceiver.onReceive(activity, null); + } + + /** Stops listening for orientation updates. */ + public void stop() { + if (broadcastReceiver == null) { + return; + } + activity.unregisterReceiver(broadcastReceiver); + broadcastReceiver = null; + } + + + /** @return the last received UI orientation. 
*/ + @Nullable + public PlatformChannel.DeviceOrientation getLastUIOrientation() { + return this.lastOrientation; + } + + /** + * Handles orientation changes based on change events triggered by the OrientationIntentFilter. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + */ + @VisibleForTesting + void handleUIOrientationChange() { + PlatformChannel.DeviceOrientation orientation = getUIOrientation(); + handleOrientationChange(orientation, lastOrientation); + lastOrientation = orientation; + } + @VisibleForTesting + static void handleOrientationChange( + DeviceOrientation newOrientation, + DeviceOrientation previousOrientation) { + } + + @SuppressWarnings("deprecation") + @VisibleForTesting + PlatformChannel.DeviceOrientation getUIOrientation() { + final int rotation = getDisplay().getRotation(); + final int orientation = activity.getResources().getConfiguration().orientation; + + switch (orientation) { + case Configuration.ORIENTATION_PORTRAIT: + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return PlatformChannel.DeviceOrientation.PORTRAIT_UP; + } else { + return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN; + } + case Configuration.ORIENTATION_LANDSCAPE: + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT; + } else { + return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT; + } + case Configuration.ORIENTATION_SQUARE: + case Configuration.ORIENTATION_UNDEFINED: + default: + return PlatformChannel.DeviceOrientation.PORTRAIT_UP; + } + } + + /** + * Calculates the sensor orientation based on the supplied angle. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + * + * @param angle Orientation angle. + * @return The sensor orientation based on the supplied angle. + */ + @VisibleForTesting + PlatformChannel.DeviceOrientation calculateSensorOrientation(int angle) { + final int tolerance = 45; + angle += tolerance; + + // Orientation is 0 in the default orientation mode. This is portrait-mode for phones + // and landscape for tablets. We have to compensate for this by calculating the default + // orientation, and apply an offset accordingly. + int defaultDeviceOrientation = getDeviceDefaultOrientation(); + if (defaultDeviceOrientation == Configuration.ORIENTATION_LANDSCAPE) { + angle += 90; + } + // Determine the orientation + angle = angle % 360; + return new PlatformChannel.DeviceOrientation[] { + PlatformChannel.DeviceOrientation.PORTRAIT_UP, + PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT, + PlatformChannel.DeviceOrientation.PORTRAIT_DOWN, + PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT, + } + [angle / 90]; + } + + /** + * Gets the default orientation of the device. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return The default orientation of the device. + */ + @VisibleForTesting + int getDeviceDefaultOrientation() { + Configuration config = activity.getResources().getConfiguration(); + int rotation = getDisplay().getRotation(); + if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) + && config.orientation == Configuration.ORIENTATION_LANDSCAPE) + || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) + && config.orientation == Configuration.ORIENTATION_PORTRAIT)) { + return Configuration.ORIENTATION_LANDSCAPE; + } else { + return Configuration.ORIENTATION_PORTRAIT; + } + } + + /** + * Gets an instance of the Android {@link android.view.Display}. + * + *

This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return An instance of the Android {@link android.view.Display}. + */ + @SuppressWarnings("deprecation") + @VisibleForTesting + Display getDisplay() { + return ((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/Point.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/Point.java new file mode 100644 index 0000000000..83ab8e653d --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/Point.java @@ -0,0 +1,14 @@ +package com.cloudwebrtc.webrtc.video.camera; + +import androidx.annotation.Nullable; + +/** Represents a point on an x/y axis. */ +public class Point { + public final Double x; + public final Double y; + + public Point(@Nullable Double x, @Nullable Double y) { + this.x = x; + this.y = y; + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/SdkCapabilityChecker.java b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/SdkCapabilityChecker.java new file mode 100644 index 0000000000..cd7d21ef3f --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/video/camera/SdkCapabilityChecker.java @@ -0,0 +1,60 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package com.cloudwebrtc.webrtc.video.camera; + +import android.annotation.SuppressLint; +import android.os.Build; +import androidx.annotation.ChecksSdkIntAtLeast; +import androidx.annotation.VisibleForTesting; + +/** Abstracts SDK version checks, and allows overriding them in unit tests. */ +public class SdkCapabilityChecker { + /** The current SDK version, overridable for testing. 
*/ + @SuppressLint("AnnotateVersionCheck") + @VisibleForTesting + public static int SDK_VERSION = Build.VERSION.SDK_INT; + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P) + public static boolean supportsDistortionCorrection() { + // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#DISTORTION_CORRECTION_AVAILABLE_MODES + return SDK_VERSION >= Build.VERSION_CODES.P; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.O) + public static boolean supportsEglRecordableAndroid() { + // See https://developer.android.com/reference/android/opengl/EGLExt#EGL_RECORDABLE_ANDROID + return SDK_VERSION >= Build.VERSION_CODES.O; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.S) + public static boolean supportsEncoderProfiles() { + // See https://developer.android.com/reference/android/media/EncoderProfiles + return SDK_VERSION >= Build.VERSION_CODES.S; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.M) + public static boolean supportsMarshmallowNoiseReductionModes() { + // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES + return SDK_VERSION >= Build.VERSION_CODES.M; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P) + public static boolean supportsSessionConfiguration() { + // See https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration + return SDK_VERSION >= Build.VERSION_CODES.P; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.N) + public static boolean supportsVideoPause() { + // See https://developer.android.com/reference/androidx/camera/video/VideoRecordEvent.Pause + return SDK_VERSION >= Build.VERSION_CODES.N; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.R) + public static boolean supportsZoomRatio() { + // See https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#CONTROL_ZOOM_RATIO + return SDK_VERSION >= Build.VERSION_CODES.R; + } +} 
diff --git a/android/src/main/java/org/webrtc/Camera1Helper.java b/android/src/main/java/org/webrtc/Camera1Helper.java new file mode 100644 index 0000000000..f0dec0d8de --- /dev/null +++ b/android/src/main/java/org/webrtc/Camera1Helper.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023-2024 LiveKit, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +/** + * A helper to access package-protected methods used in [Camera2Session] + *

+ * Note: cameraId as used in the Camera1XXX classes refers to the index within the list of cameras. + * + * @suppress + */ + +public class Camera1Helper { + + public static int getCameraId(String deviceName) { + return Camera1Enumerator.getCameraIndex(deviceName); + } + + @Nullable + public static List getSupportedFormats(int cameraId) { + return Camera1Enumerator.getSupportedFormats(cameraId); + } + + public static Size findClosestCaptureFormat(int cameraId, int width, int height) { + List formats = getSupportedFormats(cameraId); + + List sizes = new ArrayList<>(); + if (formats != null) { + for (CameraEnumerationAndroid.CaptureFormat format : formats) { + sizes.add(new Size(format.width, format.height)); + } + } + + return CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height); + } +} diff --git a/android/src/main/java/org/webrtc/Camera2Helper.java b/android/src/main/java/org/webrtc/Camera2Helper.java new file mode 100644 index 0000000000..eab20edb2e --- /dev/null +++ b/android/src/main/java/org/webrtc/Camera2Helper.java @@ -0,0 +1,51 @@ +/* + * Copyright 2023-2024 LiveKit, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import android.hardware.camera2.CameraManager; + +import androidx.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +/** + * A helper to access package-protected methods used in [Camera2Session] + *

+ * Note: cameraId as used in the Camera2XXX classes refers to the id returned + * by [CameraManager.getCameraIdList]. + */ +public class Camera2Helper { + + @Nullable + public static List getSupportedFormats(CameraManager cameraManager, @Nullable String cameraId) { + return Camera2Enumerator.getSupportedFormats(cameraManager, cameraId); + } + + public static Size findClosestCaptureFormat(CameraManager cameraManager, @Nullable String cameraId, int width, int height) { + List formats = getSupportedFormats(cameraManager, cameraId); + + List sizes = new ArrayList<>(); + if (formats != null) { + for (CameraEnumerationAndroid.CaptureFormat format : formats) { + sizes.add(new Size(format.width, format.height)); + } + } + + return CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height); + } +} diff --git a/android/src/main/java/org/webrtc/audio/WebRtcAudioTrackUtils.java b/android/src/main/java/org/webrtc/audio/WebRtcAudioTrackUtils.java new file mode 100644 index 0000000000..4aba9d7a0e --- /dev/null +++ b/android/src/main/java/org/webrtc/audio/WebRtcAudioTrackUtils.java @@ -0,0 +1,58 @@ +package org.webrtc.audio; + +import android.media.AudioTrack; +import android.util.Log; + +import com.cloudwebrtc.webrtc.record.AudioTrackInterceptor; + +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; + +import java.lang.reflect.Field; + +/** + * Awful hack + * It must be in this package, because WebRtcAudioTrack is package-private + * **/ +public abstract class WebRtcAudioTrackUtils { + + static private final String TAG = "WebRtcAudioTrackUtils"; + + public static void attachOutputCallback( + SamplesReadyCallback callback, + JavaAudioDeviceModule audioDeviceModule + ) throws NoSuchFieldException, IllegalAccessException, NullPointerException { + Field audioOutputField = audioDeviceModule.getClass().getDeclaredField("audioOutput"); + audioOutputField.setAccessible(true); + WebRtcAudioTrack audioOutput = (WebRtcAudioTrack) 
audioOutputField.get(audioDeviceModule); + Log.w(TAG, "Here is a little hedgehog 🦔"); + Field audioTrackField = audioOutput.getClass().getDeclaredField("audioTrack"); + audioTrackField.setAccessible(true); + AudioTrack audioTrack = (AudioTrack) audioTrackField.get(audioOutput); + Log.w(TAG, "He is hiding in a forest 🌲🦔🌲"); + AudioTrackInterceptor interceptor = new AudioTrackInterceptor(audioTrack, callback); + audioTrackField.set(audioOutput, interceptor); + Log.w(TAG, "Little hedgie in the forest 🌲🌲🌲 but you can't see him"); + } + + public static void detachOutputCallback(JavaAudioDeviceModule audioDeviceModule) { + try { + Log.w(TAG, "Where did the hedgie gone? Let's find him"); + Field audioOutputField = audioDeviceModule.getClass().getDeclaredField("audioOutput"); + audioOutputField.setAccessible(true); + WebRtcAudioTrack audioOutput = (WebRtcAudioTrack) audioOutputField.get(audioDeviceModule); + Field audioTrackField = audioOutput.getClass().getDeclaredField("audioTrack"); + audioTrackField.setAccessible(true); + AudioTrack audioTrack = (AudioTrack) audioTrackField.get(audioOutput); + if (audioTrack instanceof AudioTrackInterceptor) { + AudioTrackInterceptor interceptor = (AudioTrackInterceptor) audioTrack; + audioTrackField.set(audioOutput, interceptor.originalTrack); + Log.w(TAG, "Here he is 🦔"); + } else { + Log.w(TAG, "Hedgie is lost 😢"); + } + } catch (Exception e) { + Log.w(TAG, "Failed to detach callback", e); + } + } + +} diff --git a/android/src/main/java/org/webrtc/video/CustomVideoDecoderFactory.java b/android/src/main/java/org/webrtc/video/CustomVideoDecoderFactory.java new file mode 100644 index 0000000000..531314ac3e --- /dev/null +++ b/android/src/main/java/org/webrtc/video/CustomVideoDecoderFactory.java @@ -0,0 +1,55 @@ +package org.webrtc.video; + +import androidx.annotation.Nullable; + +import org.webrtc.EglBase; +import org.webrtc.SoftwareVideoDecoderFactory; +import org.webrtc.VideoCodecInfo; +import org.webrtc.VideoDecoder; +import 
org.webrtc.VideoDecoderFactory; +import org.webrtc.WrappedVideoDecoderFactory; + +import java.util.ArrayList; +import java.util.List; + +public class CustomVideoDecoderFactory implements VideoDecoderFactory { + private SoftwareVideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory(); + private WrappedVideoDecoderFactory wrappedVideoDecoderFactory; + private boolean forceSWCodec = false; + + private List forceSWCodecs = new ArrayList<>(); + + public CustomVideoDecoderFactory(EglBase.Context sharedContext) { + this.wrappedVideoDecoderFactory = new WrappedVideoDecoderFactory(sharedContext); + } + + public void setForceSWCodec(boolean forceSWCodec) { + this.forceSWCodec = forceSWCodec; + } + + public void setForceSWCodecList(List forceSWCodecs) { + this.forceSWCodecs = forceSWCodecs; + } + + @Nullable + @Override + public VideoDecoder createDecoder(VideoCodecInfo videoCodecInfo) { + if(forceSWCodec) { + return softwareVideoDecoderFactory.createDecoder(videoCodecInfo); + } + if(!forceSWCodecs.isEmpty()) { + if(forceSWCodecs.contains(videoCodecInfo.name)) { + return softwareVideoDecoderFactory.createDecoder(videoCodecInfo); + } + } + return wrappedVideoDecoderFactory.createDecoder(videoCodecInfo); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + if(forceSWCodec && forceSWCodecs.isEmpty()) { + return softwareVideoDecoderFactory.getSupportedCodecs(); + } + return wrappedVideoDecoderFactory.getSupportedCodecs(); + } +} diff --git a/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java b/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java new file mode 100644 index 0000000000..772b3f936c --- /dev/null +++ b/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java @@ -0,0 +1,61 @@ +package org.webrtc.video; + +import androidx.annotation.Nullable; + +import com.cloudwebrtc.webrtc.SimulcastVideoEncoderFactoryWrapper; + +import org.webrtc.EglBase; +import 
org.webrtc.SoftwareVideoEncoderFactory; +import org.webrtc.VideoCodecInfo; +import org.webrtc.VideoEncoder; +import org.webrtc.VideoEncoderFactory; + +import java.util.ArrayList; +import java.util.List; + +public class CustomVideoEncoderFactory implements VideoEncoderFactory { + private SoftwareVideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory(); + private SimulcastVideoEncoderFactoryWrapper simulcastVideoEncoderFactoryWrapper; + + private boolean forceSWCodec = false; + + private List forceSWCodecs = new ArrayList<>(); + + public CustomVideoEncoderFactory(EglBase.Context sharedContext, + boolean enableIntelVp8Encoder, + boolean enableH264HighProfile) { + this.simulcastVideoEncoderFactoryWrapper = new SimulcastVideoEncoderFactoryWrapper(sharedContext, enableIntelVp8Encoder, enableH264HighProfile); + } + + public void setForceSWCodec(boolean forceSWCodec) { + this.forceSWCodec = forceSWCodec; + } + + public void setForceSWCodecList(List forceSWCodecs) { + this.forceSWCodecs = forceSWCodecs; + } + + @Nullable + @Override + public VideoEncoder createEncoder(VideoCodecInfo videoCodecInfo) { + if(forceSWCodec) { + return softwareVideoEncoderFactory.createEncoder(videoCodecInfo); + } + + if(!forceSWCodecs.isEmpty()) { + if(forceSWCodecs.contains(videoCodecInfo.name)) { + return softwareVideoEncoderFactory.createEncoder(videoCodecInfo); + } + } + + return simulcastVideoEncoderFactoryWrapper.createEncoder(videoCodecInfo); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + if(forceSWCodec && forceSWCodecs.isEmpty()) { + return softwareVideoEncoderFactory.getSupportedCodecs(); + } + return simulcastVideoEncoderFactoryWrapper.getSupportedCodecs(); + } +} diff --git a/assets/sponsors/stream-logo.png b/assets/sponsors/stream-logo.png new file mode 100644 index 0000000000..671eea96df Binary files /dev/null and b/assets/sponsors/stream-logo.png differ diff --git a/common/cpp/include/flutter_common.h 
b/common/cpp/include/flutter_common.h new file mode 100644 index 0000000000..50e6097bf9 --- /dev/null +++ b/common/cpp/include/flutter_common.h @@ -0,0 +1,188 @@ +#ifndef FLUTTER_WEBRTC_COMMON_HXX +#define FLUTTER_WEBRTC_COMMON_HXX + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +typedef flutter::EncodableValue EncodableValue; +typedef flutter::EncodableMap EncodableMap; +typedef flutter::EncodableList EncodableList; +typedef flutter::BinaryMessenger BinaryMessenger; +typedef flutter::TextureRegistrar TextureRegistrar; +typedef flutter::PluginRegistrar PluginRegistrar; +typedef flutter::MethodChannel MethodChannel; +typedef flutter::EventChannel EventChannel; +typedef flutter::EventSink EventSink; +typedef flutter::MethodCall MethodCall; +typedef flutter::MethodResult MethodResult; + +class TaskRunner; + +// foo.StringValue() becomes std::get(foo) +// foo.IsString() becomes std::holds_alternative(foo) + +template +inline bool TypeIs(const EncodableValue val) { + return std::holds_alternative(val); +} + +template +inline const T GetValue(EncodableValue val) { + return std::get(val); +} + +inline EncodableValue findEncodableValue(const EncodableMap& map, + const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end()) + return it->second; + return EncodableValue(); +} + +inline EncodableMap findMap(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return EncodableMap(); +} + +inline EncodableList findList(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return EncodableList(); +} + +inline std::string findString(const EncodableMap& map, const std::string& key) { + auto it = 
map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return std::string(); +} + +inline int findInt(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return -1; +} + +inline bool findBoolean(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return false; +} + +inline double findDouble(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return 0.0; +} + +inline std::optional maybeFindDouble(const EncodableMap& map, + const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return std::nullopt; +} + +inline std::vector findVector(const EncodableMap& map, + const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs>(it->second)) + return GetValue>(it->second); + return std::vector(); +} + +inline int64_t findLongInt(const EncodableMap& map, const std::string& key) { + for (auto it : map) { + if (key == GetValue(it.first)) { + if (TypeIs(it.second)) { + return GetValue(it.second); + } else if (TypeIs(it.second)) { + return GetValue(it.second); + } + } + } + + return -1; +} + +inline int toInt(flutter::EncodableValue inputVal, int defaultVal) { + int intValue = defaultVal; + if (TypeIs(inputVal)) { + intValue = GetValue(inputVal); + } else if (TypeIs(inputVal)) { + intValue = GetValue(inputVal); + } else if (TypeIs(inputVal)) { + intValue = atoi(GetValue(inputVal).c_str()); + } + return intValue; +} + +class MethodCallProxy { + public: + static std::unique_ptr Create(const MethodCall& call); + virtual ~MethodCallProxy() = 
default; + // The name of the method being called. + virtual const std::string& method_name() const = 0; + + // The arguments to the method call, or NULL if there are none. + virtual const EncodableValue* arguments() const = 0; +}; + +class MethodResultProxy { + public: + static std::unique_ptr Create( + std::unique_ptr method_result); + + virtual ~MethodResultProxy() = default; + + // Reports success with no result. + virtual void Success() = 0; + + // Reports success with a result. + virtual void Success(const EncodableValue& result) = 0; + + // Reports an error. + virtual void Error(const std::string& error_code, + const std::string& error_message, + const EncodableValue& error_details) = 0; + + // Reports an error with a default error code and no details. + virtual void Error(const std::string& error_code, + const std::string& error_message = "") = 0; + + virtual void NotImplemented() = 0; +}; + +class EventChannelProxy { + public: + static std::unique_ptr Create( + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName); + + virtual ~EventChannelProxy() = default; + + virtual void Success(const EncodableValue& event, + bool cache_event = true) = 0; +}; + +#endif // FLUTTER_WEBRTC_COMMON_HXX diff --git a/common/cpp/include/flutter_data_channel.h b/common/cpp/include/flutter_data_channel.h new file mode 100644 index 0000000000..1e5bfd1584 --- /dev/null +++ b/common/cpp/include/flutter_data_channel.h @@ -0,0 +1,58 @@ +#ifndef FLUTTER_WEBRTC_RTC_DATA_CHANNEL_HXX +#define FLUTTER_WEBRTC_RTC_DATA_CHANNEL_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +namespace flutter_webrtc_plugin { + +class FlutterRTCDataChannelObserver : public RTCDataChannelObserver { + public: + FlutterRTCDataChannelObserver(scoped_refptr data_channel, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channel_name); + virtual ~FlutterRTCDataChannelObserver(); + + virtual void OnStateChange(RTCDataChannelState 
state) override; + + virtual void OnMessage(const char* buffer, int length, bool binary) override; + + scoped_refptr data_channel() { return data_channel_; } + + private: + std::unique_ptr event_channel_; + scoped_refptr data_channel_; +}; + +class FlutterDataChannel { + public: + FlutterDataChannel(FlutterWebRTCBase* base) : base_(base) {} + + void CreateDataChannel(const std::string& peerConnectionId, + const std::string& label, + const EncodableMap& dataChannelDict, + RTCPeerConnection* pc, + std::unique_ptr); + + void DataChannelSend(RTCDataChannel* data_channel, + const std::string& type, + const EncodableValue& data, + std::unique_ptr); + + void DataChannelGetBufferedAmount(RTCDataChannel* data_channel, + std::unique_ptr result); + + void DataChannelClose(RTCDataChannel* data_channel, + const std::string& data_channel_uuid, + std::unique_ptr); + + RTCDataChannel* DataChannelForId(const std::string& id); + + private: + FlutterWebRTCBase* base_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_DATA_CHANNEL_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_frame_capturer.h b/common/cpp/include/flutter_frame_capturer.h new file mode 100644 index 0000000000..41e9a6556e --- /dev/null +++ b/common/cpp/include/flutter_frame_capturer.h @@ -0,0 +1,37 @@ +#ifndef FLUTTER_WEBRTC_RTC_FRAME_CAPTURER_HXX +#define FLUTTER_WEBRTC_RTC_FRAME_CAPTURER_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include "rtc_video_frame.h" +#include "rtc_video_renderer.h" + +#include + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterFrameCapturer + : public RTCVideoRenderer> { + public: + FlutterFrameCapturer(RTCVideoTrack* track, std::string path); + + virtual void OnFrame(scoped_refptr frame) override; + + void CaptureFrame(std::unique_ptr result); + + private: + RTCVideoTrack* track_; + std::string path_; + std::mutex mutex_; + scoped_refptr frame_; + volatile bool catch_frame_; + + 
bool SaveFrame(); +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_FRAME_CAPTURER_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_frame_cryptor.h b/common/cpp/include/flutter_frame_cryptor.h new file mode 100644 index 0000000000..36756272f9 --- /dev/null +++ b/common/cpp/include/flutter_frame_cryptor.h @@ -0,0 +1,103 @@ +#ifndef FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX +#define FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include "rtc_frame_cryptor.h" + +namespace flutter_webrtc_plugin { + +class FlutterFrameCryptorObserver : public libwebrtc::RTCFrameCryptorObserver { + public: + FlutterFrameCryptorObserver(BinaryMessenger* messenger, TaskRunner* task_runner, const std::string& channelName) + : event_channel_(EventChannelProxy::Create(messenger, task_runner, channelName)) {} + void OnFrameCryptionStateChanged( + const string participant_id, + libwebrtc::RTCFrameCryptionState state); + private: + std::unique_ptr event_channel_; +}; + +class FlutterFrameCryptor { + public: + FlutterFrameCryptor(FlutterWebRTCBase* base) : base_(base) {} + + // Since this takes ownership of result, ownership will be passed back to 'outResult' if this function fails + bool HandleFrameCryptorMethodCall( + const MethodCallProxy& method_call, + std::unique_ptr result, + std::unique_ptr *outResult); + + void FrameCryptorFactoryCreateFrameCryptor( + const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorSetKeyIndex(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorGetKeyIndex(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorSetEnabled(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorGetEnabled(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorDispose(const EncodableMap& constraints, + std::unique_ptr result); + + void 
FrameCryptorFactoryCreateKeyProvider( + const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderSetSharedKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderRatchetSharedKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderExportSharedKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderSetKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderRatchetKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderExportKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderSetSifTrailer(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderDispose(const EncodableMap& constraints, + std::unique_ptr result); + + // std::unique_ptr result); + // 'keyProviderSetKey', + // 'keyProviderSetKeys', + // 'keyProviderGetKeys', + // 'keyProviderDispose', + // 'frameCryptorFactoryCreateFrameCryptor', + // 'frameCryptorFactoryCreateKeyProvider', + // 'frameCryptorSetKeyIndex', + // 'frameCryptorGetKeyIndex', + // 'frameCryptorSetEnabled', + // 'frameCryptorGetEnabled', + // 'frameCryptorDispose', + + private: + FlutterWebRTCBase* base_; + std::map> + frame_cryptors_; + std::map> + frame_cryptor_observers_; + std::map> key_providers_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX diff --git a/common/cpp/include/flutter_media_stream.h b/common/cpp/include/flutter_media_stream.h new file mode 100644 index 0000000000..8139a56174 --- /dev/null +++ b/common/cpp/include/flutter_media_stream.h @@ -0,0 +1,57 @@ +#ifndef FLUTTER_WEBRTC_RTC_GET_USERMEDIA_HXX +#define FLUTTER_WEBRTC_RTC_GET_USERMEDIA_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +namespace flutter_webrtc_plugin { + +class FlutterMediaStream { + public: + FlutterMediaStream(FlutterWebRTCBase* base); + + void GetUserMedia(const 
EncodableMap& constraints, + std::unique_ptr result); + + void GetUserAudio(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params); + + void GetUserVideo(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params); + + void GetSources(std::unique_ptr result); + + void SelectAudioOutput(const std::string& device_id, + std::unique_ptr result); + + void SelectAudioInput(const std::string& device_id, + std::unique_ptr result); + + void MediaStreamGetTracks(const std::string& stream_id, + std::unique_ptr result); + + void MediaStreamDispose(const std::string& stream_id, + std::unique_ptr result); + + void MediaStreamTrackSetEnable(const std::string& track_id, + std::unique_ptr result); + + void MediaStreamTrackSwitchCamera(const std::string& track_id, + std::unique_ptr result); + + void MediaStreamTrackDispose(const std::string& track_id, + std::unique_ptr result); + + void CreateLocalMediaStream(std::unique_ptr result); + + void OnDeviceChange(); + + private: + FlutterWebRTCBase* base_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_GET_USERMEDIA_HXX diff --git a/common/cpp/include/flutter_peerconnection.h b/common/cpp/include/flutter_peerconnection.h new file mode 100644 index 0000000000..699823dfdc --- /dev/null +++ b/common/cpp/include/flutter_peerconnection.h @@ -0,0 +1,209 @@ +#ifndef FLUTTER_WEBRTC_RTC_PEER_CONNECTION_HXX +#define FLUTTER_WEBRTC_RTC_PEER_CONNECTION_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +namespace flutter_webrtc_plugin { + +class FlutterPeerConnectionObserver : public RTCPeerConnectionObserver { + public: + FlutterPeerConnectionObserver(FlutterWebRTCBase* base, + scoped_refptr peerconnection, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channel_name, + std::string& peerConnectionId); + + virtual void OnSignalingState(RTCSignalingState state) override; + virtual void 
OnPeerConnectionState(RTCPeerConnectionState state) override; + virtual void OnIceGatheringState(RTCIceGatheringState state) override; + virtual void OnIceConnectionState(RTCIceConnectionState state) override; + virtual void OnIceCandidate( + scoped_refptr candidate) override; + virtual void OnAddStream(scoped_refptr stream) override; + virtual void OnRemoveStream(scoped_refptr stream) override; + + virtual void OnTrack(scoped_refptr transceiver) override; + virtual void OnAddTrack(vector> streams, + scoped_refptr receiver) override; + virtual void OnRemoveTrack(scoped_refptr receiver) override; + virtual void OnDataChannel( + scoped_refptr data_channel) override; + virtual void OnRenegotiationNeeded() override; + + scoped_refptr MediaStreamForId(const std::string& id); + + scoped_refptr MediaTrackForId(const std::string& id); + + void RemoveStreamForId(const std::string& id); + + private: + std::unique_ptr event_channel_; + scoped_refptr peerconnection_; + std::map> remote_streams_; + FlutterWebRTCBase* base_; + std::string id_; +}; + +class FlutterPeerConnection { + public: + FlutterPeerConnection(FlutterWebRTCBase* base) : base_(base) {} + + void CreateRTCPeerConnection(const EncodableMap& configuration, + const EncodableMap& constraints, + std::unique_ptr result); + + void RTCPeerConnectionClose(RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result); + + void RTCPeerConnectionDispose(RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result); + + void CreateOffer(const EncodableMap& constraints, + RTCPeerConnection* pc, + std::unique_ptr result); + + void CreateAnswer(const EncodableMap& constraints, + RTCPeerConnection* pc, + std::unique_ptr result); + + void SetLocalDescription(RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result); + + void SetRemoteDescription(RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result); + + void GetLocalDescription(RTCPeerConnection* pc, + 
std::unique_ptr result); + + void GetRemoteDescription(RTCPeerConnection* pc, + std::unique_ptr result); + + scoped_refptr mapToRtpTransceiverInit( + const EncodableMap& transceiverInit); + + RTCRtpTransceiverDirection stringToTransceiverDirection( + std::string direction); + + libwebrtc::scoped_refptr mapToEncoding( + const EncodableMap& parameters); + + void AddTransceiver(RTCPeerConnection* pc, + const std::string& trackId, + const std::string& mediaType, + const EncodableMap& transceiverInit, + std::unique_ptr result); + + void GetTransceivers(RTCPeerConnection* pc, + std::unique_ptr result); + + void GetReceivers(RTCPeerConnection* pc, + std::unique_ptr result); + + void RtpSenderSetTrack(RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result); + + void RtpSenderSetStream(RTCPeerConnection* pc, + std::vector streamIds, + std::string rtpSenderId, + std::unique_ptr result); + + void RtpSenderReplaceTrack(RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result); + + scoped_refptr updateRtpParameters( + EncodableMap newParameters, + scoped_refptr parameters); + + void RtpSenderSetParameters(RTCPeerConnection* pc, + std::string rtpSenderId, + const EncodableMap& parameters, + std::unique_ptr result); + + void RtpTransceiverStop(RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result); + + void RtpTransceiverGetCurrentDirection( + RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result); + + void SetConfiguration(RTCPeerConnection* pc, + const EncodableMap& configuration, + std::unique_ptr result); + + void CaptureFrame(RTCVideoTrack* track, + std::string path, + std::unique_ptr result); + + scoped_refptr getRtpTransceiverById(RTCPeerConnection* pc, + std::string id); + + void RtpTransceiverSetDirection(RTCPeerConnection* pc, + std::string transceiverId, + std::string direction, + std::unique_ptr result); + + void 
RtpTransceiverSetCodecPreferences( + RTCPeerConnection* pc, + std::string transceiverId, + const EncodableList codecs, + std::unique_ptr result); + + void GetSenders(RTCPeerConnection* pc, + std::unique_ptr result); + + void AddIceCandidate(RTCIceCandidate* candidate, + RTCPeerConnection* pc, + std::unique_ptr result); + + void GetStats(const std::string& track_id, + RTCPeerConnection* pc, + std::unique_ptr result); + + void MediaStreamAddTrack(scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result); + + void MediaStreamRemoveTrack(scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result); + + void AddTrack(RTCPeerConnection* pc, + scoped_refptr track, + std::vector streamIds, + std::unique_ptr result); + + void RemoveTrack(RTCPeerConnection* pc, + std::string senderId, + std::unique_ptr result); + + private: + FlutterWebRTCBase* base_; +}; + +std::string RTCMediaTypeToString(RTCMediaType type); + +std::string transceiverDirectionString(RTCRtpTransceiverDirection direction); + +const char* iceConnectionStateString(RTCIceConnectionState state); + +const char* signalingStateString(RTCSignalingState state); + +const char* peerConnectionStateString(RTCPeerConnectionState state); + +const char* iceGatheringStateString(RTCIceGatheringState state); + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_PEER_CONNECTION_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_screen_capture.h b/common/cpp/include/flutter_screen_capture.h new file mode 100644 index 0000000000..07b4501e5e --- /dev/null +++ b/common/cpp/include/flutter_screen_capture.h @@ -0,0 +1,60 @@ +#ifndef FLUTTER_SCRREN_CAPTURE_HXX +#define FLUTTER_SCRREN_CAPTURE_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include "rtc_desktop_capturer.h" +#include "rtc_desktop_media_list.h" + +namespace flutter_webrtc_plugin { + +class FlutterScreenCapture : public MediaListObserver, + public DesktopCapturerObserver { + public: + 
FlutterScreenCapture(FlutterWebRTCBase* base); + + void GetDisplayMedia(const EncodableMap& constraints, + std::unique_ptr result); + + void GetDesktopSources(const EncodableList& types, + std::unique_ptr result); + + void UpdateDesktopSources(const EncodableList& types, + std::unique_ptr result); + + void GetDesktopSourceThumbnail(std::string source_id, + int width, + int height, + std::unique_ptr result); + + protected: + void OnMediaSourceAdded(scoped_refptr source) override; + + void OnMediaSourceRemoved(scoped_refptr source) override; + + void OnMediaSourceNameChanged(scoped_refptr source) override; + + void OnMediaSourceThumbnailChanged( + scoped_refptr source) override; + + void OnStart(scoped_refptr capturer) override; + + void OnPaused(scoped_refptr capturer) override; + + void OnStop(scoped_refptr capturer) override; + + void OnError(scoped_refptr capturer) override; + + private: + bool BuildDesktopSourcesList(const EncodableList& types, bool force_reload); + + private: + FlutterWebRTCBase* base_; + std::map> medialist_; + std::vector> sources_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // FLUTTER_SCRREN_CAPTURE_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_video_renderer.h b/common/cpp/include/flutter_video_renderer.h new file mode 100644 index 0000000000..b2454f8458 --- /dev/null +++ b/common/cpp/include/flutter_video_renderer.h @@ -0,0 +1,84 @@ +#ifndef FLUTTER_WEBRTC_RTC_VIDEO_RENDERER_HXX +#define FLUTTER_WEBRTC_RTC_VIDEO_RENDERER_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" + +#include "rtc_video_frame.h" +#include "rtc_video_renderer.h" + +#include + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterVideoRenderer + : public RTCVideoRenderer>, + public RefCountInterface { + public: + FlutterVideoRenderer() = default; + ~FlutterVideoRenderer(); + + void initialize(TextureRegistrar* registrar, + BinaryMessenger* messenger, + TaskRunner* task_runner, + 
std::unique_ptr texture, + int64_t texture_id); + + virtual const FlutterDesktopPixelBuffer* CopyPixelBuffer(size_t width, + size_t height) const; + + virtual void OnFrame(scoped_refptr frame) override; + + void SetVideoTrack(scoped_refptr track); + + int64_t texture_id() { return texture_id_; } + + bool CheckMediaStream(std::string mediaId); + + bool CheckVideoTrack(std::string mediaId); + + std::string media_stream_id; + + private: + struct FrameSize { + size_t width; + size_t height; + }; + FrameSize last_frame_size_ = {0, 0}; + bool first_frame_rendered = false; + TextureRegistrar* registrar_ = nullptr; + std::unique_ptr event_channel_; + int64_t texture_id_ = -1; + scoped_refptr track_ = nullptr; + scoped_refptr frame_; + std::unique_ptr texture_; + std::shared_ptr pixel_buffer_; + mutable std::shared_ptr rgb_buffer_; + mutable std::mutex mutex_; + RTCVideoFrame::VideoRotation rotation_ = RTCVideoFrame::kVideoRotation_0; +}; + +class FlutterVideoRendererManager { + public: + FlutterVideoRendererManager(FlutterWebRTCBase* base); + + void CreateVideoRendererTexture(std::unique_ptr result); + + void VideoRendererSetSrcObject(int64_t texture_id, + const std::string& stream_id, + const std::string& owner_tag, + const std::string& track_id); + + void VideoRendererDispose(int64_t texture_id, + std::unique_ptr result); + + private: + FlutterWebRTCBase* base_; + std::map> renderers_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_RTC_VIDEO_RENDERER_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_webrtc.h b/common/cpp/include/flutter_webrtc.h new file mode 100644 index 0000000000..573956b9aa --- /dev/null +++ b/common/cpp/include/flutter_webrtc.h @@ -0,0 +1,45 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_HXX +#define PLUGINS_FLUTTER_WEBRTC_HXX + +#include "flutter_common.h" + +#include "flutter_data_channel.h" +#include "flutter_frame_cryptor.h" +#include "flutter_media_stream.h" +#include "flutter_peerconnection.h" +#include 
"flutter_screen_capture.h" +#include "flutter_video_renderer.h" + +#include "libwebrtc.h" + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterWebRTCPlugin : public flutter::Plugin { + public: + virtual BinaryMessenger* messenger() = 0; + + virtual TextureRegistrar* textures() = 0; + + virtual TaskRunner* task_runner() = 0; +}; + +class FlutterWebRTC : public FlutterWebRTCBase, + public FlutterVideoRendererManager, + public FlutterMediaStream, + public FlutterPeerConnection, + public FlutterScreenCapture, + public FlutterDataChannel, + public FlutterFrameCryptor { + public: + FlutterWebRTC(FlutterWebRTCPlugin* plugin); + virtual ~FlutterWebRTC(); + + void HandleMethodCall(const MethodCallProxy& method_call, + std::unique_ptr result); +}; + +} // namespace flutter_webrtc_plugin + +#endif // PLUGINS_FLUTTER_WEBRTC_HXX diff --git a/common/cpp/include/flutter_webrtc_base.h b/common/cpp/include/flutter_webrtc_base.h new file mode 100644 index 0000000000..e894978ab1 --- /dev/null +++ b/common/cpp/include/flutter_webrtc_base.h @@ -0,0 +1,136 @@ +#ifndef FLUTTER_WEBRTC_BASE_HXX +#define FLUTTER_WEBRTC_BASE_HXX + +#include "flutter_common.h" + +#include +#include +#include +#include +#include + +#include "libwebrtc.h" + +#include "rtc_audio_device.h" +#include "rtc_audio_processing.h" +#include "rtc_desktop_device.h" +#include "rtc_dtmf_sender.h" +#include "rtc_media_stream.h" +#include "rtc_media_track.h" +#include "rtc_mediaconstraints.h" +#include "rtc_peerconnection.h" +#include "rtc_peerconnection_factory.h" +#include "rtc_video_device.h" + +namespace flutter_webrtc_plugin { + +using namespace libwebrtc; + +class FlutterVideoRenderer; +class FlutterRTCDataChannelObserver; +class FlutterPeerConnectionObserver; + +class FlutterWebRTCBase { + public: + friend class FlutterMediaStream; + friend class FlutterPeerConnection; + friend class FlutterVideoRendererManager; + friend class FlutterDataChannel; + friend class 
FlutterPeerConnectionObserver; + friend class FlutterScreenCapture; + friend class FlutterFrameCryptor; + enum ParseConstraintType { kMandatory, kOptional }; + + public: + FlutterWebRTCBase(BinaryMessenger* messenger, TextureRegistrar* textures, TaskRunner* task_runner); + ~FlutterWebRTCBase(); + + virtual scoped_refptr audio_processing() { + return audio_processing_; + } + + virtual scoped_refptr MediaTrackForId(const std::string& id); + + std::string GenerateUUID(); + + RTCPeerConnection* PeerConnectionForId(const std::string& id); + + void RemovePeerConnectionForId(const std::string& id); + + void RemoveMediaTrackForId(const std::string& id); + + FlutterPeerConnectionObserver* PeerConnectionObserversForId( + const std::string& id); + + void RemovePeerConnectionObserversForId(const std::string& id); + + scoped_refptr MediaStreamForId( + const std::string& id, + std::string ownerTag = std::string()); + + void RemoveStreamForId(const std::string& id); + + bool ParseConstraints(const EncodableMap& constraints, + RTCConfiguration* configuration); + + scoped_refptr ParseMediaConstraints( + const EncodableMap& constraints); + + bool ParseRTCConfiguration(const EncodableMap& map, + RTCConfiguration& configuration); + + scoped_refptr MediaTracksForId(const std::string& id); + + void RemoveTracksForId(const std::string& id); + + EventChannelProxy* event_channel(); + + + libwebrtc::scoped_refptr GetRtpSenderById( + RTCPeerConnection* pc, + std::string id); + + libwebrtc::scoped_refptr GetRtpReceiverById( + RTCPeerConnection* pc, + std::string id); + + private: + void ParseConstraints(const EncodableMap& src, + scoped_refptr mediaConstraints, + ParseConstraintType type = kMandatory); + + bool CreateIceServers(const EncodableList& iceServersArray, + IceServer* ice_servers); + + protected: + scoped_refptr factory_; + scoped_refptr audio_device_; + scoped_refptr video_device_; + scoped_refptr desktop_device_; + scoped_refptr audio_processing_; + RTCConfiguration 
configuration_; + + std::map> peerconnections_; + std::map> local_streams_; + std::map> local_tracks_; + std::map> video_capturers_; + std::map> renders_; + std::map> + data_channel_observers_; + std::map> + peerconnection_observers_; + mutable std::mutex mutex_; + + void lock() { mutex_.lock(); } + void unlock() { mutex_.unlock(); } + + protected: + BinaryMessenger* messenger_; + TaskRunner *task_runner_; + TextureRegistrar* textures_; + std::unique_ptr event_channel_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // !FLUTTER_WEBRTC_BASE_HXX diff --git a/common/cpp/include/task_runner.h b/common/cpp/include/task_runner.h new file mode 100644 index 0000000000..74c510c581 --- /dev/null +++ b/common/cpp/include/task_runner.h @@ -0,0 +1,17 @@ +// Copyright 2024 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + #ifndef PACKAGES_FLUTTER_WEBRTC_TASK_RUNNER_H_ + #define PACKAGES_FLUTTER_WEBRTC_TASK_RUNNER_H_ + + #include + + using TaskClosure = std::function; + + class TaskRunner { + public: + virtual void EnqueueTask(TaskClosure task) = 0; + virtual ~TaskRunner() = default; + }; + + #endif // PACKAGES_FLUTTER_WEBRTC_TASK_RUNNER_H_ \ No newline at end of file diff --git a/common/cpp/src/flutter_common.cc b/common/cpp/src/flutter_common.cc new file mode 100644 index 0000000000..1daa606a17 --- /dev/null +++ b/common/cpp/src/flutter_common.cc @@ -0,0 +1,143 @@ +#include "flutter_common.h" +#include "task_runner.h" + +#include + +class MethodCallProxyImpl : public MethodCallProxy { + public: + explicit MethodCallProxyImpl(const MethodCall& method_call) + : method_call_(method_call) {} + + ~MethodCallProxyImpl() {} + + // The name of the method being called. + + const std::string& method_name() const override { + return method_call_.method_name(); + } + + // The arguments to the method call, or NULL if there are none. 
+ const EncodableValue* arguments() const override { + return method_call_.arguments(); + } + + private: + const MethodCall& method_call_; +}; + +std::unique_ptr MethodCallProxy::Create( + const MethodCall& call) { + return std::make_unique(call); +} + +class MethodResultProxyImpl : public MethodResultProxy { + public: + explicit MethodResultProxyImpl(std::unique_ptr method_result) + : method_result_(std::move(method_result)) {} + ~MethodResultProxyImpl() {} + + // Reports success with no result. + void Success() override { method_result_->Success(); } + + // Reports success with a result. + void Success(const EncodableValue& result) override { + method_result_->Success(result); + } + + // Reports an error. + void Error(const std::string& error_code, + const std::string& error_message, + const EncodableValue& error_details) override { + method_result_->Error(error_code, error_message, error_details); + } + + // Reports an error with a default error code and no details. + void Error(const std::string& error_code, + const std::string& error_message = "") override { + method_result_->Error(error_code, error_message); + } + + void NotImplemented() override { method_result_->NotImplemented(); } + + private: + std::unique_ptr method_result_; +}; + +std::unique_ptr MethodResultProxy::Create( + std::unique_ptr method_result) { + return std::make_unique(std::move(method_result)); +} + +class EventChannelProxyImpl : public EventChannelProxy { + public: + EventChannelProxyImpl(BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName) + : channel_(std::make_unique( + messenger, + channelName, + &flutter::StandardMethodCodec::GetInstance())), + task_runner_(task_runner) { + auto handler = std::make_unique< + flutter::StreamHandlerFunctions>( + [&](const EncodableValue* arguments, + std::unique_ptr>&& events) + -> std::unique_ptr> { + sink_ = std::move(events); + std::weak_ptr weak_sink = sink_; + for (auto& event : event_queue_) { + 
PostEvent(event); + } + event_queue_.clear(); + on_listen_called_ = true; + return nullptr; + }, + [&](const EncodableValue* arguments) + -> std::unique_ptr> { + on_listen_called_ = false; + return nullptr; + }); + + channel_->SetStreamHandler(std::move(handler)); + } + + virtual ~EventChannelProxyImpl() {} + + void Success(const EncodableValue& event, bool cache_event = true) override { + if (on_listen_called_) { + PostEvent(event); + } else { + if (cache_event) { + event_queue_.push_back(event); + } + } + } + + void PostEvent(const EncodableValue& event) { + if(task_runner_) { + std::weak_ptr weak_sink = sink_; + task_runner_->EnqueueTask([weak_sink, event]() { + auto sink = weak_sink.lock(); + if (sink) { + sink->Success(event); + } + }); + } else { + sink_->Success(event); + } + } + + private: + std::unique_ptr channel_; + std::shared_ptr> sink_; + std::list event_queue_; + bool on_listen_called_ = false; + TaskRunner* task_runner_; + }; + +std::unique_ptr EventChannelProxy::Create( + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName) { + return std::make_unique(messenger, task_runner, channelName); +} \ No newline at end of file diff --git a/common/cpp/src/flutter_data_channel.cc b/common/cpp/src/flutter_data_channel.cc new file mode 100644 index 0000000000..37afd12b54 --- /dev/null +++ b/common/cpp/src/flutter_data_channel.cc @@ -0,0 +1,158 @@ +#include "flutter_data_channel.h" + +#include + +namespace flutter_webrtc_plugin { + +FlutterRTCDataChannelObserver::FlutterRTCDataChannelObserver( + scoped_refptr data_channel, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channelName) + : event_channel_(EventChannelProxy::Create(messenger, task_runner, channelName)), + data_channel_(data_channel) { + data_channel_->RegisterObserver(this); +} + +FlutterRTCDataChannelObserver::~FlutterRTCDataChannelObserver() {} + +void FlutterDataChannel::CreateDataChannel( + const std::string& peerConnectionId, + 
const std::string& label, + const EncodableMap& dataChannelDict, + RTCPeerConnection* pc, + std::unique_ptr result) { + RTCDataChannelInit init; + init.id = GetValue(dataChannelDict.find(EncodableValue("id"))->second); + init.ordered = + GetValue(dataChannelDict.find(EncodableValue("ordered"))->second); + + if (dataChannelDict.find(EncodableValue("maxRetransmits")) != + dataChannelDict.end()) { + init.maxRetransmits = GetValue( + dataChannelDict.find(EncodableValue("maxRetransmits"))->second); + } + + std::string protocol = "sctp"; + + if (dataChannelDict.find(EncodableValue("protocol")) == + dataChannelDict.end()) { + protocol = GetValue( + dataChannelDict.find(EncodableValue("protocol"))->second); + } + + init.protocol = protocol; + + init.negotiated = GetValue( + dataChannelDict.find(EncodableValue("negotiated"))->second); + + scoped_refptr data_channel = + pc->CreateDataChannel(label.c_str(), &init); + + std::string uuid = base_->GenerateUUID(); + std::string event_channel = + "FlutterWebRTC/dataChannelEvent" + peerConnectionId + uuid; + + std::unique_ptr observer( + new FlutterRTCDataChannelObserver(data_channel, base_->messenger_, base_->task_runner_, + event_channel)); + + base_->lock(); + base_->data_channel_observers_[uuid] = std::move(observer); + base_->unlock(); + + EncodableMap params; + params[EncodableValue("id")] = EncodableValue(init.id); + params[EncodableValue("label")] = + EncodableValue(data_channel->label().std_string()); + params[EncodableValue("flutterId")] = EncodableValue(uuid); + result->Success(EncodableValue(params)); +} + +void FlutterDataChannel::DataChannelSend( + RTCDataChannel* data_channel, + const std::string& type, + const EncodableValue& data, + std::unique_ptr result) { + bool is_binary = type == "binary"; + if (is_binary && TypeIs>(data)) { + std::vector buffer = GetValue>(data); + data_channel->Send(buffer.data(), static_cast(buffer.size()), + true); + } else { + std::string str = GetValue(data); + 
data_channel->Send(reinterpret_cast(str.c_str()), + static_cast(str.length()), false); + } + result->Success(); +} + +void FlutterDataChannel::DataChannelGetBufferedAmount(RTCDataChannel* data_channel, + std::unique_ptr result) { + EncodableMap params; + params[EncodableValue("bufferedAmount")] = EncodableValue((int64_t)data_channel->buffered_amount()); + result->Success(EncodableValue(params)); +} + +void FlutterDataChannel::DataChannelClose( + RTCDataChannel* data_channel, + const std::string& data_channel_uuid, + std::unique_ptr result) { + data_channel->Close(); + auto it = base_->data_channel_observers_.find(data_channel_uuid); + if (it != base_->data_channel_observers_.end()) + base_->data_channel_observers_.erase(it); + result->Success(); +} + +RTCDataChannel* FlutterDataChannel::DataChannelForId(const std::string& uuid) { + auto it = base_->data_channel_observers_.find(uuid); + + if (it != base_->data_channel_observers_.end()) { + FlutterRTCDataChannelObserver* observer = it->second.get(); + scoped_refptr data_channel = observer->data_channel(); + return data_channel.get(); + } + return nullptr; +} + +static const char* DataStateString(RTCDataChannelState state) { + switch (state) { + case RTCDataChannelConnecting: + return "connecting"; + case RTCDataChannelOpen: + return "open"; + case RTCDataChannelClosing: + return "closing"; + case RTCDataChannelClosed: + return "closed"; + } + return ""; +} + +void FlutterRTCDataChannelObserver::OnStateChange(RTCDataChannelState state) { + EncodableMap params; + params[EncodableValue("event")] = EncodableValue("dataChannelStateChanged"); + params[EncodableValue("id")] = EncodableValue(data_channel_->id()); + params[EncodableValue("state")] = EncodableValue(DataStateString(state)); + auto data = EncodableValue(params); + event_channel_->Success(data); +} + +void FlutterRTCDataChannelObserver::OnMessage(const char* buffer, + int length, + bool binary) { + EncodableMap params; + params[EncodableValue("event")] = 
EncodableValue("dataChannelReceiveMessage"); + + params[EncodableValue("id")] = EncodableValue(data_channel_->id()); + params[EncodableValue("type")] = EncodableValue(binary ? "binary" : "text"); + std::string str(buffer, length); + params[EncodableValue("data")] = + binary ? EncodableValue(std::vector(str.begin(), str.end())) + : EncodableValue(str); + + auto data = EncodableValue(params); + event_channel_->Success(data); +} +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_frame_capturer.cc b/common/cpp/src/flutter_frame_capturer.cc new file mode 100644 index 0000000000..4d0026d74f --- /dev/null +++ b/common/cpp/src/flutter_frame_capturer.cc @@ -0,0 +1,76 @@ +#ifdef _MSC_VER +#define _CRT_SECURE_NO_WARNINGS +#endif + +#include "flutter_frame_capturer.h" +#include +#include +#include "svpng.hpp" + +namespace flutter_webrtc_plugin { + +FlutterFrameCapturer::FlutterFrameCapturer(RTCVideoTrack* track, + std::string path) { + track_ = track; + path_ = path; +} + +void FlutterFrameCapturer::OnFrame(scoped_refptr frame) { + if (frame_ != nullptr) { + return; + } + + frame_ = frame.get()->Copy(); + catch_frame_ = true; +} + +void FlutterFrameCapturer::CaptureFrame( + std::unique_ptr result) { + mutex_.lock(); + // Here init catch_frame_ flag + catch_frame_ = false; + + track_->AddRenderer(this); + // Here waiting for catch_frame_ is set to true + while(!catch_frame_){} + // Here unlock the mutex + mutex_.unlock(); + + mutex_.lock(); + track_->RemoveRenderer(this); + + bool success = SaveFrame(); + mutex_.unlock(); + + std::shared_ptr result_ptr(result.release()); + if (success) { + result_ptr->Success(); + } else { + result_ptr->Error("1", "Cannot save the frame as .png file"); + } +} + +bool FlutterFrameCapturer::SaveFrame() { + if (frame_ == nullptr) { + return false; + } + + int width = frame_.get()->width(); + int height = frame_.get()->height(); + int bytes_per_pixel = 4; + uint8_t* pixels = new uint8_t[width * height * bytes_per_pixel]; + + 
frame_.get()->ConvertToARGB(RTCVideoFrame::Type::kABGR, pixels, + /* unused */ -1, width, height); + + FILE* file = fopen(path_.c_str(), "wb"); + if (!file) { + return false; + } + + svpng(file, width, height, pixels, 1); + fclose(file); + return true; +} + +} // namespace flutter_webrtc_plugin \ No newline at end of file diff --git a/common/cpp/src/flutter_frame_cryptor.cc b/common/cpp/src/flutter_frame_cryptor.cc new file mode 100644 index 0000000000..a9e44e9bd6 --- /dev/null +++ b/common/cpp/src/flutter_frame_cryptor.cc @@ -0,0 +1,608 @@ +#include "flutter_frame_cryptor.h" + +#include "base/scoped_ref_ptr.h" + +namespace flutter_webrtc_plugin { + +libwebrtc::Algorithm AlgorithmFromInt(int algorithm) { + switch (algorithm) { + case 0: + return libwebrtc::Algorithm::kAesGcm; + case 1: + return libwebrtc::Algorithm::kAesCbc; + default: + return libwebrtc::Algorithm::kAesGcm; + } +} + +std::string frameCryptionStateToString(libwebrtc::RTCFrameCryptionState state) { + switch (state) { + case RTCFrameCryptionState::kNew: + return "new"; + case RTCFrameCryptionState::kOk: + return "ok"; + case RTCFrameCryptionState::kDecryptionFailed: + return "decryptionFailed"; + case RTCFrameCryptionState::kEncryptionFailed: + return "encryptionFailed"; + case RTCFrameCryptionState::kInternalError: + return "internalError"; + case RTCFrameCryptionState::kKeyRatcheted: + return "keyRatcheted"; + case RTCFrameCryptionState::kMissingKey: + return "missingKey"; + } + return ""; +} + +void FlutterFrameCryptorObserver::OnFrameCryptionStateChanged( + const string participant_id, + libwebrtc::RTCFrameCryptionState state) { + EncodableMap params; + params[EncodableValue("event")] = EncodableValue("frameCryptionStateChanged"); + params[EncodableValue("participantId")] = EncodableValue(participant_id.std_string()); + params[EncodableValue("state")] = + EncodableValue(frameCryptionStateToString(state)); + event_channel_->Success(EncodableValue(params)); +} + +bool 
FlutterFrameCryptor::HandleFrameCryptorMethodCall( + const MethodCallProxy& method_call, + std::unique_ptr result, + std::unique_ptr *outResult) { + const std::string& method_name = method_call.method_name(); + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return true; + } + const EncodableMap params = GetValue(*method_call.arguments()); + + if (method_name == "frameCryptorFactoryCreateFrameCryptor") { + FrameCryptorFactoryCreateFrameCryptor(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorSetKeyIndex") { + FrameCryptorSetKeyIndex(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorGetKeyIndex") { + FrameCryptorGetKeyIndex(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorSetEnabled") { + FrameCryptorSetEnabled(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorGetEnabled") { + FrameCryptorGetEnabled(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorDispose") { + FrameCryptorDispose(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorFactoryCreateKeyProvider") { + FrameCryptorFactoryCreateKeyProvider(params, std::move(result)); + return true; + } else if (method_name == "keyProviderSetSharedKey") { + KeyProviderSetSharedKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderRatchetSharedKey") { + KeyProviderRatchetSharedKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderExportSharedKey") { + KeyProviderExportSharedKey(params, std::move(result)); + return true; + }else if (method_name == "keyProviderSetKey") { + KeyProviderSetKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderRatchetKey") { + KeyProviderRatchetKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderExportKey") { + 
KeyProviderExportKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderSetSifTrailer") { + KeyProviderSetSifTrailer(params, std::move(result)); + return true; + } else if (method_name == "keyProviderDispose") { + KeyProviderDispose(params, std::move(result)); + return true; + } + + *outResult = std::move(result); + return false; +} + +void FlutterFrameCryptor::FrameCryptorFactoryCreateFrameCryptor( + const EncodableMap& constraints, + std::unique_ptr result) { + auto type = findString(constraints, "type"); + if (type == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "type is null"); + return; + } + + auto peerConnectionId = findString(constraints, "peerConnectionId"); + if (peerConnectionId == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "peerConnectionId is null"); + return; + } + + RTCPeerConnection* pc = base_->PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error( + "FrameCryptorFactoryCreateFrameCryptorFailed", + "FrameCryptorFactoryCreateFrameCryptor() peerConnection is null"); + return; + } + + auto rtpSenderId = findString(constraints, "rtpSenderId"); + auto rtpReceiverId = findString(constraints, "rtpReceiverId"); + + if (rtpReceiverId == std::string() && rtpSenderId == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "rtpSenderId or rtpReceiverId is null"); + return; + } + + auto algorithm = findInt(constraints, "algorithm"); + auto participantId = findString(constraints, "participantId"); + auto keyProviderId = findString(constraints, "keyProviderId"); + + if (type == "sender") { + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "sender is null"); + return; + } + std::string uuid = base_->GenerateUUID(); + auto keyProvider = key_providers_[keyProviderId]; + if (keyProvider == nullptr) { + 
result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "keyProvider is null"); + return; + } + auto frameCryptor = + libwebrtc::FrameCryptorFactory::frameCryptorFromRtpSender(base_->factory_, + string(participantId), sender, AlgorithmFromInt(algorithm), + keyProvider); + std::string event_channel = "FlutterWebRTC/frameCryptorEvent" + uuid; + + scoped_refptr observer(new RefCountedObject(base_->messenger_, base_->task_runner_, event_channel)); + + frameCryptor->RegisterRTCFrameCryptorObserver(observer); + + frame_cryptors_[uuid] = frameCryptor; + frame_cryptor_observers_[uuid] = observer; + EncodableMap params; + params[EncodableValue("frameCryptorId")] = uuid; + + result->Success(EncodableValue(params)); + } else if (type == "receiver") { + auto receiver = base_->GetRtpReceiverById(pc, rtpReceiverId); + if (nullptr == receiver.get()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "receiver is null"); + return; + } + std::string uuid = base_->GenerateUUID(); + auto keyProvider = key_providers_[keyProviderId]; + auto frameCryptor = + libwebrtc::FrameCryptorFactory::frameCryptorFromRtpReceiver(base_->factory_, + string(participantId), receiver, AlgorithmFromInt(algorithm), + keyProvider); + + std::string event_channel = "FlutterWebRTC/frameCryptorEvent" + uuid; + + scoped_refptr observer(new RefCountedObject(base_->messenger_, base_->task_runner_, event_channel)); + + frameCryptor->RegisterRTCFrameCryptorObserver(observer.get()); + + frame_cryptors_[uuid] = frameCryptor; + frame_cryptor_observers_[uuid] = observer; + EncodableMap params; + params[EncodableValue("frameCryptorId")] = uuid; + + result->Success(EncodableValue(params)); + } else { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "type is not sender or receiver"); + } +} + +void FlutterFrameCryptor::FrameCryptorSetKeyIndex( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if 
(frameCryptorId == std::string()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptor is null"); + return; + } + auto key_index = findInt(constraints, "keyIndex"); + auto res = frameCryptor->SetKeyIndex(key_index); + EncodableMap params; + params[EncodableValue("result")] = res; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorGetKeyIndex( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptor is null"); + return; + } + EncodableMap params; + params[EncodableValue("keyIndex")] = frameCryptor->key_index(); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorSetEnabled( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorSetEnabledFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorSetEnabledFailed", "frameCryptor is null"); + return; + } + auto enabled = findBoolean(constraints, "enabled"); + frameCryptor->SetEnabled(enabled); + EncodableMap params; + params[EncodableValue("result")] = enabled; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorGetEnabled( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = 
findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetEnabledFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetEnabledFailed", "frameCryptor is null"); + return; + } + EncodableMap params; + params[EncodableValue("enabled")] = frameCryptor->enabled(); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorDispose( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorDisposeFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorDisposeFailed", "frameCryptor is null"); + return; + } + frameCryptor->DeRegisterRTCFrameCryptorObserver(); + frame_cryptors_.erase(frameCryptorId); + frame_cryptor_observers_.erase(frameCryptorId); + EncodableMap params; + params[EncodableValue("result")] = "success"; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorFactoryCreateKeyProvider( + const EncodableMap& constraints, + std::unique_ptr result) { + libwebrtc::KeyProviderOptions options; + + + auto keyProviderOptions = findMap(constraints, "keyProviderOptions"); + if (keyProviderOptions == EncodableMap()) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", "keyProviderOptions is null"); + return; + } + + auto sharedKey = findBoolean(keyProviderOptions, "sharedKey"); + options.shared_key = sharedKey; + + + auto uncryptedMagicBytes = findVector(keyProviderOptions, "uncryptedMagicBytes"); + if (uncryptedMagicBytes.size() != 0) { + options.uncrypted_magic_bytes = uncryptedMagicBytes; + } + + auto ratchetSalt = findVector(keyProviderOptions, "ratchetSalt"); + if 
(ratchetSalt.size() == 0) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "ratchetSalt is null"); + return; + } + + options.ratchet_salt = ratchetSalt; + + auto ratchetWindowSize = findInt(keyProviderOptions, "ratchetWindowSize"); + if (ratchetWindowSize == -1) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "ratchetSalt is null"); + return; + } + + options.ratchet_window_size = ratchetWindowSize; + + auto failureTolerance = findInt(keyProviderOptions, "failureTolerance"); + options.failure_tolerance = failureTolerance; + + auto keyRingSize = findInt(keyProviderOptions, "keyRingSize"); + options.key_ring_size = keyRingSize; + + auto discardFrameWhenCryptorNotReady = findBoolean(keyProviderOptions, "discardFrameWhenCryptorNotReady"); + options.discard_frame_when_cryptor_not_ready = discardFrameWhenCryptorNotReady; + + auto keyProvider = libwebrtc::KeyProvider::Create(&options); + if (nullptr == keyProvider.get()) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "createKeyProvider failed"); + return; + } + auto uuid = base_->GenerateUUID(); + key_providers_[uuid] = keyProvider; + EncodableMap params; + params[EncodableValue("keyProviderId")] = uuid; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderSetSharedKey(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetSharedKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetSharedKeyFailed", "keyProvider is null"); + return; + } + + auto key = findVector(constraints, "key"); + if (key.size() == 0) { + result->Error("KeyProviderSetSharedKeyFailed", "key is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + 
result->Error("KeyProviderSetSharedKeyFailed", "keyIndex is null"); + return; + } + + + keyProvider->SetSharedKey(key_index, vector(key)); + EncodableMap params; + params[EncodableValue("result")] = true; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderRatchetSharedKey(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderRatchetSharedKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderRatchetSharedKeyFailed", "keyProvider is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderRatchetSharedKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->RatchetSharedKey(key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderExportSharedKey(const EncodableMap& constraints, + std::unique_ptr result) { +auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderExportSharedKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderExportSharedKeyFailed", "keyProvider is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderExportSharedKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->ExportSharedKey(key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + 
result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderExportKey(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderExportKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderExportKeyFailed", "keyProvider is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderExportKeyFailed", "participantId is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderExportKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->ExportKey(participant_id, key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderSetSifTrailer(const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetSifTrailerFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetSifTrailerFailed", "keyProvider is null"); + return; + } + + auto sifTrailer = findVector(constraints, "sifTrailer"); + if (sifTrailer.size() == 0) { + result->Error("KeyProviderSetSifTrailerFailed", "sifTrailer is null"); + return; + } + + keyProvider->SetSifTrailer(vector(sifTrailer)); + EncodableMap params; + params[EncodableValue("result")] = true; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderSetKey( + const 
EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetKeyFailed", "keyProvider is null"); + return; + } + + auto key = findVector(constraints, "key"); + if (key.size() == 0) { + result->Error("KeyProviderSetKeyFailed", "key is null"); + return; + } + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderSetKeyFailed", "keyIndex is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderSetKeyFailed", "participantId is null"); + return; + } + + keyProvider->SetKey(participant_id, key_index, vector(key)); + EncodableMap params; + params[EncodableValue("result")] = true; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderRatchetKey( + const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetKeysFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetKeysFailed", "keyProvider is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderSetKeyFailed", "participantId is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderSetKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->RatchetKey(participant_id, key_index); + + 
EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + + +void FlutterFrameCryptor::KeyProviderDispose( + const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderDisposeFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderDisposeFailed", "keyProvider is null"); + return; + } + key_providers_.erase(keyProviderId); + EncodableMap params; + params[EncodableValue("result")] = "success"; + result->Success(EncodableValue(params)); +} + +} // namespace flutter_webrtc_plugin \ No newline at end of file diff --git a/common/cpp/src/flutter_media_stream.cc b/common/cpp/src/flutter_media_stream.cc new file mode 100644 index 0000000000..3f7a7a97a9 --- /dev/null +++ b/common/cpp/src/flutter_media_stream.cc @@ -0,0 +1,561 @@ +#include "flutter_media_stream.h" + +#define DEFAULT_WIDTH 1280 +#define DEFAULT_HEIGHT 720 +#define DEFAULT_FPS 30 + +namespace flutter_webrtc_plugin { + +FlutterMediaStream::FlutterMediaStream(FlutterWebRTCBase* base) : base_(base) { + base_->audio_device_->OnDeviceChange([&] { + EncodableMap info; + info[EncodableValue("event")] = "onDeviceChange"; + base_->event_channel()->Success(EncodableValue(info), false); + }); +} + +void FlutterMediaStream::GetUserMedia( + const EncodableMap& constraints, + std::unique_ptr result) { + std::string uuid = base_->GenerateUUID(); + scoped_refptr stream = + base_->factory_->CreateStream(uuid.c_str()); + + EncodableMap params; + params[EncodableValue("streamId")] = EncodableValue(uuid); + + auto it = constraints.find(EncodableValue("audio")); + if (it != constraints.end()) { + EncodableValue audio = it->second; + if (TypeIs(audio)) { + if (true == GetValue(audio)) { + 
GetUserAudio(constraints, stream, params); + } + } else if (TypeIs(audio)) { + GetUserAudio(constraints, stream, params); + } else { + params[EncodableValue("audioTracks")] = EncodableValue(EncodableList()); + } + } else { + params[EncodableValue("audioTracks")] = EncodableValue(EncodableList()); + } + + it = constraints.find(EncodableValue("video")); + params[EncodableValue("videoTracks")] = EncodableValue(EncodableList()); + if (it != constraints.end()) { + EncodableValue video = it->second; + if (TypeIs(video)) { + if (true == GetValue(video)) { + GetUserVideo(constraints, stream, params); + } + } else if (TypeIs(video)) { + GetUserVideo(constraints, stream, params); + } + } + + base_->local_streams_[uuid] = stream; + result->Success(EncodableValue(params)); +} + +void addDefaultAudioConstraints( + scoped_refptr audioConstraints) { + audioConstraints->AddOptionalConstraint("googNoiseSuppression", "true"); + audioConstraints->AddOptionalConstraint("googEchoCancellation", "true"); + audioConstraints->AddOptionalConstraint("echoCancellation", "true"); + audioConstraints->AddOptionalConstraint("googEchoCancellation2", "true"); + audioConstraints->AddOptionalConstraint("googDAEchoCancellation", "true"); +} + +std::string getSourceIdConstraint(const EncodableMap& mediaConstraints) { + auto it = mediaConstraints.find(EncodableValue("optional")); + if (it != mediaConstraints.end() && TypeIs(it->second)) { + EncodableList optional = GetValue(it->second); + for (size_t i = 0, size = optional.size(); i < size; i++) { + if (TypeIs(optional[i])) { + EncodableMap option = GetValue(optional[i]); + auto it2 = option.find(EncodableValue("sourceId")); + if (it2 != option.end() && TypeIs(it2->second)) { + return GetValue(it2->second); + } + } + } + } + return ""; +} + +std::string getDeviceIdConstraint(const EncodableMap& mediaConstraints) { + auto it = mediaConstraints.find(EncodableValue("deviceId")); + if (it != mediaConstraints.end() && TypeIs(it->second)) { + return 
GetValue(it->second); + } + return ""; +} + +void FlutterMediaStream::GetUserAudio(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params) { + bool enable_audio = false; + scoped_refptr audioConstraints; + std::string sourceId; + std::string deviceId; + auto it = constraints.find(EncodableValue("audio")); + if (it != constraints.end()) { + EncodableValue audio = it->second; + if (TypeIs(audio)) { + audioConstraints = RTCMediaConstraints::Create(); + addDefaultAudioConstraints(audioConstraints); + enable_audio = GetValue(audio); + sourceId = ""; + deviceId = ""; + } + if (TypeIs(audio)) { + EncodableMap localMap = GetValue(audio); + sourceId = getSourceIdConstraint(localMap); + deviceId = getDeviceIdConstraint(localMap); + audioConstraints = base_->ParseMediaConstraints(localMap); + enable_audio = true; + } + } + + // Selecting audio input device by sourceId and audio output device by + // deviceId + + if (enable_audio) { + char strRecordingName[256]; + char strRecordingGuid[256]; + int playout_devices = base_->audio_device_->PlayoutDevices(); + int recording_devices = base_->audio_device_->RecordingDevices(); + + for (uint16_t i = 0; i < recording_devices; i++) { + base_->audio_device_->RecordingDeviceName(i, strRecordingName, + strRecordingGuid); + if (sourceId != "" && sourceId == strRecordingGuid) { + base_->audio_device_->SetRecordingDevice(i); + } + } + + if (sourceId == "") { + base_->audio_device_->RecordingDeviceName(0, strRecordingName, + strRecordingGuid); + sourceId = strRecordingGuid; + } + + char strPlayoutName[256]; + char strPlayoutGuid[256]; + for (uint16_t i = 0; i < playout_devices; i++) { + base_->audio_device_->PlayoutDeviceName(i, strPlayoutName, + strPlayoutGuid); + if (deviceId != "" && deviceId == strPlayoutGuid) { + base_->audio_device_->SetPlayoutDevice(i); + } + } + + scoped_refptr source = + base_->factory_->CreateAudioSource("audio_input"); + std::string uuid = base_->GenerateUUID(); + scoped_refptr track = + 
base_->factory_->CreateAudioTrack(source, uuid.c_str()); + + std::string track_id = track->id().std_string(); + + EncodableMap track_info; + track_info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + track_info[EncodableValue("label")] = + EncodableValue(track->id().std_string()); + track_info[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + track_info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + + EncodableMap settings; + settings[EncodableValue("deviceId")] = EncodableValue(sourceId); + settings[EncodableValue("kind")] = EncodableValue("audioinput"); + settings[EncodableValue("autoGainControl")] = EncodableValue(true); + settings[EncodableValue("echoCancellation")] = EncodableValue(true); + settings[EncodableValue("noiseSuppression")] = EncodableValue(true); + settings[EncodableValue("channelCount")] = EncodableValue(1); + settings[EncodableValue("latency")] = EncodableValue(0); + track_info[EncodableValue("settings")] = EncodableValue(settings); + + EncodableList audioTracks; + audioTracks.push_back(EncodableValue(track_info)); + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + stream->AddTrack(track); + + base_->local_tracks_[track->id().std_string()] = track; + } +} + +std::string getFacingMode(const EncodableMap& mediaConstraints) { + return mediaConstraints.find(EncodableValue("facingMode")) != + mediaConstraints.end() + ? 
GetValue( + mediaConstraints.find(EncodableValue("facingMode"))->second) + : ""; +} + +EncodableValue getConstrainInt(const EncodableMap& constraints, + const std::string& key) { + EncodableValue value; + auto it = constraints.find(EncodableValue(key)); + if (it != constraints.end()) { + if (TypeIs(it->second)) { + return it->second; + } + + if (TypeIs(it->second)) { + EncodableMap innerMap = GetValue(it->second); + auto it2 = innerMap.find(EncodableValue("ideal")); + if (it2 != innerMap.end() && TypeIs(it2->second)) { + return it2->second; + } + } + } + + return EncodableValue(); +} + +void FlutterMediaStream::GetUserVideo(const EncodableMap& constraints, + scoped_refptr stream, + EncodableMap& params) { + EncodableMap video_constraints; + EncodableMap video_mandatory; + auto it = constraints.find(EncodableValue("video")); + if (it != constraints.end() && TypeIs(it->second)) { + video_constraints = GetValue(it->second); + if (video_constraints.find(EncodableValue("mandatory")) != + video_constraints.end()) { + video_mandatory = GetValue( + video_constraints.find(EncodableValue("mandatory"))->second); + } + } + + std::string facing_mode = getFacingMode(video_constraints); + // bool isFacing = facing_mode == "" || facing_mode != "environment"; + std::string sourceId = getSourceIdConstraint(video_constraints); + + EncodableValue widthValue = getConstrainInt(video_constraints, "width"); + + if (widthValue == EncodableValue()) + widthValue = findEncodableValue(video_mandatory, "minWidth"); + + if (widthValue == EncodableValue()) + widthValue = findEncodableValue(video_mandatory, "width"); + + EncodableValue heightValue = getConstrainInt(video_constraints, "height"); + + if (heightValue == EncodableValue()) + heightValue = findEncodableValue(video_mandatory, "minHeight"); + + if (heightValue == EncodableValue()) + heightValue = findEncodableValue(video_mandatory, "height"); + + EncodableValue fpsValue = getConstrainInt(video_constraints, "frameRate"); + + if (fpsValue 
== EncodableValue()) + fpsValue = findEncodableValue(video_mandatory, "minFrameRate"); + + if (fpsValue == EncodableValue()) + fpsValue = findEncodableValue(video_mandatory, "frameRate"); + + scoped_refptr video_capturer; + char strNameUTF8[256]; + char strGuidUTF8[256]; + int nb_video_devices = base_->video_device_->NumberOfDevices(); + + int32_t width = toInt(widthValue, DEFAULT_WIDTH); + int32_t height = toInt(heightValue, DEFAULT_HEIGHT); + int32_t fps = toInt(fpsValue, DEFAULT_FPS); + + for (int i = 0; i < nb_video_devices; i++) { + base_->video_device_->GetDeviceName(i, strNameUTF8, 256, strGuidUTF8, 256); + if (sourceId != "" && sourceId == strGuidUTF8) { + video_capturer = + base_->video_device_->Create(strNameUTF8, i, width, height, fps); + break; + } + } + + if (nb_video_devices == 0) + return; + + if (!video_capturer.get()) { + base_->video_device_->GetDeviceName(0, strNameUTF8, 128, strGuidUTF8, 128); + sourceId = strGuidUTF8; + video_capturer = + base_->video_device_->Create(strNameUTF8, 0, width, height, fps); + } + + if (!video_capturer.get()) + return; + + video_capturer->StartCapture(); + + const char* video_source_label = "video_input"; + scoped_refptr source = base_->factory_->CreateVideoSource( + video_capturer, video_source_label, + base_->ParseMediaConstraints(video_constraints)); + + std::string uuid = base_->GenerateUUID(); + scoped_refptr track = + base_->factory_->CreateVideoTrack(source, uuid.c_str()); + + EncodableList videoTracks; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + + EncodableMap settings; + settings[EncodableValue("deviceId")] = EncodableValue(sourceId); + settings[EncodableValue("kind")] = EncodableValue("videoinput"); + settings[EncodableValue("width")] = 
EncodableValue(width); + settings[EncodableValue("height")] = EncodableValue(height); + settings[EncodableValue("frameRate")] = EncodableValue(fps); + info[EncodableValue("settings")] = EncodableValue(settings); + + videoTracks.push_back(EncodableValue(info)); + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + stream->AddTrack(track); + + base_->local_tracks_[track->id().std_string()] = track; + base_->video_capturers_[track->id().std_string()] = video_capturer; +} + +void FlutterMediaStream::GetSources(std::unique_ptr result) { + EncodableList sources; + + int nb_audio_devices = base_->audio_device_->RecordingDevices(); + char strNameUTF8[RTCAudioDevice::kAdmMaxDeviceNameSize + 1] = {0}; + char strGuidUTF8[RTCAudioDevice::kAdmMaxGuidSize + 1] = {0}; + + for (uint16_t i = 0; i < nb_audio_devices; i++) { + base_->audio_device_->RecordingDeviceName(i, strNameUTF8, strGuidUTF8); + std::string device_id = strlen(strGuidUTF8) > 0 ? std::string(strGuidUTF8) + : std::string(strNameUTF8); + EncodableMap audio; + audio[EncodableValue("label")] = EncodableValue(std::string(strNameUTF8)); + audio[EncodableValue("deviceId")] = EncodableValue(device_id); + audio[EncodableValue("facing")] = ""; + audio[EncodableValue("kind")] = "audioinput"; + sources.push_back(EncodableValue(audio)); + } + + nb_audio_devices = base_->audio_device_->PlayoutDevices(); + for (uint16_t i = 0; i < nb_audio_devices; i++) { + base_->audio_device_->PlayoutDeviceName(i, strNameUTF8, strGuidUTF8); + std::string device_id = strlen(strGuidUTF8) > 0 ? 
std::string(strGuidUTF8) + : std::string(strNameUTF8); + EncodableMap audio; + audio[EncodableValue("label")] = EncodableValue(std::string(strNameUTF8)); + audio[EncodableValue("deviceId")] = EncodableValue(device_id); + audio[EncodableValue("facing")] = ""; + audio[EncodableValue("kind")] = "audiooutput"; + sources.push_back(EncodableValue(audio)); + } + + int nb_video_devices = base_->video_device_->NumberOfDevices(); + for (int i = 0; i < nb_video_devices; i++) { + base_->video_device_->GetDeviceName(i, strNameUTF8, 128, strGuidUTF8, 128); + EncodableMap video; + video[EncodableValue("label")] = EncodableValue(std::string(strNameUTF8)); + video[EncodableValue("deviceId")] = + EncodableValue(std::string(strGuidUTF8)); + video[EncodableValue("facing")] = i == 1 ? "front" : "back"; + video[EncodableValue("kind")] = "videoinput"; + sources.push_back(EncodableValue(video)); + } + EncodableMap params; + params[EncodableValue("sources")] = EncodableValue(sources); + result->Success(EncodableValue(params)); +} + +void FlutterMediaStream::SelectAudioOutput( + const std::string& device_id, + std::unique_ptr result) { + char deviceName[256]; + char deviceGuid[256]; + int playout_devices = base_->audio_device_->PlayoutDevices(); + bool found = false; + for (uint16_t i = 0; i < playout_devices; i++) { + base_->audio_device_->PlayoutDeviceName(i, deviceName, deviceGuid); + std::string cur_device_id = strlen(deviceGuid) > 0 + ? 
std::string(deviceGuid) + : std::string(deviceName); + if (device_id != "" && device_id == cur_device_id) { + base_->audio_device_->SetPlayoutDevice(i); + found = true; + break; + } + } + if (!found) { + result->Error("Bad Arguments", "Not found device id: " + device_id); + return; + } + result->Success(); +} + +void FlutterMediaStream::SelectAudioInput( + const std::string& device_id, + std::unique_ptr result) { + char deviceName[256]; + char deviceGuid[256]; + int playout_devices = base_->audio_device_->RecordingDevices(); + bool found = false; + for (uint16_t i = 0; i < playout_devices; i++) { + base_->audio_device_->RecordingDeviceName(i, deviceName, deviceGuid); + std::string cur_device_id = strlen(deviceGuid) > 0 + ? std::string(deviceGuid) + : std::string(deviceName); + if (device_id != "" && device_id == cur_device_id) { + base_->audio_device_->SetRecordingDevice(i); + found = true; + break; + } + } + if (!found) { + result->Error("Bad Arguments", "Not found device id: " + device_id); + return; + } + result->Success(); +} + +void FlutterMediaStream::MediaStreamGetTracks( + const std::string& stream_id, + std::unique_ptr result) { + scoped_refptr stream = base_->MediaStreamForId(stream_id); + + if (stream) { + EncodableMap params; + EncodableList audioTracks; + + auto audio_tracks = stream->audio_tracks(); + for (auto track : audio_tracks.std_vector()) { + base_->local_tracks_[track->id().std_string()] = track; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + info[EncodableValue("remote")] = EncodableValue(true); + info[EncodableValue("readyState")] = "live"; + audioTracks.push_back(EncodableValue(info)); + } + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + + 
EncodableList videoTracks; + auto video_tracks = stream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + base_->local_tracks_[track->id().std_string()] = track; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + info[EncodableValue("remote")] = EncodableValue(true); + info[EncodableValue("readyState")] = "live"; + videoTracks.push_back(EncodableValue(info)); + } + + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + result->Success(EncodableValue(params)); + } else { + result->Error("MediaStreamGetTracksFailed", + "MediaStreamGetTracks() media stream is null !"); + } +} + +void FlutterMediaStream::MediaStreamDispose( + const std::string& stream_id, + std::unique_ptr result) { + scoped_refptr stream = base_->MediaStreamForId(stream_id); + + if (!stream) { + result->Error("MediaStreamDisposeFailed", + "stream [" + stream_id + "] not found!"); + return; + } + + vector> audio_tracks = stream->audio_tracks(); + + for (auto track : audio_tracks.std_vector()) { + stream->RemoveTrack(track); + base_->local_tracks_.erase(track->id().std_string()); + } + + vector> video_tracks = stream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + stream->RemoveTrack(track); + base_->local_tracks_.erase(track->id().std_string()); + if (base_->video_capturers_.find(track->id().std_string()) != + base_->video_capturers_.end()) { + auto video_capture = base_->video_capturers_[track->id().std_string()]; + if (video_capture->CaptureStarted()) { + video_capture->StopCapture(); + } + base_->video_capturers_.erase(track->id().std_string()); + } + } + + base_->RemoveStreamForId(stream_id); + result->Success(); +} + +void FlutterMediaStream::CreateLocalMediaStream( + 
std::unique_ptr result) { + std::string uuid = base_->GenerateUUID(); + scoped_refptr stream = + base_->factory_->CreateStream(uuid.c_str()); + + EncodableMap params; + params[EncodableValue("streamId")] = EncodableValue(uuid); + + base_->local_streams_[uuid] = stream; + result->Success(EncodableValue(params)); +} + +void FlutterMediaStream::MediaStreamTrackSetEnable( + const std::string& track_id, + std::unique_ptr result) { + result->NotImplemented(); +} + +void FlutterMediaStream::MediaStreamTrackSwitchCamera( + const std::string& track_id, + std::unique_ptr result) { + result->NotImplemented(); +} + +void FlutterMediaStream::MediaStreamTrackDispose( + const std::string& track_id, + std::unique_ptr result) { + for (auto it : base_->local_streams_) { + auto stream = it.second; + auto audio_tracks = stream->audio_tracks(); + for (auto track : audio_tracks.std_vector()) { + if (track->id().std_string() == track_id) { + stream->RemoveTrack(track); + } + } + auto video_tracks = stream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + if (track->id().std_string() == track_id) { + stream->RemoveTrack(track); + + if (base_->video_capturers_.find(track_id) != + base_->video_capturers_.end()) { + auto video_capture = base_->video_capturers_[track_id]; + if (video_capture->CaptureStarted()) { + video_capture->StopCapture(); + } + base_->video_capturers_.erase(track_id); + } + } + } + } + base_->RemoveMediaTrackForId(track_id); + result->Success(); +} +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_peerconnection.cc b/common/cpp/src/flutter_peerconnection.cc new file mode 100644 index 0000000000..691ec29f19 --- /dev/null +++ b/common/cpp/src/flutter_peerconnection.cc @@ -0,0 +1,1387 @@ +#include "flutter_peerconnection.h" + +#include "base/scoped_ref_ptr.h" +#include "flutter_data_channel.h" +#include "flutter_frame_capturer.h" +#include "rtc_dtmf_sender.h" +#include "rtc_rtp_parameters.h" + +namespace flutter_webrtc_plugin { + 
+std::string RTCMediaTypeToString(RTCMediaType type) { + switch (type) { + case libwebrtc::RTCMediaType::AUDIO: + return "audio"; + case libwebrtc::RTCMediaType::VIDEO: + return "video"; + case libwebrtc::RTCMediaType::DATA: + return "data"; + case libwebrtc::RTCMediaType::UNSUPPORTED: + return "unsupported"; + } + return ""; +} + +std::string transceiverDirectionString(RTCRtpTransceiverDirection direction) { + switch (direction) { + case RTCRtpTransceiverDirection::kSendRecv: + return "sendrecv"; + case RTCRtpTransceiverDirection::kSendOnly: + return "sendonly"; + case RTCRtpTransceiverDirection::kRecvOnly: + return "recvonly"; + case RTCRtpTransceiverDirection::kInactive: + return "inactive"; + case RTCRtpTransceiverDirection::kStopped: + return "stoped"; + } + return ""; +} + +const char* iceConnectionStateString(RTCIceConnectionState state) { + switch (state) { + case RTCIceConnectionStateNew: + return "new"; + case RTCIceConnectionStateChecking: + return "checking"; + case RTCIceConnectionStateConnected: + return "connected"; + case RTCIceConnectionStateCompleted: + return "completed"; + case RTCIceConnectionStateFailed: + return "failed"; + case RTCIceConnectionStateDisconnected: + return "disconnected"; + case RTCIceConnectionStateClosed: + return "closed"; + case RTCIceConnectionStateMax: + return "statemax"; + } + return ""; +} + +const char* signalingStateString(RTCSignalingState state) { + switch (state) { + case RTCSignalingStateStable: + return "stable"; + case RTCSignalingStateHaveLocalOffer: + return "have-local-offer"; + case RTCSignalingStateHaveLocalPrAnswer: + return "have-local-pranswer"; + case RTCSignalingStateHaveRemoteOffer: + return "have-remote-offer"; + case RTCSignalingStateHaveRemotePrAnswer: + return "have-remote-pranswer"; + case RTCSignalingStateClosed: + return "closed"; + } + return ""; +} + +const char* peerConnectionStateString(RTCPeerConnectionState state) { + switch (state) { + case RTCPeerConnectionStateNew: + return "new"; + 
case RTCPeerConnectionStateConnecting: + return "connecting"; + case RTCPeerConnectionStateConnected: + return "connected"; + case RTCPeerConnectionStateDisconnected: + return "disconnected"; + case RTCPeerConnectionStateFailed: + return "failed"; + case RTCPeerConnectionStateClosed: + return "closed"; + } + return ""; +} + +const char* iceGatheringStateString(RTCIceGatheringState state) { + switch (state) { + case RTCIceGatheringStateNew: + return "new"; + case RTCIceGatheringStateGathering: + return "gathering"; + case RTCIceGatheringStateComplete: + return "complete"; + } + return ""; +} + +EncodableMap rtpParametersToMap( + libwebrtc::scoped_refptr rtpParameters) { + EncodableMap info; + info[EncodableValue("transactionId")] = + EncodableValue(rtpParameters->transaction_id().std_string()); + + EncodableMap rtcp; + rtcp[EncodableValue("cname")] = + EncodableValue(rtpParameters->rtcp_parameters()->cname().std_string()); + rtcp[EncodableValue("reducedSize")] = + EncodableValue(rtpParameters->rtcp_parameters()->reduced_size()); + + info[EncodableValue("rtcp")] = EncodableValue((rtcp)); + + EncodableList headerExtensions; + auto header_extensions = rtpParameters->header_extensions(); + for (scoped_refptr extension : + header_extensions.std_vector()) { + EncodableMap map; + map[EncodableValue("uri")] = EncodableValue(extension->uri().std_string()); + map[EncodableValue("id")] = EncodableValue(extension->id()); + map[EncodableValue("encrypted")] = EncodableValue(extension->encrypt()); + headerExtensions.push_back(EncodableValue(map)); + } + info[EncodableValue("headerExtensions")] = EncodableValue(headerExtensions); + + EncodableList encodings_info; + auto encodings = rtpParameters->encodings(); + for (scoped_refptr encoding : + encodings.std_vector()) { + EncodableMap map; + map[EncodableValue("active")] = EncodableValue(encoding->active()); + map[EncodableValue("maxBitrate")] = + EncodableValue(encoding->max_bitrate_bps()); + map[EncodableValue("minBitrate")] = + 
EncodableValue(encoding->min_bitrate_bps()); + map[EncodableValue("maxFramerate")] = + EncodableValue(static_cast(encoding->max_framerate())); + map[EncodableValue("scaleResolutionDownBy")] = + EncodableValue(encoding->scale_resolution_down_by()); + map[EncodableValue("scalabilityMode")] = + EncodableValue(encoding->scalability_mode().std_string()); + map[EncodableValue("ssrc")] = + EncodableValue(static_cast(encoding->ssrc())); + encodings_info.push_back(EncodableValue(map)); + } + info[EncodableValue("encodings")] = EncodableValue(encodings_info); + + EncodableList codecs_info; + auto codecs = rtpParameters->codecs(); + for (scoped_refptr codec : codecs.std_vector()) { + EncodableMap map; + map[EncodableValue("name")] = EncodableValue(codec->name().std_string()); + map[EncodableValue("payloadType")] = EncodableValue(codec->payload_type()); + map[EncodableValue("clockRate")] = EncodableValue(codec->clock_rate()); + map[EncodableValue("numChannels")] = EncodableValue(codec->num_channels()); + + EncodableMap param; + auto parameters = codec->parameters(); + for (auto item : parameters.std_vector()) { + param[EncodableValue(item.first.std_string())] = + EncodableValue(item.second.std_string()); + } + map[EncodableValue("parameters")] = EncodableValue(param); + + map[EncodableValue("kind")] = + EncodableValue(RTCMediaTypeToString(codec->kind())); + + codecs_info.push_back(EncodableValue(map)); + } + info[EncodableValue("codecs")] = EncodableValue(codecs_info); + + switch (rtpParameters->GetDegradationPreference()) { + case libwebrtc::RTCDegradationPreference::MAINTAIN_FRAMERATE: + info[EncodableValue("degradationPreference")] = + EncodableValue("maintain-framerate"); + break; + case libwebrtc::RTCDegradationPreference::MAINTAIN_RESOLUTION: + info[EncodableValue("degradationPreference")] = + EncodableValue("maintain-resolution"); + break; + case libwebrtc::RTCDegradationPreference::BALANCED: + info[EncodableValue("degradationPreference")] = + 
EncodableValue("balanced"); + break; + case libwebrtc::RTCDegradationPreference::DISABLED: + info[EncodableValue("degradationPreference")] = + EncodableValue("disabled"); + break; + default: + info[EncodableValue("degradationPreference")] = + EncodableValue("balanced"); + break; + } + + return info; +} + +EncodableMap dtmfSenderToMap(scoped_refptr dtmfSender, + std::string id) { + EncodableMap info; + if (nullptr != dtmfSender.get()) { + info[EncodableValue("dtmfSenderId")] = EncodableValue(id); + if (dtmfSender.get()) { + info[EncodableValue("interToneGap")] = + EncodableValue(dtmfSender->inter_tone_gap()); + info[EncodableValue("duration")] = EncodableValue(dtmfSender->duration()); + } + } + return info; +} + +EncodableMap mediaTrackToMap( + libwebrtc::scoped_refptr track) { + EncodableMap info; + if (nullptr == track.get()) { + return info; + } + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("video")) { + info[EncodableValue("readyState")] = + EncodableValue(static_cast(track.get())->state()); + info[EncodableValue("label")] = EncodableValue("video"); + } else if (0 == kind.compare("audio")) { + info[EncodableValue("readyState")] = + EncodableValue(static_cast(track.get())->state()); + info[EncodableValue("label")] = EncodableValue("audio"); + } + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + + return info; +} + +EncodableMap rtpSenderToMap( + libwebrtc::scoped_refptr sender) { + EncodableMap info; + std::string id = sender->id().std_string(); + info[EncodableValue("senderId")] = EncodableValue(id); + info[EncodableValue("ownsTrack")] = EncodableValue(true); + info[EncodableValue("dtmfSender")] = + EncodableValue(dtmfSenderToMap(sender->dtmf_sender(), id)); + info[EncodableValue("rtpParameters")] = + EncodableValue(rtpParametersToMap(sender->parameters())); + 
info[EncodableValue("track")] = + EncodableValue(mediaTrackToMap(sender->track())); + return info; +} + +std::string trackStateToString(libwebrtc::RTCMediaTrack::RTCTrackState state) { + switch (state) { + case libwebrtc::RTCMediaTrack::kLive: + return "live"; + case libwebrtc::RTCMediaTrack::kEnded: + return "ended"; + default: + return ""; + } +} + +EncodableMap rtpReceiverToMap( + libwebrtc::scoped_refptr receiver) { + EncodableMap info; + info[EncodableValue("receiverId")] = + EncodableValue(receiver->id().std_string()); + info[EncodableValue("rtpParameters")] = + EncodableValue(rtpParametersToMap(receiver->parameters())); + info[EncodableValue("track")] = + EncodableValue(mediaTrackToMap(receiver->track())); + return info; +} + +EncodableMap transceiverToMap(scoped_refptr transceiver) { + EncodableMap info; + info[EncodableValue("transceiverId")] = + EncodableValue(transceiver->transceiver_id().std_string()); + info[EncodableValue("mid")] = EncodableValue(transceiver->mid().std_string()); + info[EncodableValue("direction")] = + EncodableValue(transceiverDirectionString(transceiver->direction())); + info[EncodableValue("sender")] = + EncodableValue(rtpSenderToMap(transceiver->sender())); + info[EncodableValue("receiver")] = + EncodableValue(rtpReceiverToMap(transceiver->receiver())); + return info; +} + +EncodableMap mediaStreamToMap(scoped_refptr stream, + std::string id) { + EncodableMap params; + params[EncodableValue("streamId")] = + EncodableValue(stream->id().std_string()); + params[EncodableValue("ownerTag")] = EncodableValue(id); + EncodableList audioTracks; + auto audio_tracks = stream->audio_tracks(); + for (scoped_refptr val : audio_tracks.std_vector()) { + audioTracks.push_back(EncodableValue(mediaTrackToMap(val))); + } + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + + EncodableList videoTracks; + auto video_tracks = stream->video_tracks(); + for (scoped_refptr val : video_tracks.std_vector()) { + 
videoTracks.push_back(EncodableValue(mediaTrackToMap(val))); + } + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + return params; +} + +void FlutterPeerConnection::CreateRTCPeerConnection( + const EncodableMap& configurationMap, + const EncodableMap& constraintsMap, + std::unique_ptr result) { + // std::cout << " configuration = " << configurationMap.StringValue() << + // std::endl; + base_->ParseRTCConfiguration(configurationMap, base_->configuration_); + // std::cout << " constraints = " << constraintsMap.StringValue() << + // std::endl; + scoped_refptr constraints = + base_->ParseMediaConstraints(constraintsMap); + + std::string uuid = base_->GenerateUUID(); + scoped_refptr pc = + base_->factory_->Create(base_->configuration_, constraints); + base_->peerconnections_[uuid] = pc; + + std::string event_channel = "FlutterWebRTC/peerConnectionEvent" + uuid; + + std::unique_ptr observer( + new FlutterPeerConnectionObserver(base_, pc, base_->messenger_, + base_->task_runner_, + event_channel, uuid)); + + base_->peerconnection_observers_[uuid] = std::move(observer); + + EncodableMap params; + params[EncodableValue("peerConnectionId")] = EncodableValue(uuid); + result->Success(EncodableValue(params)); +} + +void FlutterPeerConnection::RTCPeerConnectionClose( + RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result) { + auto it2 = base_->peerconnections_.find(uuid); + if (it2 != base_->peerconnections_.end()) { + it2->second->Close(); + base_->peerconnections_.erase(it2); + } + + auto it = base_->peerconnection_observers_.find(uuid); + if (it != base_->peerconnection_observers_.end()) + base_->peerconnection_observers_.erase(it); + + result->Success(); +} + +void FlutterPeerConnection::RTCPeerConnectionDispose( + RTCPeerConnection* pc, + const std::string& uuid, + std::unique_ptr result) { + result->Success(); +} + +void FlutterPeerConnection::CreateOffer( + const EncodableMap& constraintsMap, + RTCPeerConnection* pc, + 
std::unique_ptr result) { + scoped_refptr constraints = + base_->ParseMediaConstraints(constraintsMap); + std::shared_ptr result_ptr(result.release()); + pc->CreateOffer( + [result_ptr](const libwebrtc::string sdp, const libwebrtc::string type) { + EncodableMap params; + params[EncodableValue("sdp")] = EncodableValue(sdp.std_string()); + params[EncodableValue("type")] = EncodableValue(type.std_string()); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("createOfferFailed", error); + }, + constraints); +} + +void FlutterPeerConnection::CreateAnswer( + const EncodableMap& constraintsMap, + RTCPeerConnection* pc, + std::unique_ptr result) { + scoped_refptr constraints = + base_->ParseMediaConstraints(constraintsMap); + std::shared_ptr result_ptr(result.release()); + pc->CreateAnswer( + [result_ptr](const libwebrtc::string sdp, const libwebrtc::string type) { + EncodableMap params; + params[EncodableValue("sdp")] = EncodableValue(sdp.std_string()); + params[EncodableValue("type")] = EncodableValue(type.std_string()); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("createAnswerFailed", error); + }, + constraints); +} + +void FlutterPeerConnection::SetLocalDescription( + RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->SetLocalDescription( + sdp->sdp(), sdp->type(), [result_ptr]() { result_ptr->Success(); }, + [result_ptr](const char* error) { + result_ptr->Error("setLocalDescriptionFailed", error); + }); +} + +void FlutterPeerConnection::SetRemoteDescription( + RTCSessionDescription* sdp, + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->SetRemoteDescription( + sdp->sdp(), sdp->type(), [result_ptr]() { result_ptr->Success(); }, + [result_ptr](const char* error) { + result_ptr->Error("setRemoteDescriptionFailed", 
error); + }); +} + +void FlutterPeerConnection::GetLocalDescription( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->GetLocalDescription( + [result_ptr](const char* sdp, const char* type) { + EncodableMap params; + params[EncodableValue("sdp")] = sdp; + params[EncodableValue("type")] = type; + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("getLocalDescriptionFailed", error); + }); +} + +void FlutterPeerConnection::GetRemoteDescription( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + pc->GetRemoteDescription( + [result_ptr](const char* sdp, const char* type) { + EncodableMap params; + params[EncodableValue("sdp")] = sdp; + params[EncodableValue("type")] = type; + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("getRemoteDescriptionFailed", error); + }); +} + +scoped_refptr +FlutterPeerConnection::mapToRtpTransceiverInit(const EncodableMap& params) { + EncodableList streamIds = findList(params, "streamIds"); + + std::vector stream_ids; + for (auto item : streamIds) { + std::string id = GetValue(item); + stream_ids.push_back(id.c_str()); + } + RTCRtpTransceiverDirection dir = RTCRtpTransceiverDirection::kInactive; + EncodableValue direction = findEncodableValue(params, "direction"); + if (!direction.IsNull()) { + dir = stringToTransceiverDirection(GetValue(direction)); + } + EncodableList sendEncodings = findList(params, "sendEncodings"); + std::vector> encodings; + for (EncodableValue value : sendEncodings) { + encodings.push_back(mapToEncoding(GetValue(value))); + } + scoped_refptr init = + RTCRtpTransceiverInit::Create(dir, stream_ids, encodings); + return init; +} + +RTCRtpTransceiverDirection FlutterPeerConnection::stringToTransceiverDirection( + std::string direction) { + if (0 == direction.compare("sendrecv")) { + return 
RTCRtpTransceiverDirection::kSendRecv; + } else if (0 == direction.compare("sendonly")) { + return RTCRtpTransceiverDirection::kSendOnly; + } else if (0 == direction.compare("recvonly")) { + return RTCRtpTransceiverDirection::kRecvOnly; + } else if (0 == direction.compare("stoped")) { + return RTCRtpTransceiverDirection::kStopped; + } else if (0 == direction.compare("inactive")) { + return RTCRtpTransceiverDirection::kInactive; + } + return RTCRtpTransceiverDirection::kInactive; +} + +libwebrtc::scoped_refptr +FlutterPeerConnection::mapToEncoding(const EncodableMap& params) { + libwebrtc::scoped_refptr encoding = + RTCRtpEncodingParameters::Create(); + + encoding->set_active(true); + encoding->set_scale_resolution_down_by(1.0); + + EncodableValue value = findEncodableValue(params, "active"); + if (!value.IsNull()) { + encoding->set_active(GetValue(value)); + } + + value = findEncodableValue(params, "rid"); + if (!value.IsNull()) { + const std::string rid = GetValue(value); + encoding->set_rid(rid.c_str()); + } + + value = findEncodableValue(params, "ssrc"); + if (!value.IsNull()) { + encoding->set_ssrc((uint32_t)GetValue(value)); + } + + value = findEncodableValue(params, "minBitrate"); + if (!value.IsNull()) { + encoding->set_min_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(params, "maxBitrate"); + if (!value.IsNull()) { + encoding->set_max_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(params, "maxFramerate"); + if (!value.IsNull()) { + encoding->set_max_framerate(GetValue(value)); + } + + value = findEncodableValue(params, "numTemporalLayers"); + if (!value.IsNull()) { + encoding->set_num_temporal_layers(GetValue(value)); + } + + value = findEncodableValue(params, "scaleResolutionDownBy"); + if (!value.IsNull()) { + encoding->set_scale_resolution_down_by(GetValue(value)); + } + + value = findEncodableValue(params, "scalabilityMode"); + if (!value.IsNull()) { + encoding->set_scalability_mode(GetValue(value)); + } + + return 
encoding; +} + +RTCMediaType stringToMediaType(const std::string& mediaType) { + RTCMediaType type = RTCMediaType::UNSUPPORTED; + if (mediaType == "audio") + type = RTCMediaType::AUDIO; + else if (mediaType == "video") + type = RTCMediaType::VIDEO; + else if (mediaType == "data") + type = RTCMediaType::DATA; + return type; +} + +void FlutterPeerConnection::AddTransceiver( + RTCPeerConnection* pc, + const std::string& trackId, + const std::string& mediaType, + const EncodableMap& transceiverInit, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + RTCMediaTrack* track = base_->MediaTrackForId(trackId); + RTCMediaType type = stringToMediaType(mediaType); + + if (0 < transceiverInit.size()) { + auto transceiver = + track != nullptr ? pc->AddTransceiver( + track, mapToRtpTransceiverInit(transceiverInit)) + : pc->AddTransceiver( + type, mapToRtpTransceiverInit(transceiverInit)); + if (nullptr != transceiver.get()) { + result_ptr->Success(EncodableValue(transceiverToMap(transceiver))); + return; + } + result_ptr->Error("AddTransceiver(track | mediaType, init)", + "AddTransceiver error"); + } else { + auto transceiver = + track != nullptr ? 
pc->AddTransceiver(track) : pc->AddTransceiver(type); + if (nullptr != transceiver.get()) { + result_ptr->Success(EncodableValue(transceiverToMap(transceiver))); + return; + } + result_ptr->Error("AddTransceiver(track, mediaType)", + "AddTransceiver error"); + } +} + +void FlutterPeerConnection::GetTransceivers( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + EncodableMap map; + EncodableList info; + auto transceivers = pc->transceivers(); + for (scoped_refptr transceiver : + transceivers.std_vector()) { + info.push_back(EncodableValue(transceiverToMap(transceiver))); + } + map[EncodableValue("transceivers")] = EncodableValue(info); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::GetReceivers( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + EncodableMap map; + EncodableList info; + auto receivers = pc->receivers(); + for (scoped_refptr receiver : receivers.std_vector()) { + info.push_back(EncodableValue(rtpReceiverToMap(receiver))); + } + map[EncodableValue("receivers")] = EncodableValue(info); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::RtpSenderSetTrack( + RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderSetTrack", "sender is null"); + return; + } + sender->set_track(track); + result_ptr->Success(); +} + +void FlutterPeerConnection::RtpSenderSetStream( + RTCPeerConnection* pc, + std::vector streamIds, + std::string rtpSenderId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderSetTrack", "sender is null"); + return; + } + 
sender->set_stream_ids(streamIds); + result_ptr->Success(); +} + +void FlutterPeerConnection::RtpSenderReplaceTrack( + RTCPeerConnection* pc, + RTCMediaTrack* track, + std::string rtpSenderId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderReplaceTrack", "sender is null"); + return; + } + + sender->set_track(track); + + result_ptr->Success(); +} + +scoped_refptr FlutterPeerConnection::updateRtpParameters( + EncodableMap newParameters, + scoped_refptr parameters) { + EncodableList encodings = findList(newParameters, "encodings"); + auto encoding = encodings.begin(); + auto params = parameters->encodings(); + for (auto param : params.std_vector()) { + if (encoding != encodings.end()) { + EncodableMap map = GetValue(*encoding); + EncodableValue value = findEncodableValue(map, "active"); + if (!value.IsNull()) { + param->set_active(GetValue(value)); + } + value = findEncodableValue(map, "rid"); + if (!value.IsNull()) { + param->set_rid(GetValue(value)); + } + value = findEncodableValue(map, "ssrc"); + if (!value.IsNull()) { + param->set_ssrc(GetValue(value)); + } + value = findEncodableValue(map, "maxBitrate"); + if (!value.IsNull()) { + param->set_max_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(map, "minBitrate"); + if (!value.IsNull()) { + param->set_min_bitrate_bps(GetValue(value)); + } + + value = findEncodableValue(map, "maxFramerate"); + if (!value.IsNull()) { + param->set_max_framerate(GetValue(value)); + } + value = findEncodableValue(map, "numTemporalLayers"); + if (!value.IsNull()) { + param->set_num_temporal_layers(GetValue(value)); + } + value = findEncodableValue(map, "scaleResolutionDownBy"); + if (!value.IsNull()) { + param->set_scale_resolution_down_by(GetValue(value)); + } + value = findEncodableValue(map, "scalabilityMode"); + if (!value.IsNull()) { + 
param->set_scalability_mode(GetValue(value)); + } + encoding++; + } + } + + EncodableValue value = + findEncodableValue(newParameters, "degradationPreference"); + if (!value.IsNull()) { + const std::string degradationPreference = GetValue(value); + if (degradationPreference == "maintain-framerate") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::MAINTAIN_FRAMERATE); + } else if (degradationPreference == "maintain-resolution") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::MAINTAIN_RESOLUTION); + } else if (degradationPreference == "balanced") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::BALANCED); + } else if (degradationPreference == "disabled") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::DISABLED); + } + } + + return parameters; +} + +void FlutterPeerConnection::RtpSenderSetParameters( + RTCPeerConnection* pc, + std::string rtpSenderId, + const EncodableMap& parameters, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderSetParameters", "sender is null"); + return; + } + + auto param = sender->parameters(); + param = updateRtpParameters(parameters, param); + bool success = sender->set_parameters(param); + + EncodableMap map; + map[EncodableValue("result")] = EncodableValue(success); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::RtpTransceiverStop( + RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("rtpTransceiverStop", "transceiver is null"); + return; + } + transceiver->StopInternal(); + result_ptr->Success(); +} + +void 
FlutterPeerConnection::RtpTransceiverGetCurrentDirection( + RTCPeerConnection* pc, + std::string transceiverId, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("rtpTransceiverGetCurrentDirection", + "transceiver is null"); + return; + } + EncodableMap map; + map[EncodableValue("result")] = EncodableValue( + transceiverDirectionString(transceiver->current_direction())); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::SetConfiguration( + RTCPeerConnection* pc, + const EncodableMap& configuration, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + // TODO pc->SetConfiguration(); + + result_ptr->Success(); +} + +void FlutterPeerConnection::CaptureFrame( + RTCVideoTrack* track, + std::string path, + std::unique_ptr result) { + FlutterFrameCapturer capturer(track, path); + capturer.CaptureFrame(std::move(result)); +} + +scoped_refptr FlutterPeerConnection::getRtpTransceiverById( + RTCPeerConnection* pc, + std::string id) { + scoped_refptr result; + auto transceivers = pc->transceivers(); + for (scoped_refptr transceiver : + transceivers.std_vector()) { + if (nullptr == result.get() && + 0 == id.compare(transceiver->transceiver_id().std_string())) { + result = transceiver; + } + } + return result; +} + +void FlutterPeerConnection::RtpTransceiverSetDirection( + RTCPeerConnection* pc, + std::string transceiverId, + std::string direction, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("RtpTransceiverSetDirection", " transceiver is null "); + return; + } + auto res = transceiver->SetDirectionWithError( + stringToTransceiverDirection(direction)); + if (res.std_string() == "") { + result_ptr->Success(); + } else { + 
result_ptr->Error("RtpTransceiverSetDirection", res.std_string()); + } +} + +void FlutterPeerConnection::RtpTransceiverSetCodecPreferences( + RTCPeerConnection* pc, + std::string transceiverId, + const EncodableList codecs, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto transceiver = getRtpTransceiverById(pc, transceiverId); + if (nullptr == transceiver.get()) { + result_ptr->Error("RtpTransceiverSetCodecPreferences", + " transceiver is null "); + return; + } + std::vector> codecList; + for (auto codec : codecs) { + auto codecMap = GetValue(codec); + auto codecMimeType = findString(codecMap, "mimeType"); + auto codecClockRate = findInt(codecMap, "clockRate"); + auto codecNumChannels = findInt(codecMap, "channels"); + auto codecSdpFmtpLine = findString(codecMap, "sdpFmtpLine"); + auto codecCapability = RTCRtpCodecCapability::Create(); + if (codecSdpFmtpLine != std::string() && codecSdpFmtpLine.length() != 0) + codecCapability->set_sdp_fmtp_line(codecSdpFmtpLine); + codecCapability->set_clock_rate(codecClockRate); + if (codecNumChannels != -1) + codecCapability->set_channels(codecNumChannels); + codecCapability->set_mime_type(codecMimeType); + codecList.push_back(codecCapability); + } + transceiver->SetCodecPreferences(codecList); + result_ptr->Success(); +} + +void FlutterPeerConnection::GetSenders( + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + + EncodableMap map; + EncodableList info; + auto senders = pc->senders(); + for (scoped_refptr sender : senders.std_vector()) { + info.push_back(EncodableValue(rtpSenderToMap(sender))); + } + map[EncodableValue("senders")] = EncodableValue(info); + result_ptr->Success(EncodableValue(map)); +} + +void FlutterPeerConnection::AddIceCandidate( + RTCIceCandidate* candidate, + RTCPeerConnection* pc, + std::unique_ptr result) { + pc->AddCandidate(candidate->sdp_mid(), candidate->sdp_mline_index(), + candidate->candidate()); + + 
result->Success(); +} + +EncodableMap statsToMap(const scoped_refptr& stats) { + EncodableMap report_map; + report_map[EncodableValue("id")] = EncodableValue(stats->id().std_string()); + report_map[EncodableValue("type")] = + EncodableValue(stats->type().std_string()); + report_map[EncodableValue("timestamp")] = + EncodableValue(static_cast(stats->timestamp_us())); + EncodableMap values; + auto members = stats->Members(); + for (int i = 0; i < members.size(); i++) { + auto member = members[i]; + switch (member->GetType()) { + case RTCStatsMember::Type::kBool: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueBool()); + break; + case RTCStatsMember::Type::kInt32: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueInt32()); + break; + case RTCStatsMember::Type::kUint32: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue((int64_t)member->ValueUint32()); + break; + case RTCStatsMember::Type::kInt64: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueInt64()); + break; + case RTCStatsMember::Type::kUint64: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue((int64_t)member->ValueUint64()); + break; + case RTCStatsMember::Type::kDouble: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueDouble()); + break; + case RTCStatsMember::Type::kString: + values[EncodableValue(member->GetName().std_string())] = + EncodableValue(member->ValueString().std_string()); + break; + default: + break; + } + } + report_map[EncodableValue("values")] = EncodableValue(values); + return report_map; +} + +void FlutterPeerConnection::GetStats( + const std::string& track_id, + RTCPeerConnection* pc, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + scoped_refptr track = base_->MediaTracksForId(track_id); + if (track != nullptr && track_id != "") { + bool found = false; + 
auto receivers = pc->receivers(); + for (auto receiver : receivers.std_vector()) { + if (receiver->track() && receiver->track()->id().c_string() == track_id) { + found = true; + pc->GetStats( + receiver, + [result_ptr](const vector> reports) { + EncodableList list; + for (int i = 0; i < reports.size(); i++) { + list.push_back(EncodableValue(statsToMap(reports[i]))); + } + EncodableMap params; + params[EncodableValue("stats")] = EncodableValue(list); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("GetStats", error); + }); + return; + } + } + auto senders = pc->senders(); + for (auto sender : senders.std_vector()) { + if (sender->track() && sender->track()->id().c_string() == track_id) { + found = true; + pc->GetStats( + sender, + [result_ptr](const vector> reports) { + EncodableList list; + for (int i = 0; i < reports.size(); i++) { + list.push_back(EncodableValue(statsToMap(reports[i]))); + } + EncodableMap params; + params[EncodableValue("stats")] = EncodableValue(list); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("GetStats", error); + }); + return; + } + } + if (!found) { + result_ptr->Error("GetStats", "Track not found"); + } + } else { + pc->GetStats( + [result_ptr](const vector> reports) { + EncodableList list; + for (int i = 0; i < reports.size(); i++) { + list.push_back(EncodableValue(statsToMap(reports[i]))); + } + EncodableMap params; + params[EncodableValue("stats")] = EncodableValue(list); + result_ptr->Success(EncodableValue(params)); + }, + [result_ptr](const char* error) { + result_ptr->Error("GetStats", error); + }); + } +} + +void FlutterPeerConnection::MediaStreamAddTrack( + scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("audio")) { + stream->AddTrack(static_cast(track.get())); + } 
else if (0 == kind.compare("video")) { + stream->AddTrack(static_cast(track.get())); + } + + result_ptr->Success(); +} + +void FlutterPeerConnection::MediaStreamRemoveTrack( + scoped_refptr stream, + scoped_refptr track, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("audio")) { + stream->RemoveTrack(static_cast(track.get())); + } else if (0 == kind.compare("video")) { + stream->RemoveTrack(static_cast(track.get())); + } + + result_ptr->Success(); +} + +void FlutterPeerConnection::AddTrack( + RTCPeerConnection* pc, + scoped_refptr track, + std::vector streamIds, + std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + std::string kind = track->kind().std_string(); + if (0 == kind.compare("audio")) { + auto sender = + pc->AddTrack(reinterpret_cast(track.get()), streamIds); + if (sender.get() != nullptr) { + result_ptr->Success(EncodableValue(rtpSenderToMap(sender))); + return; + } + } else if (0 == kind.compare("video")) { + auto sender = + pc->AddTrack(reinterpret_cast(track.get()), streamIds); + if (sender.get() != nullptr) { + result_ptr->Success(EncodableValue(rtpSenderToMap(sender))); + return; + } + } + result->Success(); +} + +void FlutterPeerConnection::RemoveTrack( + RTCPeerConnection* pc, + std::string senderId, + std::unique_ptr result) { + auto sender = base_->GetRtpSenderById(pc, senderId); + if (nullptr == sender.get()) { + result->Error("RemoveTrack", "not find RtpSender "); + return; + } + + EncodableMap map; + map[EncodableValue("result")] = EncodableValue(pc->RemoveTrack(sender)); + + result->Success(EncodableValue(map)); +} + +FlutterPeerConnectionObserver::FlutterPeerConnectionObserver( + FlutterWebRTCBase* base, + scoped_refptr peerconnection, + BinaryMessenger* messenger, + TaskRunner* task_runner, + const std::string& channel_name, + std::string& peerConnectionId) + : event_channel_(EventChannelProxy::Create(messenger, 
task_runner, channel_name)), + peerconnection_(peerconnection), + base_(base), + id_(peerConnectionId) { + peerconnection->RegisterRTCPeerConnectionObserver(this); +} + + +void FlutterPeerConnectionObserver::OnSignalingState(RTCSignalingState state) { + EncodableMap params; + params[EncodableValue("event")] = "signalingState"; + params[EncodableValue("state")] = signalingStateString(state); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnPeerConnectionState( + RTCPeerConnectionState state) { + EncodableMap params; + params[EncodableValue("event")] = "peerConnectionState"; + params[EncodableValue("state")] = peerConnectionStateString(state); + event_channel_->Success(EncodableValue(params)); +} + + +void FlutterPeerConnectionObserver::OnIceGatheringState( + RTCIceGatheringState state) { + EncodableMap params; + params[EncodableValue("event")] = "iceGatheringState"; + params[EncodableValue("state")] = iceGatheringStateString(state); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnIceConnectionState( + RTCIceConnectionState state) { + EncodableMap params; + params[EncodableValue("event")] = "iceConnectionState"; + params[EncodableValue("state")] = iceConnectionStateString(state); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnIceCandidate( + scoped_refptr candidate) { + EncodableMap params; + params[EncodableValue("event")] = "onCandidate"; + EncodableMap cand; + cand[EncodableValue("candidate")] = + EncodableValue(candidate->candidate().std_string()); + cand[EncodableValue("sdpMLineIndex")] = + EncodableValue(candidate->sdp_mline_index()); + cand[EncodableValue("sdpMid")] = + EncodableValue(candidate->sdp_mid().std_string()); + params[EncodableValue("candidate")] = EncodableValue(cand); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnAddStream( + scoped_refptr stream) { + 
std::string streamId = stream->id().std_string(); + + EncodableMap params; + params[EncodableValue("event")] = "onAddStream"; + params[EncodableValue("streamId")] = EncodableValue(streamId); + EncodableList audioTracks; + auto audio_tracks = stream->audio_tracks(); + for (scoped_refptr track : audio_tracks.std_vector()) { + EncodableMap audioTrack; + audioTrack[EncodableValue("id")] = EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("label")] = + EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + audioTrack[EncodableValue("enabled")] = EncodableValue(track->enabled()); + audioTrack[EncodableValue("remote")] = EncodableValue(true); + audioTrack[EncodableValue("readyState")] = "live"; + + audioTracks.push_back(EncodableValue(audioTrack)); + } + params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); + + EncodableList videoTracks; + auto video_tracks = stream->video_tracks(); + for (scoped_refptr track : video_tracks.std_vector()) { + EncodableMap videoTrack; + + videoTrack[EncodableValue("id")] = EncodableValue(track->id().std_string()); + videoTrack[EncodableValue("label")] = + EncodableValue(track->id().std_string()); + videoTrack[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + videoTrack[EncodableValue("enabled")] = EncodableValue(track->enabled()); + videoTrack[EncodableValue("remote")] = EncodableValue(true); + videoTrack[EncodableValue("readyState")] = "live"; + + videoTracks.push_back(EncodableValue(videoTrack)); + } + remote_streams_[streamId] = scoped_refptr(stream); + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnRemoveStream( + scoped_refptr stream) { + EncodableMap params; + params[EncodableValue("event")] = "onRemoveStream"; + params[EncodableValue("streamId")] = + 
EncodableValue(stream->label().std_string()); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnAddTrack( + vector> streams, + scoped_refptr receiver) { + auto track = receiver->track(); + + std::vector> mediaStreams; + for (scoped_refptr stream : streams.std_vector()) { + mediaStreams.push_back(stream); + EncodableMap params; + params[EncodableValue("event")] = "onAddTrack"; + params[EncodableValue("streamId")] = + EncodableValue(stream->label().std_string()); + params[EncodableValue("trackId")] = + EncodableValue(track->id().std_string()); + + EncodableMap audioTrack; + audioTrack[EncodableValue("id")] = EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("label")] = + EncodableValue(track->id().std_string()); + audioTrack[EncodableValue("kind")] = + EncodableValue(track->kind().std_string()); + audioTrack[EncodableValue("enabled")] = EncodableValue(track->enabled()); + audioTrack[EncodableValue("remote")] = EncodableValue(true); + audioTrack[EncodableValue("readyState")] = "live"; + params[EncodableValue("track")] = EncodableValue(audioTrack); + + event_channel_->Success(EncodableValue(params)); + } +} + +void FlutterPeerConnectionObserver::OnTrack( + scoped_refptr transceiver) { + auto receiver = transceiver->receiver(); + EncodableMap params; + EncodableList streams_info; + auto streams = receiver->streams(); + for (scoped_refptr item : streams.std_vector()) { + streams_info.push_back(EncodableValue(mediaStreamToMap(item, id_))); + } + params[EncodableValue("event")] = "onTrack"; + params[EncodableValue("streams")] = EncodableValue(streams_info); + params[EncodableValue("track")] = + EncodableValue(mediaTrackToMap(receiver->track())); + params[EncodableValue("receiver")] = + EncodableValue(rtpReceiverToMap(receiver)); + params[EncodableValue("transceiver")] = + EncodableValue(transceiverToMap(transceiver)); + + event_channel_->Success(EncodableValue(params)); +} + +void 
FlutterPeerConnectionObserver::OnRemoveTrack( + scoped_refptr receiver) { + auto track = receiver->track(); + + EncodableMap params; + params[EncodableValue("event")] = "onRemoveTrack"; + params[EncodableValue("trackId")] = EncodableValue(track->id().std_string()); + params[EncodableValue("track")] = EncodableValue(mediaTrackToMap(track)); + params[EncodableValue("receiver")] = + EncodableValue(rtpReceiverToMap(receiver)); + event_channel_->Success(EncodableValue(params)); +} + +// void FlutterPeerConnectionObserver::OnRemoveTrack( +// scoped_refptr stream, +// scoped_refptr track) { + +// EncodableMap params; +// params[EncodableValue("event")] = "onRemoveTrack"; +// params[EncodableValue("streamId")] = stream->label(); +// params[EncodableValue("trackId")] = track->id(); +// +// EncodableMap videoTrack; +// videoTrack[EncodableValue("id")] = track->id(); +// videoTrack[EncodableValue("label")] = track->id(); +// videoTrack[EncodableValue("kind")] = track->kind(); +// videoTrack[EncodableValue("enabled")] = track->enabled(); +// videoTrack[EncodableValue("remote")] = true; +// videoTrack[EncodableValue("readyState")] = "live"; +// params[EncodableValue("track")] = videoTrack; +// +// event_channel_->Success(EncodableValue(params)); + +//} + +void FlutterPeerConnectionObserver::OnDataChannel( + scoped_refptr data_channel) { + int channel_id = data_channel->id(); + std::string channel_uuid = base_->GenerateUUID(); + + std::string event_channel = + "FlutterWebRTC/dataChannelEvent" + id_ + channel_uuid; + + std::unique_ptr observer( + new FlutterRTCDataChannelObserver(data_channel, base_->messenger_, + base_->task_runner_, + event_channel)); + + base_->lock(); + base_->data_channel_observers_[channel_uuid] = std::move(observer); + base_->unlock(); + + EncodableMap params; + params[EncodableValue("event")] = "didOpenDataChannel"; + params[EncodableValue("id")] = EncodableValue(channel_id); + params[EncodableValue("label")] = + 
EncodableValue(data_channel->label().std_string()); + params[EncodableValue("flutterId")] = EncodableValue(channel_uuid); + event_channel_->Success(EncodableValue(params)); +} + +void FlutterPeerConnectionObserver::OnRenegotiationNeeded() { + EncodableMap params; + params[EncodableValue("event")] = "onRenegotiationNeeded"; + event_channel_->Success(EncodableValue(params)); +} + +scoped_refptr FlutterPeerConnectionObserver::MediaStreamForId( + const std::string& id) { + auto it = remote_streams_.find(id); + if (it != remote_streams_.end()) + return (*it).second; + return nullptr; +} + +scoped_refptr FlutterPeerConnectionObserver::MediaTrackForId( + const std::string& id) { + for (auto it = remote_streams_.begin(); it != remote_streams_.end(); it++) { + auto remoteStream = (*it).second; + auto audio_tracks = remoteStream->audio_tracks(); + for (auto track : audio_tracks.std_vector()) { + if (track->id().std_string() == id) { + return track; + } + } + auto video_tracks = remoteStream->video_tracks(); + for (auto track : video_tracks.std_vector()) { + if (track->id().std_string() == id) { + return track; + } + } + } + return nullptr; +} + +void FlutterPeerConnectionObserver::RemoveStreamForId(const std::string& id) { + auto it = remote_streams_.find(id); + if (it != remote_streams_.end()) + remote_streams_.erase(it); +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_screen_capture.cc b/common/cpp/src/flutter_screen_capture.cc new file mode 100644 index 0000000000..df660daf9b --- /dev/null +++ b/common/cpp/src/flutter_screen_capture.cc @@ -0,0 +1,282 @@ +#include "flutter_screen_capture.h" + +namespace flutter_webrtc_plugin { + +FlutterScreenCapture::FlutterScreenCapture(FlutterWebRTCBase* base) + : base_(base) {} + +bool FlutterScreenCapture::BuildDesktopSourcesList(const EncodableList& types, + bool force_reload) { + size_t size = types.size(); + sources_.clear(); + for (size_t i = 0; i < size; i++) { + std::string type_str = 
GetValue(types[i]); + DesktopType desktop_type = DesktopType::kScreen; + if (type_str == "screen") { + desktop_type = DesktopType::kScreen; + } else if (type_str == "window") { + desktop_type = DesktopType::kWindow; + } else { + // std::cout << "Unknown type " << type_str << std::endl; + return false; + } + scoped_refptr source_list; + auto it = medialist_.find(desktop_type); + if (it != medialist_.end()) { + source_list = (*it).second; + } else { + source_list = base_->desktop_device_->GetDesktopMediaList(desktop_type); + source_list->RegisterMediaListObserver(this); + medialist_[desktop_type] = source_list; + } + source_list->UpdateSourceList(force_reload); + int count = source_list->GetSourceCount(); + for (int j = 0; j < count; j++) { + sources_.push_back(source_list->GetSource(j)); + } + } + return true; +} + +void FlutterScreenCapture::GetDesktopSources( + const EncodableList& types, + std::unique_ptr result) { + if (!BuildDesktopSourcesList(types, true)) { + result->Error("Bad Arguments", "Failed to get desktop sources"); + return; + } + + EncodableList sources; + for (auto source : sources_) { + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("name")] = EncodableValue(source->name().std_string()); + info[EncodableValue("type")] = + EncodableValue(source->type() == kWindow ? 
"window" : "screen"); + // TODO "thumbnailSize" + info[EncodableValue("thumbnailSize")] = EncodableMap{ + {EncodableValue("width"), EncodableValue(0)}, + {EncodableValue("height"), EncodableValue(0)}, + }; + sources.push_back(EncodableValue(info)); + } + + std::cout << " sources: " << sources.size() << std::endl; + auto map = EncodableMap(); + map[EncodableValue("sources")] = sources; + result->Success(EncodableValue(map)); +} + +void FlutterScreenCapture::UpdateDesktopSources( + const EncodableList& types, + std::unique_ptr result) { + if (!BuildDesktopSourcesList(types, false)) { + result->Error("Bad Arguments", "Failed to update desktop sources"); + return; + } + auto map = EncodableMap(); + map[EncodableValue("result")] = true; + result->Success(EncodableValue(map)); +} + +void FlutterScreenCapture::OnMediaSourceAdded( + scoped_refptr source) { + std::cout << " OnMediaSourceAdded: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceAdded"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("name")] = EncodableValue(source->name().std_string()); + info[EncodableValue("type")] = + EncodableValue(source->type() == kWindow ? 
"window" : "screen"); + // TODO "thumbnailSize" + info[EncodableValue("thumbnailSize")] = EncodableMap{ + {EncodableValue("width"), EncodableValue(0)}, + {EncodableValue("height"), EncodableValue(0)}, + }; + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnMediaSourceRemoved( + scoped_refptr source) { + std::cout << " OnMediaSourceRemoved: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceRemoved"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnMediaSourceNameChanged( + scoped_refptr source) { + std::cout << " OnMediaSourceNameChanged: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceNameChanged"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("name")] = EncodableValue(source->name().std_string()); + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnMediaSourceThumbnailChanged( + scoped_refptr source) { + std::cout << " OnMediaSourceThumbnailChanged: " << source->id().std_string() + << std::endl; + + EncodableMap info; + info[EncodableValue("event")] = "desktopSourceThumbnailChanged"; + info[EncodableValue("id")] = EncodableValue(source->id().std_string()); + info[EncodableValue("thumbnail")] = + EncodableValue(source->thumbnail().std_vector()); + base_->event_channel()->Success(EncodableValue(info)); +} + +void FlutterScreenCapture::OnStart(scoped_refptr capturer) { + // std::cout << " OnStart: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::OnPaused( + scoped_refptr capturer) { + // std::cout << " OnPaused: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::OnStop(scoped_refptr capturer) { + // 
std::cout << " OnStop: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::OnError(scoped_refptr capturer) { + // std::cout << " OnError: " << capturer->source()->id().std_string() + // << std::endl; +} + +void FlutterScreenCapture::GetDesktopSourceThumbnail( + std::string source_id, + int width, + int height, + std::unique_ptr result) { + scoped_refptr source; + for (auto src : sources_) { + if (src->id().std_string() == source_id) { + source = src; + } + } + if (source.get() == nullptr) { + result->Error("Bad Arguments", "Failed to get desktop source thumbnail"); + return; + } + std::cout << " GetDesktopSourceThumbnail: " << source->id().std_string() + << std::endl; + source->UpdateThumbnail(); + result->Success(EncodableValue(source->thumbnail().std_vector())); +} + +void FlutterScreenCapture::GetDisplayMedia( + const EncodableMap& constraints, + std::unique_ptr result) { + std::string source_id = "0"; + // DesktopType source_type = kScreen; + double fps = 30.0; + + const EncodableMap video = findMap(constraints, "video"); + if (video != EncodableMap()) { + const EncodableMap deviceId = findMap(video, "deviceId"); + if (deviceId != EncodableMap()) { + source_id = findString(deviceId, "exact"); + if (source_id.empty()) { + result->Error("Bad Arguments", "Incorrect video->deviceId->exact"); + return; + } + if (source_id != "0") { + // source_type = DesktopType::kWindow; + } + } + const EncodableMap mandatory = findMap(video, "mandatory"); + if (mandatory != EncodableMap()) { + double frameRate = findDouble(mandatory, "frameRate"); + if (frameRate != 0.0) { + fps = frameRate; + } + } + } + + std::string uuid = base_->GenerateUUID(); + + scoped_refptr stream = + base_->factory_->CreateStream(uuid.c_str()); + + EncodableMap params; + params[EncodableValue("streamId")] = EncodableValue(uuid); + + // AUDIO + + params[EncodableValue("audioTracks")] = EncodableValue(EncodableList()); + + // VIDEO + + EncodableMap 
video_constraints; + auto it = constraints.find(EncodableValue("video")); + if (it != constraints.end() && TypeIs(it->second)) { + video_constraints = GetValue(it->second); + } + + scoped_refptr source; + for (auto src : sources_) { + if (src->id().std_string() == source_id) { + source = src; + } + } + + if (!source.get()) { + result->Error("Bad Arguments", "source not found!"); + return; + } + + scoped_refptr desktop_capturer = + base_->desktop_device_->CreateDesktopCapturer(source); + + if (!desktop_capturer.get()) { + result->Error("Bad Arguments", "CreateDesktopCapturer failed!"); + return; + } + + desktop_capturer->RegisterDesktopCapturerObserver(this); + + const char* video_source_label = "screen_capture_input"; + + scoped_refptr video_source = + base_->factory_->CreateDesktopSource( + desktop_capturer, video_source_label, + base_->ParseMediaConstraints(video_constraints)); + + // TODO: RTCVideoSource -> RTCVideoTrack + + scoped_refptr track = + base_->factory_->CreateVideoTrack(video_source, uuid.c_str()); + + EncodableList videoTracks; + EncodableMap info; + info[EncodableValue("id")] = EncodableValue(track->id().std_string()); + info[EncodableValue("label")] = EncodableValue(track->id().std_string()); + info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); + info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + videoTracks.push_back(EncodableValue(info)); + params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); + + stream->AddTrack(track); + + base_->local_tracks_[track->id().std_string()] = track; + + base_->local_streams_[uuid] = stream; + + desktop_capturer->Start(uint32_t(fps)); + + result->Success(EncodableValue(params)); +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_video_renderer.cc b/common/cpp/src/flutter_video_renderer.cc new file mode 100644 index 0000000000..77c5dd12c6 --- /dev/null +++ b/common/cpp/src/flutter_video_renderer.cc @@ -0,0 +1,185 @@ +#include 
"flutter_video_renderer.h" + +namespace flutter_webrtc_plugin { + +FlutterVideoRenderer::~FlutterVideoRenderer() {} + +void FlutterVideoRenderer::initialize( + TextureRegistrar* registrar, + BinaryMessenger* messenger, + TaskRunner* task_runner, + std::unique_ptr texture, + int64_t trxture_id) { + registrar_ = registrar; + texture_ = std::move(texture); + texture_id_ = trxture_id; + std::string channel_name = + "FlutterWebRTC/Texture" + std::to_string(texture_id_); + event_channel_ = EventChannelProxy::Create(messenger, task_runner, channel_name); +} + +const FlutterDesktopPixelBuffer* FlutterVideoRenderer::CopyPixelBuffer( + size_t width, + size_t height) const { + mutex_.lock(); + if (pixel_buffer_.get() && frame_.get()) { + if (pixel_buffer_->width != frame_->width() || + pixel_buffer_->height != frame_->height()) { + size_t buffer_size = + (size_t(frame_->width()) * size_t(frame_->height())) * (32 >> 3); + rgb_buffer_.reset(new uint8_t[buffer_size]); + pixel_buffer_->width = frame_->width(); + pixel_buffer_->height = frame_->height(); + } + + frame_->ConvertToARGB(RTCVideoFrame::Type::kABGR, rgb_buffer_.get(), 0, + static_cast(pixel_buffer_->width), + static_cast(pixel_buffer_->height)); + + pixel_buffer_->buffer = rgb_buffer_.get(); + mutex_.unlock(); + return pixel_buffer_.get(); + } + mutex_.unlock(); + return nullptr; +} + +void FlutterVideoRenderer::OnFrame(scoped_refptr frame) { + if (!first_frame_rendered) { + EncodableMap params; + params[EncodableValue("event")] = "didFirstFrameRendered"; + params[EncodableValue("id")] = EncodableValue(texture_id_); + event_channel_->Success(EncodableValue(params)); + pixel_buffer_.reset(new FlutterDesktopPixelBuffer()); + pixel_buffer_->width = 0; + pixel_buffer_->height = 0; + first_frame_rendered = true; + } + if (rotation_ != frame->rotation()) { + EncodableMap params; + params[EncodableValue("event")] = "didTextureChangeRotation"; + params[EncodableValue("id")] = EncodableValue(texture_id_); + 
params[EncodableValue("rotation")] = + EncodableValue((int32_t)frame->rotation()); + event_channel_->Success(EncodableValue(params)); + rotation_ = frame->rotation(); + } + if (last_frame_size_.width != frame->width() || + last_frame_size_.height != frame->height()) { + EncodableMap params; + params[EncodableValue("event")] = "didTextureChangeVideoSize"; + params[EncodableValue("id")] = EncodableValue(texture_id_); + params[EncodableValue("width")] = EncodableValue((int32_t)frame->width()); + params[EncodableValue("height")] = EncodableValue((int32_t)frame->height()); + event_channel_->Success(EncodableValue(params)); + + last_frame_size_ = {(size_t)frame->width(), (size_t)frame->height()}; + } + mutex_.lock(); + frame_ = frame; + mutex_.unlock(); + registrar_->MarkTextureFrameAvailable(texture_id_); +} + +void FlutterVideoRenderer::SetVideoTrack(scoped_refptr track) { + if (track_ != track) { + if (track_) + track_->RemoveRenderer(this); + track_ = track; + last_frame_size_ = {0, 0}; + first_frame_rendered = false; + if (track_) + track_->AddRenderer(this); + } +} + +bool FlutterVideoRenderer::CheckMediaStream(std::string mediaId) { + if (0 == mediaId.size() || 0 == media_stream_id.size()) { + return false; + } + return mediaId == media_stream_id; +} + +bool FlutterVideoRenderer::CheckVideoTrack(std::string mediaId) { + if (0 == mediaId.size() || !track_) { + return false; + } + return mediaId == track_->id().std_string(); +} + +FlutterVideoRendererManager::FlutterVideoRendererManager( + FlutterWebRTCBase* base) + : base_(base) {} + +void FlutterVideoRendererManager::CreateVideoRendererTexture( + std::unique_ptr result) { + auto texture = new RefCountedObject(); + auto textureVariant = + std::make_unique(flutter::PixelBufferTexture( + [texture](size_t width, + size_t height) -> const FlutterDesktopPixelBuffer* { + return texture->CopyPixelBuffer(width, height); + })); + + auto texture_id = base_->textures_->RegisterTexture(textureVariant.get()); + 
texture->initialize(base_->textures_, base_->messenger_, base_->task_runner_, + std::move(textureVariant), texture_id); + renderers_[texture_id] = texture; + EncodableMap params; + params[EncodableValue("textureId")] = EncodableValue(texture_id); + result->Success(EncodableValue(params)); +} + +void FlutterVideoRendererManager::VideoRendererSetSrcObject( + int64_t texture_id, + const std::string& stream_id, + const std::string& owner_tag, + const std::string& track_id) { + scoped_refptr stream = + base_->MediaStreamForId(stream_id, owner_tag); + + auto it = renderers_.find(texture_id); + if (it != renderers_.end()) { + FlutterVideoRenderer* renderer = it->second.get(); + if (stream.get()) { + auto video_tracks = stream->video_tracks(); + if (video_tracks.size() > 0) { + if (track_id == std::string()) { + renderer->SetVideoTrack(video_tracks[0]); + } else { + for (auto track : video_tracks.std_vector()) { + if (track->id().std_string() == track_id) { + renderer->SetVideoTrack(track); + break; + } + } + } + renderer->media_stream_id = stream_id; + } + } else { + renderer->SetVideoTrack(nullptr); + } + } +} + +void FlutterVideoRendererManager::VideoRendererDispose( + int64_t texture_id, + std::unique_ptr result) { + auto it = renderers_.find(texture_id); + if (it != renderers_.end()) { + it->second->SetVideoTrack(nullptr); +#if defined(_WINDOWS) + base_->textures_->UnregisterTexture(texture_id, + [&, it] { renderers_.erase(it); }); +#else + base_->textures_->UnregisterTexture(texture_id); + renderers_.erase(it); +#endif + result->Success(); + return; + } + result->Error("VideoRendererDisposeFailed", + "VideoRendererDispose() texture not found!"); +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_webrtc.cc b/common/cpp/src/flutter_webrtc.cc new file mode 100644 index 0000000000..5e0afb6e17 --- /dev/null +++ b/common/cpp/src/flutter_webrtc.cc @@ -0,0 +1,1278 @@ +#include "flutter_webrtc.h" + +#include 
"flutter_webrtc/flutter_web_r_t_c_plugin.h" + +namespace flutter_webrtc_plugin { + +FlutterWebRTC::FlutterWebRTC(FlutterWebRTCPlugin* plugin) + : FlutterWebRTCBase::FlutterWebRTCBase(plugin->messenger(), + plugin->textures(), + plugin->task_runner()), + FlutterVideoRendererManager::FlutterVideoRendererManager(this), + FlutterMediaStream::FlutterMediaStream(this), + FlutterPeerConnection::FlutterPeerConnection(this), + FlutterScreenCapture::FlutterScreenCapture(this), + FlutterDataChannel::FlutterDataChannel(this), + FlutterFrameCryptor::FlutterFrameCryptor(this) {} + +FlutterWebRTC::~FlutterWebRTC() {} + +void FlutterWebRTC::HandleMethodCall( + const MethodCallProxy& method_call, + std::unique_ptr result) { + if (method_call.method_name().compare("initialize") == 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + const EncodableMap options = findMap(params, "options"); + result->Success(); + } else if (method_call.method_name().compare("createPeerConnection") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const EncodableMap configuration = findMap(params, "configuration"); + const EncodableMap constraints = findMap(params, "constraints"); + CreateRTCPeerConnection(configuration, constraints, std::move(result)); + } else if (method_call.method_name().compare("getUserMedia") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const EncodableMap constraints = findMap(params, "constraints"); + GetUserMedia(constraints, std::move(result)); + } else if (method_call.method_name().compare("getDisplayMedia") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + 
GetValue(*method_call.arguments()); + const EncodableMap constraints = findMap(params, "constraints"); + + GetDisplayMedia(constraints, std::move(result)); + } else if (method_call.method_name().compare("getDesktopSources") == 0) { + // types: ["screen", "window"] + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Bad arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const EncodableList types = findList(params, "types"); + if (types.empty()) { + result->Error("Bad Arguments", "Types is required"); + return; + } + GetDesktopSources(types, std::move(result)); + } else if (method_call.method_name().compare("updateDesktopSources") == 0) { + // types: ["screen", "window"] + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Bad arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const EncodableList types = findList(params, "types"); + if (types.empty()) { + result->Error("Bad Arguments", "Types is required"); + return; + } + UpdateDesktopSources(types, std::move(result)); + } else if (method_call.method_name().compare("getDesktopSourceThumbnail") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Bad arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + std::string sourceId = findString(params, "sourceId"); + if (sourceId.empty()) { + result->Error("Bad Arguments", "Incorrect sourceId"); + return; + } + const EncodableMap thumbnailSize = findMap(params, "thumbnailSize"); + if (!thumbnailSize.empty()) { + int width = 0; + int height = 0; + GetDesktopSourceThumbnail(sourceId, width, height, std::move(result)); + } else { + result->Error("Bad Arguments", "Bad arguments received"); + } + } else if (method_call.method_name().compare("getSources") == 0) { + GetSources(std::move(result)); + } else if (method_call.method_name().compare("selectAudioInput") 
== 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string deviceId = findString(params, "deviceId"); + SelectAudioInput(deviceId, std::move(result)); + } else if (method_call.method_name().compare("selectAudioOutput") == 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string deviceId = findString(params, "deviceId"); + SelectAudioOutput(deviceId, std::move(result)); + } else if (method_call.method_name().compare("mediaStreamGetTracks") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string streamId = findString(params, "streamId"); + MediaStreamGetTracks(streamId, std::move(result)); + } else if (method_call.method_name().compare("createOffer") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "constraints"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("createOfferFailed", + "createOffer() peerConnection is null"); + return; + } + CreateOffer(constraints, pc, std::move(result)); + } else if (method_call.method_name().compare("createAnswer") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "constraints"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("createAnswerFailed", 
+ "createAnswer() peerConnection is null"); + return; + } + CreateAnswer(constraints, pc, std::move(result)); + } else if (method_call.method_name().compare("addStream") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string streamId = findString(params, "streamId"); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (!stream) { + result->Error("addStreamFailed", "addStream() stream not found!"); + return; + } + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("addStreamFailed", "addStream() peerConnection is null"); + return; + } + pc->AddStream(stream); + result->Success(); + } else if (method_call.method_name().compare("removeStream") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string streamId = findString(params, "streamId"); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (!stream) { + result->Error("removeStreamFailed", "removeStream() stream not found!"); + return; + } + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("removeStreamFailed", + "removeStream() peerConnection is null"); + return; + } + pc->RemoveStream(stream); + result->Success(); + } else if (method_call.method_name().compare("setLocalDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, 
"peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setLocalDescriptionFailed", + "setLocalDescription() peerConnection is null"); + return; + } + + SdpParseError error; + scoped_refptr description = + RTCSessionDescription::Create(findString(constraints, "type").c_str(), + findString(constraints, "sdp").c_str(), + &error); + + if (description.get() != nullptr) { + SetLocalDescription(description.get(), pc, std::move(result)); + } else { + result->Error("setLocalDescriptionFailed", "Invalid type or sdp"); + } + } else if (method_call.method_name().compare("setRemoteDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setRemoteDescriptionFailed", + "setRemoteDescription() peerConnection is null"); + return; + } + + SdpParseError error; + scoped_refptr description = + RTCSessionDescription::Create(findString(constraints, "type").c_str(), + findString(constraints, "sdp").c_str(), + &error); + + if (description.get() != nullptr) { + SetRemoteDescription(description.get(), pc, std::move(result)); + } else { + result->Error("setRemoteDescriptionFailed", "Invalid type or sdp"); + } + } else if (method_call.method_name().compare("addCandidate") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const 
EncodableMap constraints = findMap(params, "candidate"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("addCandidateFailed", + "addCandidate() peerConnection is null"); + return; + } + + SdpParseError error; + std::string candidate = findString(constraints, "candidate"); + if (candidate.empty()) { + // received the end-of-candidates + result->Success(); + return; + } + int sdpMLineIndex = findInt(constraints, "sdpMLineIndex"); + scoped_refptr rtc_candidate = RTCIceCandidate::Create( + candidate.c_str(), findString(constraints, "sdpMid").c_str(), + sdpMLineIndex == -1 ? 0 : sdpMLineIndex, &error); + + if (rtc_candidate.get() != nullptr) { + AddIceCandidate(rtc_candidate.get(), pc, std::move(result)); + } else { + result->Error("addCandidateFailed", "Invalid candidate"); + } + } else if (method_call.method_name().compare("getStats") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string track_id = findString(params, "trackId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getStatsFailed", "getStats() peerConnection is null"); + return; + } + GetStats(track_id, pc, std::move(result)); + } else if (method_call.method_name().compare("createDataChannel") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("createDataChannelFailed", + "createDataChannel() peerConnection is null"); + return; + } + 
+ const std::string label = findString(params, "label"); + const EncodableMap dataChannelDict = findMap(params, "dataChannelDict"); + + CreateDataChannel(peerConnectionId, label, dataChannelDict, pc, + std::move(result)); + } else if (method_call.method_name().compare("dataChannelSend") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("dataChannelSendFailed", + "dataChannelSend() peerConnection is null"); + return; + } + + const std::string dataChannelId = findString(params, "dataChannelId"); + const std::string type = findString(params, "type"); + const EncodableValue data = findEncodableValue(params, "data"); + RTCDataChannel* data_channel = DataChannelForId(dataChannelId); + if (data_channel == nullptr) { + result->Error("dataChannelSendFailed", + "dataChannelSend() data_channel is null"); + return; + } + DataChannelSend(data_channel, type, data, std::move(result)); + } else if (method_call.method_name().compare( + "dataChannelGetBufferedAmount") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("dataChannelGetBufferedAmountFailed", + "dataChannelGetBufferedAmount() peerConnection is null"); + return; + } + + const std::string dataChannelId = findString(params, "dataChannelId"); + RTCDataChannel* data_channel = DataChannelForId(dataChannelId); + if (data_channel == nullptr) { + 
result->Error("dataChannelGetBufferedAmountFailed", + "dataChannelGetBufferedAmount() data_channel is null"); + return; + } + DataChannelGetBufferedAmount(data_channel, std::move(result)); + } else if (method_call.method_name().compare("dataChannelClose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("dataChannelCloseFailed", + "dataChannelClose() peerConnection is null"); + return; + } + + const std::string dataChannelId = findString(params, "dataChannelId"); + RTCDataChannel* data_channel = DataChannelForId(dataChannelId); + if (data_channel == nullptr) { + result->Error("dataChannelCloseFailed", + "dataChannelClose() data_channel is null"); + return; + } + DataChannelClose(data_channel, dataChannelId, std::move(result)); + } else if (method_call.method_name().compare("streamDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string stream_id = findString(params, "streamId"); + MediaStreamDispose(stream_id, std::move(result)); + } else if (method_call.method_name().compare("mediaStreamTrackSetEnable") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string track_id = findString(params, "trackId"); + const EncodableValue enable = findEncodableValue(params, "enabled"); + RTCMediaTrack* track = MediaTrackForId(track_id); + if (track != nullptr) { + track->set_enabled(GetValue(enable)); + } + result->Success(); + } 
else if (method_call.method_name().compare("trackDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string track_id = findString(params, "trackId"); + MediaStreamTrackDispose(track_id, std::move(result)); + } else if (method_call.method_name().compare("restartIce") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("restartIceFailed", "restartIce() peerConnection is null"); + return; + } + pc->RestartIce(); + result->Success(); + } else if (method_call.method_name().compare("peerConnectionClose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("peerConnectionCloseFailed", + "peerConnectionClose() peerConnection is null"); + return; + } + RTCPeerConnectionClose(pc, peerConnectionId, std::move(result)); + } else if (method_call.method_name().compare("peerConnectionDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Success(); + 
return; + } + RTCPeerConnectionDispose(pc, peerConnectionId, std::move(result)); + } else if (method_call.method_name().compare("createVideoRenderer") == 0) { + CreateVideoRendererTexture(std::move(result)); + } else if (method_call.method_name().compare("videoRendererDispose") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + int64_t texture_id = findLongInt(params, "textureId"); + VideoRendererDispose(texture_id, std::move(result)); + } else if (method_call.method_name().compare("videoRendererSetSrcObject") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string stream_id = findString(params, "streamId"); + int64_t texture_id = findLongInt(params, "textureId"); + const std::string owner_tag = findString(params, "ownerTag"); + const std::string track_id = findString(params, "trackId"); + + VideoRendererSetSrcObject(texture_id, stream_id, owner_tag, track_id); + result->Success(); + } else if (method_call.method_name().compare( + "mediaStreamTrackSwitchCamera") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string track_id = findString(params, "trackId"); + MediaStreamTrackSwitchCamera(track_id, std::move(result)); + } else if (method_call.method_name().compare("setVolume") == 0) { + auto args = method_call.arguments(); + if (!args) { + result->Error("Bad Arguments", "setVolume() Null arguments received"); + return; + } + + const EncodableMap params = GetValue(*args); + const std::string trackId = findString(params, "trackId"); + const std::optional volume = maybeFindDouble(params, "volume"); + + if 
(trackId.empty()) { + result->Error("Bad Arguments", "setVolume() Empty track provided"); + return; + } + + if (!volume.has_value()) { + result->Error("Bad Arguments", "setVolume() No volume provided"); + return; + } + + if (volume.value() < 0) { + result->Error("Bad Arguments", "setVolume() Volume must be positive"); + return; + } + + RTCMediaTrack* track = MediaTrackForId(trackId); + if (nullptr == track) { + result->Error("setVolume", "setVolume() Unable to find provided track"); + return; + } + + std::string kind = track->kind().std_string(); + if (0 != kind.compare("audio")) { + result->Error("setVolume", + "setVolume() Only audio tracks can have volume set"); + return; + } + + auto audioTrack = static_cast(track); + audioTrack->SetVolume(volume.value()); + + result->Success(); + } else if (method_call.method_name().compare("getLocalDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("GetLocalDescription", + "GetLocalDescription() peerConnection is null"); + return; + } + + GetLocalDescription(pc, std::move(result)); + } else if (method_call.method_name().compare("getRemoteDescription") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap constraints = findMap(params, "description"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("GetRemoteDescription", + 
"GetRemoteDescription() peerConnection is null"); + return; + } + + GetRemoteDescription(pc, std::move(result)); + } else if (method_call.method_name().compare("mediaStreamAddTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string streamId = findString(params, "streamId"); + const std::string trackId = findString(params, "trackId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (stream == nullptr) { + result->Error("MediaStreamAddTrack", + "MediaStreamAddTrack() stream is null"); + return; + } + + scoped_refptr track = MediaTracksForId(trackId); + if (track == nullptr) { + result->Error("MediaStreamAddTrack", + "MediaStreamAddTrack() track is null"); + return; + } + + MediaStreamAddTrack(stream, track, std::move(result)); + std::string kind = track->kind().std_string(); + for (int i = 0; i < renders_.size(); i++) { + FlutterVideoRenderer* renderer = renders_.at(i).get(); + if (renderer->CheckMediaStream(streamId) && 0 == kind.compare("video")) { + renderer->SetVideoTrack(static_cast(track.get())); + } + } + } else if (method_call.method_name().compare("mediaStreamRemoveTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string streamId = findString(params, "streamId"); + const std::string trackId = findString(params, "trackId"); + + scoped_refptr stream = MediaStreamForId(streamId); + if (stream == nullptr) { + result->Error("MediaStreamRemoveTrack", + "MediaStreamRemoveTrack() stream is null"); + return; + } + + scoped_refptr track = MediaTracksForId(trackId); + if (track == nullptr) { + result->Error("MediaStreamRemoveTrack", + "MediaStreamRemoveTrack() track is null"); + return; + } + + 
MediaStreamRemoveTrack(stream, track, std::move(result)); + + for (int i = 0; i < renders_.size(); i++) { + FlutterVideoRenderer* renderer = renders_.at(i).get(); + if (renderer->CheckVideoTrack(streamId)) { + renderer->SetVideoTrack(nullptr); + } + } + } else if (method_call.method_name().compare("addTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string trackId = findString(params, "trackId"); + const EncodableList streamIds = findList(params, "streamIds"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("AddTrack", "AddTrack() peerConnection is null"); + return; + } + + scoped_refptr track = MediaTracksForId(trackId); + if (track == nullptr) { + result->Error("AddTrack", "AddTrack() track is null"); + return; + } + std::vector ids; + for (EncodableValue value : streamIds) { + ids.push_back(GetValue(value)); + } + + AddTrack(pc, track, ids, std::move(result)); + + } else if (method_call.method_name().compare("removeTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string senderId = findString(params, "senderId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("removeTrack", "removeTrack() peerConnection is null"); + return; + } + + RemoveTrack(pc, senderId, std::move(result)); + + } else if (method_call.method_name().compare("addTransceiver") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + 
return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const EncodableMap transceiverInit = findMap(params, "transceiverInit"); + const std::string mediaType = findString(params, "mediaType"); + const std::string trackId = findString(params, "trackId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("addTransceiver", + "addTransceiver() peerConnection is null"); + return; + } + AddTransceiver(pc, trackId, mediaType, transceiverInit, std::move(result)); + } else if (method_call.method_name().compare("getTransceivers") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getTransceivers", + "getTransceivers() peerConnection is null"); + return; + } + + GetTransceivers(pc, std::move(result)); + } else if (method_call.method_name().compare("getReceivers") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getReceivers", "getReceivers() peerConnection is null"); + return; + } + + GetReceivers(pc, std::move(result)); + + } else if (method_call.method_name().compare("getSenders") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + 
GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getSenders", "getSenders() peerConnection is null"); + return; + } + + GetSenders(pc, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderSetTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderSetTrack", + "rtpSenderSetTrack() peerConnection is null"); + return; + } + + const std::string trackId = findString(params, "trackId"); + RTCMediaTrack* track = MediaTrackForId(trackId); + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + result->Error("rtpSenderSetTrack", + "rtpSenderSetTrack() rtpSenderId is null or empty"); + return; + } + RtpSenderSetTrack(pc, track, rtpSenderId, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderSetStreams") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() peerConnection is null"); + return; + } + + const EncodableList encodableStreamIds = findList(params, "streamIds"); + if (encodableStreamIds.empty()) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() streamId is null or empty"); + return; + 
} + std::vector streamIds{}; + for (EncodableValue value : encodableStreamIds) { + streamIds.push_back(GetValue(value)); + } + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() rtpSenderId is null or empty"); + return; + } + RtpSenderSetStream(pc, streamIds, rtpSenderId, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderReplaceTrack") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderReplaceTrack", + "rtpSenderReplaceTrack() peerConnection is null"); + return; + } + + const std::string trackId = findString(params, "trackId"); + RTCMediaTrack* track = MediaTrackForId(trackId); + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + result->Error("rtpSenderReplaceTrack", + "rtpSenderReplaceTrack() rtpSenderId is null or empty"); + return; + } + RtpSenderReplaceTrack(pc, track, rtpSenderId, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderSetParameters") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderSetParameters", + "rtpSenderSetParameters() peerConnection is null"); + return; + } + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + 
result->Error("rtpSenderSetParameters", + "rtpSenderSetParameters() rtpSenderId is null or empty"); + return; + } + + const EncodableMap parameters = findMap(params, "parameters"); + if (0 == parameters.size()) { + result->Error("rtpSenderSetParameters", + "rtpSenderSetParameters() parameters is null or empty"); + return; + } + + RtpSenderSetParameters(pc, rtpSenderId, parameters, std::move(result)); + } else if (method_call.method_name().compare("rtpTransceiverStop") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpTransceiverStop", + "rtpTransceiverStop() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverStop", + "rtpTransceiverStop() transceiverId is null or empty"); + return; + } + + RtpTransceiverStop(pc, transceiverId, std::move(result)); + } else if (method_call.method_name().compare( + "rtpTransceiverGetCurrentDirection") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error( + "rtpTransceiverGetCurrentDirection", + "rtpTransceiverGetCurrentDirection() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverGetCurrentDirection", + 
"rtpTransceiverGetCurrentDirection() transceiverId is " + "null or empty"); + return; + } + + RtpTransceiverGetCurrentDirection(pc, transceiverId, std::move(result)); + } else if (method_call.method_name().compare("rtpTransceiverSetDirection") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() transceiverId is " + "null or empty"); + return; + } + + const std::string direction = findString(params, "direction"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() direction is null or empty"); + return; + } + + RtpTransceiverSetDirection(pc, transceiverId, direction, std::move(result)); + } else if (method_call.method_name().compare("setConfiguration") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setConfiguration", + "setConfiguration() peerConnection is null"); + return; + } + + const EncodableMap configuration = findMap(params, "configuration"); + if (configuration.empty()) { + result->Error("setConfiguration", + "setConfiguration() configuration is null or empty"); + 
return; + } + SetConfiguration(pc, configuration, std::move(result)); + } else if (method_call.method_name().compare("captureFrame") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string path = findString(params, "path"); + if (path.empty()) { + result->Error("captureFrame", "captureFrame() path is null or empty"); + return; + } + + const std::string trackId = findString(params, "trackId"); + RTCMediaTrack* track = MediaTrackForId(trackId); + if (nullptr == track) { + result->Error("captureFrame", "captureFrame() track is null"); + return; + } + std::string kind = track->kind().std_string(); + if (0 != kind.compare("video")) { + result->Error("captureFrame", "captureFrame() track not is video track"); + return; + } + CaptureFrame(reinterpret_cast(track), path, + std::move(result)); + + } else if (method_call.method_name().compare("createLocalMediaStream") == 0) { + CreateLocalMediaStream(std::move(result)); + } else if (method_call.method_name().compare("canInsertDtmf") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string rtpSenderId = findString(params, "rtpSenderId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("canInsertDtmf", "canInsertDtmf() peerConnection is null"); + return; + } + + auto rtpSender = GetRtpSenderById(pc, rtpSenderId); + + if (rtpSender == nullptr) { + result->Error("sendDtmf", "sendDtmf() rtpSender is null"); + return; + } + auto dtmfSender = rtpSender->dtmf_sender(); + bool canInsertDtmf = dtmfSender->CanInsertDtmf(); + + result->Success(EncodableValue(canInsertDtmf)); + } else if 
(method_call.method_name().compare("sendDtmf") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + const std::string rtpSenderId = findString(params, "rtpSenderId"); + const std::string tone = findString(params, "tone"); + int duration = findInt(params, "duration"); + int gap = findInt(params, "gap"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("sendDtmf", "sendDtmf() peerConnection is null"); + return; + } + + auto rtpSender = GetRtpSenderById(pc, rtpSenderId); + + if (rtpSender == nullptr) { + result->Error("sendDtmf", "sendDtmf() rtpSender is null"); + return; + } + + auto dtmfSender = rtpSender->dtmf_sender(); + dtmfSender->InsertDtmf(tone, duration, gap); + + result->Success(); + } else if (method_call.method_name().compare("getRtpSenderCapabilities") == + 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + RTCMediaType mediaType = RTCMediaType::AUDIO; + const std::string kind = findString(params, "kind"); + if (0 == kind.compare("video")) { + mediaType = RTCMediaType::VIDEO; + } else if (0 == kind.compare("audio")) { + mediaType = RTCMediaType::AUDIO; + } else { + result->Error("getRtpSenderCapabilities", + "getRtpSenderCapabilities() kind is null or empty"); + return; + } + auto capabilities = factory_->GetRtpSenderCapabilities(mediaType); + EncodableMap map; + EncodableList codecsList; + for (auto codec : capabilities->codecs().std_vector()) { + EncodableMap codecMap; + codecMap[EncodableValue("mimeType")] = + EncodableValue(codec->mime_type().std_string()); + codecMap[EncodableValue("clockRate")] = + EncodableValue(codec->clock_rate()); + 
codecMap[EncodableValue("channels")] = EncodableValue(codec->channels()); + codecMap[EncodableValue("sdpFmtpLine")] = + EncodableValue(codec->sdp_fmtp_line().std_string()); + codecsList.push_back(EncodableValue(codecMap)); + } + map[EncodableValue("codecs")] = EncodableValue(codecsList); + map[EncodableValue("headerExtensions")] = EncodableValue(EncodableList()); + map[EncodableValue("fecMechanisms")] = EncodableValue(EncodableList()); + + result->Success(EncodableValue(map)); + } else if (method_call.method_name().compare("getRtpReceiverCapabilities") == + 0) { + const EncodableMap params = + GetValue(*method_call.arguments()); + + RTCMediaType mediaType = RTCMediaType::AUDIO; + const std::string kind = findString(params, "kind"); + if (0 == kind.compare("video")) { + mediaType = RTCMediaType::VIDEO; + } else if (0 == kind.compare("audio")) { + mediaType = RTCMediaType::AUDIO; + } else { + result->Error("getRtpSenderCapabilities", + "getRtpSenderCapabilities() kind is null or empty"); + return; + } + auto capabilities = factory_->GetRtpReceiverCapabilities(mediaType); + EncodableMap map; + EncodableList codecsList; + for (auto codec : capabilities->codecs().std_vector()) { + EncodableMap codecMap; + codecMap[EncodableValue("mimeType")] = + EncodableValue(codec->mime_type().std_string()); + codecMap[EncodableValue("clockRate")] = + EncodableValue(codec->clock_rate()); + codecMap[EncodableValue("channels")] = EncodableValue(codec->channels()); + codecMap[EncodableValue("sdpFmtpLine")] = + EncodableValue(codec->sdp_fmtp_line().std_string()); + codecsList.push_back(EncodableValue(codecMap)); + } + map[EncodableValue("codecs")] = EncodableValue(codecsList); + map[EncodableValue("headerExtensions")] = EncodableValue(EncodableList()); + map[EncodableValue("fecMechanisms")] = EncodableValue(EncodableList()); + + result->Success(EncodableValue(map)); + } else if (method_call.method_name().compare("setCodecPreferences") == 0) { + if (!method_call.arguments()) { + 
result->Error("Bad Arguments", "Null arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("setCodecPreferences", + "setCodecPreferences() peerConnection is null"); + return; + } + + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("setCodecPreferences", + "setCodecPreferences() transceiverId is null or empty"); + return; + } + + const EncodableList codecs = findList(params, "codecs"); + if (codecs.empty()) { + result->Error("Bad Arguments", "Codecs is required"); + return; + } + RtpTransceiverSetCodecPreferences(pc, transceiverId, codecs, + std::move(result)); + } else if (method_call.method_name().compare("getSignalingState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getSignalingState", + "getSignalingState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + signalingStateString(pc->signaling_state()); + result->Success(EncodableValue(state)); + } else if (method_call.method_name().compare("getIceGatheringState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + 
result->Error("getIceGatheringState", + "getIceGatheringState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + iceGatheringStateString(pc->ice_gathering_state()); + result->Success(EncodableValue(state)); + } else if (method_call.method_name().compare("getIceConnectionState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getIceConnectionState", + "getIceConnectionState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + iceConnectionStateString(pc->ice_connection_state()); + result->Success(EncodableValue(state)); + } else if (method_call.method_name().compare("getConnectionState") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("getConnectionState", + "getConnectionState() peerConnection is null"); + return; + } + EncodableMap state; + state[EncodableValue("state")] = + peerConnectionStateString(pc->peer_connection_state()); + result->Success(EncodableValue(state)); + } else { + if (HandleFrameCryptorMethodCall(method_call, std::move(result), &result)) { + return; + } else { + result->NotImplemented(); + } + } +} + +} // namespace flutter_webrtc_plugin diff --git a/common/cpp/src/flutter_webrtc_base.cc b/common/cpp/src/flutter_webrtc_base.cc new file mode 100644 index 0000000000..acd169cb2c --- 
/dev/null +++ b/common/cpp/src/flutter_webrtc_base.cc @@ -0,0 +1,366 @@ +#include "flutter_webrtc_base.h" + +#include "flutter_data_channel.h" +#include "flutter_peerconnection.h" + +#include "helper.h" + +namespace flutter_webrtc_plugin { + +const char* kEventChannelName = "FlutterWebRTC.Event"; + +FlutterWebRTCBase::FlutterWebRTCBase(BinaryMessenger* messenger, + TextureRegistrar* textures, + TaskRunner *task_runner) + : messenger_(messenger), task_runner_(task_runner), textures_(textures) { + LibWebRTC::Initialize(); + factory_ = LibWebRTC::CreateRTCPeerConnectionFactory(); + audio_device_ = factory_->GetAudioDevice(); + video_device_ = factory_->GetVideoDevice(); + desktop_device_ = factory_->GetDesktopDevice(); + audio_processing_ = factory_->GetAudioProcessing(); + event_channel_ = EventChannelProxy::Create(messenger_, task_runner_, kEventChannelName); +} + +FlutterWebRTCBase::~FlutterWebRTCBase() { + LibWebRTC::Terminate(); +} + +EventChannelProxy* FlutterWebRTCBase::event_channel() { + return event_channel_ ? 
event_channel_.get() : nullptr; +} + +std::string FlutterWebRTCBase::GenerateUUID() { + return libwebrtc::Helper::CreateRandomUuid().std_string(); +} + +RTCPeerConnection* FlutterWebRTCBase::PeerConnectionForId( + const std::string& id) { + auto it = peerconnections_.find(id); + + if (it != peerconnections_.end()) + return (*it).second.get(); + + return nullptr; +} + +void FlutterWebRTCBase::RemovePeerConnectionForId(const std::string& id) { + auto it = peerconnections_.find(id); + if (it != peerconnections_.end()) + peerconnections_.erase(it); +} + +scoped_refptr FlutterWebRTCBase ::MediaTrackForId(const std::string& id) { + auto it = local_tracks_.find(id); + + if (it != local_tracks_.end()) + return (*it).second; + + for (auto kv : peerconnection_observers_) { + auto pco = kv.second.get(); + auto track = pco->MediaTrackForId(id); + if (track != nullptr) + return track; + } + + return nullptr; +} + +void FlutterWebRTCBase::RemoveMediaTrackForId(const std::string& id) { + auto it = local_tracks_.find(id); + if (it != local_tracks_.end()) + local_tracks_.erase(it); +} + +FlutterPeerConnectionObserver* FlutterWebRTCBase::PeerConnectionObserversForId( + const std::string& id) { + auto it = peerconnection_observers_.find(id); + + if (it != peerconnection_observers_.end()) + return (*it).second.get(); + + return nullptr; +} + +void FlutterWebRTCBase::RemovePeerConnectionObserversForId( + const std::string& id) { + auto it = peerconnection_observers_.find(id); + if (it != peerconnection_observers_.end()) + peerconnection_observers_.erase(it); +} + +scoped_refptr FlutterWebRTCBase::MediaStreamForId( + const std::string& id, std::string ownerTag) { + if (!ownerTag.empty()) { + if (ownerTag == "local") { + auto it = local_streams_.find(id); + if (it != local_streams_.end()) { + return (*it).second; + } + } else { + auto pco = peerconnection_observers_.find(ownerTag); + if (peerconnection_observers_.end() != pco) { + auto stream = pco->second->MediaStreamForId(id); + if 
(stream != nullptr) { + return stream; + } + } + } + } + + auto it = local_streams_.find(id); + if (it != local_streams_.end()) { + return (*it).second; + } + + return nullptr; +} + +void FlutterWebRTCBase::RemoveStreamForId(const std::string& id) { + auto it = local_streams_.find(id); + if (it != local_streams_.end()) + local_streams_.erase(it); +} + +bool FlutterWebRTCBase::ParseConstraints(const EncodableMap& constraints, + RTCConfiguration* configuration) { + memset(&configuration->ice_servers, 0, sizeof(configuration->ice_servers)); + return false; +} + +void FlutterWebRTCBase::ParseConstraints( + const EncodableMap& src, + scoped_refptr mediaConstraints, + ParseConstraintType type /*= kMandatory*/) { + for (auto kv : src) { + EncodableValue k = kv.first; + EncodableValue v = kv.second; + std::string key = GetValue(k); + std::string value; + if (TypeIs(v) || TypeIs(v)) { + } else if (TypeIs(v)) { + value = GetValue(v); + } else if (TypeIs(v)) { + value = std::to_string(GetValue(v)); + } else if (TypeIs(v)) { + value = std::to_string(GetValue(v)); + } else if (TypeIs(v)) { + value = GetValue(v) ? RTCMediaConstraints::kValueTrue + : RTCMediaConstraints::kValueFalse; + } else { + value = std::to_string(GetValue(v)); + } + if (type == kMandatory) { + mediaConstraints->AddMandatoryConstraint(key.c_str(), value.c_str()); + } else { + mediaConstraints->AddOptionalConstraint(key.c_str(), value.c_str()); + if (key == "DtlsSrtpKeyAgreement") { + configuration_.srtp_type = GetValue(v) + ? 
MediaSecurityType::kDTLS_SRTP + : MediaSecurityType::kSDES_SRTP; + } + } + } +} + +scoped_refptr FlutterWebRTCBase::ParseMediaConstraints( + const EncodableMap& constraints) { + scoped_refptr media_constraints = + RTCMediaConstraints::Create(); + + if (constraints.find(EncodableValue("mandatory")) != constraints.end()) { + auto it = constraints.find(EncodableValue("mandatory")); + const EncodableMap mandatory = GetValue(it->second); + ParseConstraints(mandatory, media_constraints, kMandatory); + } else { + // Log.d(TAG, "mandatory constraints are not a map"); + } + + auto it = constraints.find(EncodableValue("optional")); + if (it != constraints.end()) { + const EncodableValue optional = it->second; + if (TypeIs(optional)) { + ParseConstraints(GetValue(optional), media_constraints, + kOptional); + } else if (TypeIs(optional)) { + const EncodableList list = GetValue(optional); + for (size_t i = 0; i < list.size(); i++) { + ParseConstraints(GetValue(list[i]), media_constraints, + kOptional); + } + } + } else { + // Log.d(TAG, "optional constraints are not an array"); + } + + return media_constraints; +} + +bool FlutterWebRTCBase::CreateIceServers(const EncodableList& iceServersArray, + IceServer* ice_servers) { + size_t size = iceServersArray.size(); + for (size_t i = 0; i < size; i++) { + IceServer& ice_server = ice_servers[i]; + EncodableMap iceServerMap = GetValue(iceServersArray[i]); + + if (iceServerMap.find(EncodableValue("username")) != iceServerMap.end()) { + ice_server.username = GetValue( + iceServerMap.find(EncodableValue("username"))->second); + } + if (iceServerMap.find(EncodableValue("credential")) != iceServerMap.end()) { + ice_server.password = GetValue( + iceServerMap.find(EncodableValue("credential"))->second); + } + + auto it = iceServerMap.find(EncodableValue("url")); + if (it != iceServerMap.end() && TypeIs(it->second)) { + ice_server.uri = GetValue(it->second); + } + it = iceServerMap.find(EncodableValue("urls")); + if (it != iceServerMap.end()) 
{ + if (TypeIs(it->second)) { + ice_server.uri = GetValue(it->second); + } + if (TypeIs(it->second)) { + const EncodableList urls = GetValue(it->second); + for (auto url : urls) { + if (TypeIs(url)) { + const EncodableMap map = GetValue(url); + std::string value; + auto it2 = map.find(EncodableValue("url")); + if (it2 != map.end()) { + ice_server.uri = GetValue(it2->second); + } + } else if (TypeIs(url)) { + ice_server.uri = GetValue(url); + } + } + } + } + } + return size > 0; +} + +bool FlutterWebRTCBase::ParseRTCConfiguration(const EncodableMap& map, + RTCConfiguration& conf) { + auto it = map.find(EncodableValue("iceServers")); + if (it != map.end()) { + const EncodableList iceServersArray = GetValue(it->second); + CreateIceServers(iceServersArray, conf.ice_servers); + } + // iceTransportPolicy (public API) + it = map.find(EncodableValue("iceTransportPolicy")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "all") // public + conf.type = IceTransportsType::kAll; + else if (v == "relay") + conf.type = IceTransportsType::kRelay; + else if (v == "nohost") + conf.type = IceTransportsType::kNoHost; + else if (v == "none") + conf.type = IceTransportsType::kNone; + } + + // bundlePolicy (public api) + it = map.find(EncodableValue("bundlePolicy")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "balanced") // public + conf.bundle_policy = kBundlePolicyBalanced; + else if (v == "max-compat") // public + conf.bundle_policy = kBundlePolicyMaxCompat; + else if (v == "max-bundle") // public + conf.bundle_policy = kBundlePolicyMaxBundle; + } + + // rtcpMuxPolicy (public api) + it = map.find(EncodableValue("rtcpMuxPolicy")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "negotiate") // public + conf.rtcp_mux_policy = RtcpMuxPolicy::kRtcpMuxPolicyNegotiate; + else if (v == "require") // public + conf.rtcp_mux_policy = 
RtcpMuxPolicy::kRtcpMuxPolicyRequire; + } + + // FIXME: peerIdentity of type DOMString (public API) + // FIXME: certificates of type sequence (public API) + // iceCandidatePoolSize of type unsigned short, defaulting to 0 + it = map.find(EncodableValue("iceCandidatePoolSize")); + if (it != map.end()) { + conf.ice_candidate_pool_size = GetValue(it->second); + } + + // sdpSemantics (public api) + it = map.find(EncodableValue("sdpSemantics")); + if (it != map.end() && TypeIs(it->second)) { + std::string v = GetValue(it->second); + if (v == "plan-b") // public + conf.sdp_semantics = SdpSemantics::kPlanB; + else if (v == "unified-plan") // public + conf.sdp_semantics = SdpSemantics::kUnifiedPlan; + } else { + conf.sdp_semantics = SdpSemantics::kUnifiedPlan; + } + + // maxIPv6Networks + it = map.find(EncodableValue("maxIPv6Networks")); + if (it != map.end()) { + conf.max_ipv6_networks = GetValue(it->second); + } + return true; +} + +scoped_refptr FlutterWebRTCBase::MediaTracksForId( + const std::string& id) { + auto it = local_tracks_.find(id); + if (it != local_tracks_.end()) { + return (*it).second; + } + + for (auto it2 : peerconnection_observers_) { + auto pco = it2.second; + auto t = pco->MediaTrackForId(id); + if (t != nullptr) { + return t; + } + } + + return nullptr; +} + +void FlutterWebRTCBase::RemoveTracksForId(const std::string& id) { + auto it = local_tracks_.find(id); + if (it != local_tracks_.end()) + local_tracks_.erase(it); +} + +libwebrtc::scoped_refptr +FlutterWebRTCBase::GetRtpSenderById(RTCPeerConnection* pc, std::string id) { + libwebrtc::scoped_refptr result; + auto senders = pc->senders(); + for (scoped_refptr item : senders.std_vector()) { + std::string itemId = item->id().std_string(); + if (nullptr == result.get() && 0 == id.compare(itemId)) { + result = item; + } + } + return result; +} + +libwebrtc::scoped_refptr +FlutterWebRTCBase::GetRtpReceiverById(RTCPeerConnection* pc, + std::string id) { + libwebrtc::scoped_refptr result; + auto 
receivers = pc->receivers(); + for (scoped_refptr item : receivers.std_vector()) { + std::string itemId = item->id().std_string(); + if (nullptr == result.get() && 0 == id.compare(itemId)) { + result = item; + } + } + return result; +} + +} // namespace flutter_webrtc_plugin diff --git a/common/darwin/Classes/AudioManager.h b/common/darwin/Classes/AudioManager.h new file mode 100644 index 0000000000..ec7d609cde --- /dev/null +++ b/common/darwin/Classes/AudioManager.h @@ -0,0 +1,19 @@ +#import +#import +#import "AudioProcessingAdapter.h" + +@interface AudioManager : NSObject + +@property(nonatomic, strong) RTCDefaultAudioProcessingModule* _Nonnull audioProcessingModule; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull capturePostProcessingAdapter; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull renderPreProcessingAdapter; + ++ (_Nonnull instancetype)sharedInstance; + +- (void)addLocalAudioRenderer:(nonnull id)renderer; + +- (void)removeLocalAudioRenderer:(nonnull id)renderer; + +@end diff --git a/common/darwin/Classes/AudioManager.m b/common/darwin/Classes/AudioManager.m new file mode 100644 index 0000000000..22fe818e5d --- /dev/null +++ b/common/darwin/Classes/AudioManager.m @@ -0,0 +1,50 @@ +#import "AudioManager.h" +#import "AudioProcessingAdapter.h" + +@implementation AudioManager { + RTCDefaultAudioProcessingModule* _audioProcessingModule; + AudioProcessingAdapter* _capturePostProcessingAdapter; + AudioProcessingAdapter* _renderPreProcessingAdapter; +} + +@synthesize capturePostProcessingAdapter = _capturePostProcessingAdapter; +@synthesize renderPreProcessingAdapter = _renderPreProcessingAdapter; +@synthesize audioProcessingModule = _audioProcessingModule; + ++ (instancetype)sharedInstance { + static dispatch_once_t onceToken; + static AudioManager* sharedInstance = nil; + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (instancetype)init { + if (self = [super init]) { 
+ _audioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init]; + _capturePostProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _renderPreProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _audioProcessingModule.capturePostProcessingDelegate = _capturePostProcessingAdapter; + _audioProcessingModule.renderPreProcessingDelegate = _renderPreProcessingAdapter; + } + return self; +} + +- (void)addLocalAudioRenderer:(nonnull id)renderer { + [_capturePostProcessingAdapter addAudioRenderer:renderer]; +} + +- (void)removeLocalAudioRenderer:(nonnull id)renderer { + [_capturePostProcessingAdapter removeAudioRenderer:renderer]; +} + +- (void)addRemoteAudioSink:(nonnull id)sink { + [_renderPreProcessingAdapter addAudioRenderer:sink]; +} + +- (void)removeRemoteAudioSink:(nonnull id)sink { + [_renderPreProcessingAdapter removeAudioRenderer:sink]; +} + +@end diff --git a/common/darwin/Classes/AudioProcessingAdapter.h b/common/darwin/Classes/AudioProcessingAdapter.h new file mode 100644 index 0000000000..751a034764 --- /dev/null +++ b/common/darwin/Classes/AudioProcessingAdapter.h @@ -0,0 +1,26 @@ +#import +#import + +@protocol ExternalAudioProcessingDelegate + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels; + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) * _Nonnull)audioBuffer; + +- (void)audioProcessingRelease; + +@end + +@interface AudioProcessingAdapter : NSObject + +- (nonnull instancetype)init; + +- (void)addProcessing:(id _Nonnull)processor; + +- (void)removeProcessing:(id _Nonnull)processor; + +- (void)addAudioRenderer:(nonnull id)renderer; + +- (void)removeAudioRenderer:(nonnull id)renderer; + +@end diff --git a/common/darwin/Classes/AudioProcessingAdapter.m b/common/darwin/Classes/AudioProcessingAdapter.m new file mode 100644 index 0000000000..8e3c0beba1 --- /dev/null +++ b/common/darwin/Classes/AudioProcessingAdapter.m @@ -0,0 +1,105 @@ +#import "AudioProcessingAdapter.h" 
+#import +#import + +@implementation AudioProcessingAdapter { + NSMutableArray>* _renderers; + NSMutableArray>* _processors; + os_unfair_lock _lock; +} + +- (instancetype)init { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _renderers = [[NSMutableArray> alloc] init]; + _processors = [[NSMutableArray> alloc] init]; + } + return self; +} + +- (void)addProcessing:(id _Nonnull)processor { + os_unfair_lock_lock(&_lock); + [_processors addObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id _Nonnull)processor { + os_unfair_lock_lock(&_lock); + _processors = [[_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)addAudioRenderer:(nonnull id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers addObject:renderer]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeAudioRenderer:(nonnull id)renderer { + os_unfair_lock_lock(&_lock); + _renderers = [[_renderers + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != renderer; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingInitializeWithSampleRate:sampleRateHz channels:channels]; + } + os_unfair_lock_unlock(&_lock); +} + +- (AVAudioPCMBuffer*)toPCMBuffer:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + AVAudioFormat* format = + [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16 + sampleRate:audioBuffer.frames * 100.0 + channels:(AVAudioChannelCount)audioBuffer.channels + interleaved:NO]; + AVAudioPCMBuffer* pcmBuffer = + [[AVAudioPCMBuffer alloc] initWithPCMFormat:format + 
frameCapacity:(AVAudioFrameCount)audioBuffer.frames]; + if (!pcmBuffer) { + NSLog(@"Failed to create AVAudioPCMBuffer"); + return nil; + } + pcmBuffer.frameLength = (AVAudioFrameCount)audioBuffer.frames; + for (int i = 0; i < audioBuffer.channels; i++) { + float* sourceBuffer = [audioBuffer rawBufferForChannel:i]; + int16_t* targetBuffer = (int16_t*)pcmBuffer.int16ChannelData[i]; + for (int frame = 0; frame < audioBuffer.frames; frame++) { + targetBuffer[frame] = sourceBuffer[frame]; + } + } + return pcmBuffer; +} + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingProcess:audioBuffer]; + } + + for (id renderer in _renderers) { + [renderer renderPCMBuffer:[self toPCMBuffer:audioBuffer]]; + } + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingRelease { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingRelease]; + } + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/common/darwin/Classes/AudioUtils.h b/common/darwin/Classes/AudioUtils.h new file mode 100644 index 0000000000..02d6f1c09f --- /dev/null +++ b/common/darwin/Classes/AudioUtils.h @@ -0,0 +1,15 @@ +#if TARGET_OS_IPHONE + +#import + +@interface AudioUtils : NSObject ++ (void)ensureAudioSessionWithRecording:(BOOL)recording; +// needed for wired headphones to use headphone mic ++ (BOOL)selectAudioInput:(AVAudioSessionPort)type; ++ (void)setSpeakerphoneOn:(BOOL)enable; ++ (void)setSpeakerphoneOnButPreferBluetooth; ++ (void)deactiveRtcAudioSession; ++ (void) setAppleAudioConfiguration:(NSDictionary*)configuration; +@end + +#endif diff --git a/common/darwin/Classes/AudioUtils.m b/common/darwin/Classes/AudioUtils.m new file mode 100644 index 0000000000..a2a863b057 --- /dev/null +++ b/common/darwin/Classes/AudioUtils.m @@ -0,0 +1,229 @@ +#if TARGET_OS_IPHONE +#import "AudioUtils.h" +#import + +@implementation AudioUtils + ++ 
(void)ensureAudioSessionWithRecording:(BOOL)recording { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + // we also need to set default WebRTC audio configuration, since it may be activated after + // this method is called + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + // require audio session to be either PlayAndRecord or MultiRoute + if (recording && session.category != AVAudioSessionCategoryPlayAndRecord && + session.category != AVAudioSessionCategoryMultiRoute) { + config.category = AVAudioSessionCategoryPlayAndRecord; + config.categoryOptions = + AVAudioSessionCategoryOptionAllowBluetooth | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowAirPlay; + + [session lockForConfiguration]; + NSError* error = nil; + bool success = [session setCategory:config.category withOptions:config.categoryOptions error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[true]: setCategory failed due to: %@", error); + success = [session setMode:config.mode error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[true]: setMode failed due to: %@", error); + [session unlockForConfiguration]; + } else if (!recording && (session.category == AVAudioSessionCategoryAmbient || + session.category == AVAudioSessionCategorySoloAmbient)) { + config.mode = AVAudioSessionModeDefault; + [session lockForConfiguration]; + NSError* error = nil; + bool success = [session setMode:config.mode error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[false]: setMode failed due to: %@", error); + [session unlockForConfiguration]; + } +} + ++ (BOOL)selectAudioInput:(AVAudioSessionPort)type { + RTCAudioSession* rtcSession = [RTCAudioSession sharedInstance]; + AVAudioSessionPortDescription* inputPort = nil; + for (AVAudioSessionPortDescription* port in rtcSession.session.availableInputs) { + if ([port.portType isEqualToString:type]) { + inputPort = port; + 
break; + } + } + if (inputPort != nil) { + NSError* errOut = nil; + [rtcSession lockForConfiguration]; + [rtcSession setPreferredInput:inputPort error:&errOut]; + [rtcSession unlockForConfiguration]; + if (errOut != nil) { + return NO; + } + return YES; + } + return NO; +} + ++ (void)setSpeakerphoneOn:(BOOL)enable { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + + if(enable && config.category != AVAudioSessionCategoryPlayAndRecord) { + NSLog(@"setSpeakerphoneOn: Category option 'defaultToSpeaker' is only applicable with category 'playAndRecord', ignore."); + return; + } + + [session lockForConfiguration]; + NSError* error = nil; + if (!enable) { + [session setMode:config.mode error:&error]; + BOOL success = [session setCategory:config.category + withOptions:AVAudioSessionCategoryOptionAllowAirPlay | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowBluetooth + error:&error]; + + success = [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None + error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error); + } else { + [session setMode:config.mode error:&error]; + BOOL success = [session setCategory:config.category + withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker | + AVAudioSessionCategoryOptionAllowAirPlay | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowBluetooth + error:&error]; + + success = [session overrideOutputAudioPort:kAudioSessionProperty_OverrideAudioRoute + error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error); + } + [session unlockForConfiguration]; +} + ++ (void)setSpeakerphoneOnButPreferBluetooth { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration 
webRTCConfiguration]; + [session lockForConfiguration]; + NSError* error = nil; + [session setMode:config.mode error:&error]; + BOOL success = [session setCategory:config.category + withOptions:AVAudioSessionCategoryOptionAllowAirPlay | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowBluetooth | + AVAudioSessionCategoryOptionDefaultToSpeaker + error:&error]; + + success = [session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None + error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOnButPreferBluetooth: Port override failed due to: %@", error); + + success = [session setActive:YES error:&error]; + if (!success) + NSLog(@"setSpeakerphoneOnButPreferBluetooth: Audio session override failed: %@", error); + else + NSLog(@"AudioSession override with bluetooth preference via setSpeakerphoneOnButPreferBluetooth successfull "); + [session unlockForConfiguration]; +} + ++ (void)deactiveRtcAudioSession { + NSError* error = nil; + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + [session lockForConfiguration]; + if ([session isActive]) { + BOOL success = [session setActive:NO error:&error]; + if (!success) + NSLog(@"RTC Audio session deactive failed: %@", error); + else + NSLog(@"RTC AudioSession deactive is successful "); + } + [session unlockForConfiguration]; +} + + ++ (AVAudioSessionMode)audioSessionModeFromString:(NSString*)mode { + if([@"default_" isEqualToString:mode]) { + return AVAudioSessionModeDefault; + } else if([@"voicePrompt" isEqualToString:mode]) { + return AVAudioSessionModeVoicePrompt; + } else if([@"videoRecording" isEqualToString:mode]) { + return AVAudioSessionModeVideoRecording; + } else if([@"videoChat" isEqualToString:mode]) { + return AVAudioSessionModeVideoChat; + } else if([@"voiceChat" isEqualToString:mode]) { + return AVAudioSessionModeVoiceChat; + } else if([@"gameChat" isEqualToString:mode]) { + return AVAudioSessionModeGameChat; + } else if([@"measurement" 
isEqualToString:mode]) { + return AVAudioSessionModeMeasurement; + } else if([@"moviePlayback" isEqualToString:mode]) { + return AVAudioSessionModeMoviePlayback; + } else if([@"spokenAudio" isEqualToString:mode]) { + return AVAudioSessionModeSpokenAudio; + } + return AVAudioSessionModeDefault; +} + ++ (AVAudioSessionCategory)audioSessionCategoryFromString:(NSString *)category { + if([@"ambient" isEqualToString:category]) { + return AVAudioSessionCategoryAmbient; + } else if([@"soloAmbient" isEqualToString:category]) { + return AVAudioSessionCategorySoloAmbient; + } else if([@"playback" isEqualToString:category]) { + return AVAudioSessionCategoryPlayback; + } else if([@"record" isEqualToString:category]) { + return AVAudioSessionCategoryRecord; + } else if([@"playAndRecord" isEqualToString:category]) { + return AVAudioSessionCategoryPlayAndRecord; + } else if([@"multiRoute" isEqualToString:category]) { + return AVAudioSessionCategoryMultiRoute; + } + return AVAudioSessionCategoryAmbient; +} + ++ (void) setAppleAudioConfiguration:(NSDictionary*)configuration { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + + NSString* appleAudioCategory = configuration[@"appleAudioCategory"]; + NSArray* appleAudioCategoryOptions = configuration[@"appleAudioCategoryOptions"]; + NSString* appleAudioMode = configuration[@"appleAudioMode"]; + + [session lockForConfiguration]; + + if(appleAudioCategoryOptions != nil) { + config.categoryOptions = 0; + for(NSString* option in appleAudioCategoryOptions) { + if([@"mixWithOthers" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers; + } else if([@"duckOthers" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionDuckOthers; + } else if([@"allowBluetooth" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetooth; + } else 
if([@"allowBluetoothA2DP" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetoothA2DP; + } else if([@"allowAirPlay" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowAirPlay; + } else if([@"defaultToSpeaker" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker; + } + } + } + + if(appleAudioCategory != nil) { + config.category = [AudioUtils audioSessionCategoryFromString:appleAudioCategory]; + [session setCategory:config.category withOptions:config.categoryOptions error:nil]; + } + + if(appleAudioMode != nil) { + config.mode = [AudioUtils audioSessionModeFromString:appleAudioMode]; + [session setMode:config.mode error:nil]; + } + + [session unlockForConfiguration]; + +} + +@end +#endif diff --git a/common/darwin/Classes/CameraUtils.h b/common/darwin/Classes/CameraUtils.h new file mode 100644 index 0000000000..d6859d88ee --- /dev/null +++ b/common/darwin/Classes/CameraUtils.h @@ -0,0 +1,43 @@ +#import +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (CameraUtils) + +- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result; + +- 
(void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; + +- (NSInteger)selectFpsForFormat:(nonnull AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps; + +- (nullable AVCaptureDeviceFormat*)selectFormatForDevice:(nonnull AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight; + +- (nullable AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position; + + +@end diff --git a/common/darwin/Classes/CameraUtils.m b/common/darwin/Classes/CameraUtils.m new file mode 100644 index 0000000000..e05d32e055 --- /dev/null +++ b/common/darwin/Classes/CameraUtils.m @@ -0,0 +1,350 @@ +#import "CameraUtils.h" + +@implementation FlutterWebRTCPlugin (CameraUtils) + +-(AVCaptureDevice*) currentDevice { + if (!self.videoCapturer) { + return nil; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + return nil; + } + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + return deviceInput.device; +} + +- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't check torch"); + result(@NO); + return; + } + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +#else + NSLog(@"Not supported on macOS. Can't check torch"); + result(@NO); +#endif +} + +- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(FlutterResult)result { + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. 
Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device is nil" details:nil]); + return; + } + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device does not support torch" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:error.localizedDescription details:nil]); + return; + } + + device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"device is nil" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:error.localizedDescription details:nil]); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)applyFocusMode:(NSString*)focusMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } else if([@"auto" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; + } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"device is nil" details:nil]); + return; + } + self.focusMode = focusMode; + [self applyFocusMode:focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. 
Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"device is nil" details:nil]); + return; + } + BOOL reset = ((NSNumber *)focusPoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)focusPoint[@"x"]).doubleValue; + y = ((NSNumber *)focusPoint[@"y"]).doubleValue; + } + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]); + return; + } + + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + + [device setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyFocusMode:self.focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void) applyExposureMode:(NSString*)exposureMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } else if([@"auto" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) { + [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure]; + } else if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result{ +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"device is nil" details:nil]); + return; + } + self.exposureMode = exposureMode; + [self applyExposureMode:exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +#if TARGET_OS_IPHONE +- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation + x:(double)x + y:(double)y { + double oldX = x, oldY = y; + switch (orientation) { + case UIDeviceOrientationPortrait: // 90 ccw + y = 1 - oldX; + x = oldY; + break; + case UIDeviceOrientationPortraitUpsideDown: // 90 cw + x = 1 - oldY; + y = oldX; + break; + case UIDeviceOrientationLandscapeRight: // 180 + x = 1 - x; + y = 1 - y; + break; + case UIDeviceOrientationLandscapeLeft: + default: + // No rotation required + break; + } + return CGPointMake(x, y); +} +#endif + +- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"device is nil" details:nil]); + return; + } + + BOOL reset = ((NSNumber *)exposurePoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)exposurePoint[@"x"]).doubleValue; + y = ((NSNumber *)exposurePoint[@"y"]).doubleValue; + } + if (!device.isExposurePointOfInterestSupported) { + NSLog(@"Exposure point of interest is not supported. 
Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Exposure point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + [device setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyExposureMode:self.exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't switch camera"); + return; + } +#if TARGET_OS_IPHONE + [self.videoCapturer stopCapture]; +#endif + self._usingFrontCamera = !self._usingFrontCamera; + AVCaptureDevicePosition position = + self._usingFrontCamera ? 
AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; + AVCaptureDevice* videoDevice = [self findDeviceForPosition:position]; + AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice + targetWidth:self._lastTargetWidth + targetHeight:self._lastTargetHeight]; + [self.videoCapturer startCaptureWithDevice:videoDevice + format:selectedFormat + fps:[self selectFpsForFormat:selectedFormat + targetFps:self._lastTargetFps] + completionHandler:^(NSError* error) { + if (error != nil) { + result([FlutterError errorWithCode:@"Error while switching camera" + message:@"Error while switching camera" + details:error]); + } else { + result([NSNumber numberWithBool:self._usingFrontCamera]); + } + }]; +} + + +- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray* captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice* device in captureDevices) { + if (device.position == position) { + return device; + } + } + return captureDevices[0]; +} + +- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight { + NSArray* formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat* selectedFormat = nil; + long currentDiff = INT_MAX; + for (AVCaptureDeviceFormat* format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); + //NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", format.videoSupportedFrameRateRanges, dimension.width, dimension.height); + long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + 
currentDiff = diff; + } else if (diff == currentDiff && + pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, targetFps); +} + + +@end diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.h b/common/darwin/Classes/FlutterRPScreenRecorder.h new file mode 100644 index 0000000000..638cfb977b --- /dev/null +++ b/common/darwin/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1,13 @@ +#if TARGET_OS_IPHONE +#import +@interface FlutterRPScreenRecorder : RTCVideoCapturer + +- (void)startCapture; + +// Stops the capture session asynchronously and notifies callback on completion. +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; + +- (void)stopCapture; + +@end +#endif diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.m b/common/darwin/Classes/FlutterRPScreenRecorder.m new file mode 100644 index 0000000000..8abe8d3fad --- /dev/null +++ b/common/darwin/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1,94 @@ +#import "FlutterRPScreenRecorder.h" +#if TARGET_OS_IPHONE +#import + +// See: https://developer.apple.com/videos/play/wwdc2017/606/ + +@implementation FlutterRPScreenRecorder { + RPScreenRecorder* screenRecorder; + RTCVideoSource* source; +} + +- (instancetype)initWithDelegate:(__weak id)delegate { + source = delegate; + return [super initWithDelegate:delegate]; +} + +- (void)startCapture { + if (screenRecorder == NULL) + screenRecorder = [RPScreenRecorder sharedRecorder]; + + [screenRecorder setMicrophoneEnabled:NO]; + + if (![screenRecorder isAvailable]) { + NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not 
available!"); + return; + } + + if (@available(iOS 11.0, *)) { + [screenRecorder + startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, + RPSampleBufferType bufferType, NSError* _Nullable error) { + if (bufferType == RPSampleBufferTypeVideo) { // We want video only now + [self handleSourceBuffer:sampleBuffer sampleType:bufferType]; + } + } + completionHandler:^(NSError* _Nullable error) { + if (error != nil) + NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); + }]; + } else { + // Fallback on earlier versions + NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not available in versions " + @"lower than iOS 11 !"); + } +} + +- (void)stopCapture { + if (@available(iOS 11.0, *)) { + [screenRecorder stopCaptureWithHandler:^(NSError* _Nullable error) { + if (error != nil) + NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error); + }]; + } else { + // Fallback on earlier versions + NSLog(@"FlutterRPScreenRecorder.stopCapture: Screen recorder is not available in versions " + @"lower than iOS 11 !"); + } +} + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if (completionHandler != nil) { + completionHandler(); + } +} + +- (void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer + sampleType:(RPSampleBufferType)sampleType { + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || + !CMSampleBufferDataIsReady(sampleBuffer)) { + return; + } + + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (pixelBuffer == nil) { + return; + } + + size_t width = CVPixelBufferGetWidth(pixelBuffer); + size_t height = CVPixelBufferGetHeight(pixelBuffer); + + [source adaptOutputFormatToWidth:(int)(width / 2) height:(int)(height / 2) fps:8]; + + RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + int64_t timeStampNs = + 
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; + RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end +#endif diff --git a/common/darwin/Classes/FlutterRTCAudioSink-Interface.h b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h new file mode 100644 index 0000000000..8a0352333d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h @@ -0,0 +1,6 @@ +void RTCAudioSinkCallback (void *object, + const void *audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames); diff --git a/common/darwin/Classes/FlutterRTCAudioSink.h b/common/darwin/Classes/FlutterRTCAudioSink.h new file mode 100644 index 0000000000..34cf46669c --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink.h @@ -0,0 +1,14 @@ +#import +#import +#import + +@interface FlutterRTCAudioSink : NSObject + +@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef); +@property (nonatomic) CMAudioFormatDescriptionRef format; + +- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio; + +- (void) close; + +@end diff --git a/common/darwin/Classes/FlutterRTCAudioSink.mm b/common/darwin/Classes/FlutterRTCAudioSink.mm new file mode 100644 index 0000000000..4fb575b398 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink.mm @@ -0,0 +1,67 @@ +#import +#import "FlutterRTCAudioSink.h" +#import "RTCAudioSource+Private.h" +#include "media_stream_interface.h" +#include "audio_sink_bridge.cpp" + +@implementation FlutterRTCAudioSink { + AudioSinkBridge *_bridge; + webrtc::AudioSourceInterface* _audioSource; +} + +- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio { + self = [super init]; + rtc::scoped_refptr audioSourcePtr = audio.source.nativeAudioSource; + _audioSource = audioSourcePtr.get(); + _bridge = new 
AudioSinkBridge((void*)CFBridgingRetain(self)); + _audioSource->AddSink(_bridge); + return self; +} + +- (void) close { + _audioSource->RemoveSink(_bridge); + delete _bridge; + _bridge = nil; + _audioSource = nil; +} + +void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames) +{ + AudioBufferList audioBufferList; + AudioBuffer audioBuffer; + audioBuffer.mData = (void*) audio_data; + audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames; + audioBuffer.mNumberChannels = number_of_channels; + audioBufferList.mNumberBuffers = 1; + audioBufferList.mBuffers[0] = audioBuffer; + AudioStreamBasicDescription audioDescription; + audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels; + audioDescription.mBitsPerChannel = bits_per_sample; + audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels; + audioDescription.mChannelsPerFrame = number_of_channels; + audioDescription.mFormatID = kAudioFormatLinearPCM; + audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked; + audioDescription.mFramesPerPacket = 1; + audioDescription.mReserved = 0; + audioDescription.mSampleRate = sample_rate; + CMAudioFormatDescriptionRef formatDesc; + CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc); + CMSampleBufferRef buffer; + CMSampleTimingInfo timing; + timing.decodeTimeStamp = kCMTimeInvalid; + timing.presentationTimeStamp = CMTimeMake(0, sample_rate); + timing.duration = CMTimeMake(1, sample_rate); + CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer); + CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList); + @autoreleasepool { + FlutterRTCAudioSink* 
sink = (__bridge FlutterRTCAudioSink*)(object); + sink.format = formatDesc; + if (sink.bufferCallback != nil) { + sink.bufferCallback(buffer); + } else { + NSLog(@"Buffer callback is nil"); + } + } +} + +@end diff --git a/common/darwin/Classes/FlutterRTCDataChannel.h b/common/darwin/Classes/FlutterRTCDataChannel.h new file mode 100644 index 0000000000..2b1d685274 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.h @@ -0,0 +1,30 @@ +#import "FlutterWebRTCPlugin.h" + +@interface RTCDataChannel (Flutter) +@property(nonatomic, strong, nonnull) NSString* peerConnectionId; +@property(nonatomic, strong, nonnull) NSString* flutterChannelId; +@property(nonatomic, strong, nullable) FlutterEventSink eventSink; +@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel; +@property(nonatomic, strong, nullable) NSArray* eventQueue; +@end + +@interface FlutterWebRTCPlugin (RTCDataChannel) + +- (void)createDataChannel:(nonnull NSString*)peerConnectionId + label:(nonnull NSString*)label + config:(nonnull RTCDataChannelConfiguration*)config + messenger:(nonnull NSObject*)messenger + result:(nonnull FlutterResult)result; + +- (void)dataChannelClose:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId; + +- (void)dataChannelSend:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + data:(nonnull NSString*)data + type:(nonnull NSString*)type; + +- (void)dataChannelGetBufferedAmount:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + result:(nonnull FlutterResult)result; +@end diff --git a/common/darwin/Classes/FlutterRTCDataChannel.m b/common/darwin/Classes/FlutterRTCDataChannel.m new file mode 100644 index 0000000000..67e1083b2f --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.m @@ -0,0 +1,219 @@ +#import "FlutterRTCDataChannel.h" +#import +#import +#import "FlutterRTCPeerConnection.h" + +@implementation RTCDataChannel (Flutter) + +- 
(NSString*)peerConnectionId { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setPeerConnectionId:(NSString*)peerConnectionId { + objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSArray*)eventQueue { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventQueue:(NSArray*)eventQueue { + objc_setAssociatedObject(self, @selector(eventQueue), eventQueue, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSNumber*)flutterChannelId { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterChannelId:(NSNumber*)flutterChannelId { + objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + NSEnumerator* enumerator = [self.eventQueue objectEnumerator]; + id event; + while ((event = enumerator.nextObject) != nil) { + postEvent(sink, event); + }; + self.eventQueue = nil; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (RTCDataChannel) + +- (void)createDataChannel:(nonnull NSString*)peerConnectionId + label:(NSString*)label + config:(RTCDataChannelConfiguration*)config + 
messenger:(NSObject*)messenger + result:(nonnull FlutterResult)result { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; + + if (nil != dataChannel) { + dataChannel.peerConnectionId = peerConnectionId; + NSString* flutterId = [[NSUUID UUID] UUIDString]; + peerConnection.dataChannels[flutterId] = dataChannel; + dataChannel.flutterChannelId = flutterId; + dataChannel.delegate = self; + dataChannel.eventQueue = nil; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@", + peerConnectionId, flutterId] + binaryMessenger:messenger]; + + dataChannel.eventChannel = eventChannel; + [eventChannel setStreamHandler:dataChannel]; + + result(@{ + @"label" : label, + @"id" : [NSNumber numberWithInt:dataChannel.channelId], + @"flutterId" : flutterId + }); + } +} + +- (void)dataChannelClose:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSMutableDictionary* dataChannels = peerConnection.dataChannels; + RTCDataChannel* dataChannel = dataChannels[dataChannelId]; + if (dataChannel) { + FlutterEventChannel* eventChannel = dataChannel.eventChannel; + [dataChannel close]; + [dataChannels removeObjectForKey:dataChannelId]; + [eventChannel setStreamHandler:nil]; + dataChannel.eventChannel = nil; + } +} + +- (void)dataChannelGetBufferedAmount:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + result:(nonnull FlutterResult)result { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = peerConnection.dataChannels[dataChannelId]; + if(dataChannel == NULL || dataChannel.readyState != RTCDataChannelStateOpen) { + result([FlutterError + errorWithCode:[NSString 
stringWithFormat:@"%@Failed", @"dataChannelGetBufferedAmount"] + message:[NSString stringWithFormat:@"Error: dataChannel not found or not opened!"] + details:nil]); + } else { + result(@{@"bufferedAmount": @(dataChannel.bufferedAmount)}); + } +} + +- (void)dataChannelSend:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + data:(id)data + type:(NSString*)type { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = peerConnection.dataChannels[dataChannelId]; + + NSData* bytes = [type isEqualToString:@"binary"] ? ((FlutterStandardTypedData*)data).data + : [data dataUsingEncoding:NSUTF8StringEncoding]; + + RTCDataBuffer* buffer = [[RTCDataBuffer alloc] initWithData:bytes + isBinary:[type isEqualToString:@"binary"]]; + [dataChannel sendData:buffer]; +} + +- (NSString*)stringForDataChannelState:(RTCDataChannelState)state { + switch (state) { + case RTCDataChannelStateConnecting: + return @"connecting"; + case RTCDataChannelStateOpen: + return @"open"; + case RTCDataChannelStateClosing: + return @"closing"; + case RTCDataChannelStateClosed: + return @"closed"; + } + return nil; +} + +- (void)sendEvent:(id)event withChannel:(RTCDataChannel*)channel { + if (channel.eventSink) { + postEvent(channel.eventSink, event); + } else { + if (!channel.eventQueue) { + channel.eventQueue = [NSMutableArray array]; + } + channel.eventQueue = [channel.eventQueue arrayByAddingObject:event]; + } +} + +#pragma mark - RTCDataChannelDelegate methods + +// Called when the data channel state has changed. +- (void)dataChannelDidChangeState:(RTCDataChannel*)channel { + [self sendEvent:@{ + @"event" : @"dataChannelStateChanged", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"state" : [self stringForDataChannelState:channel.readyState] + } + withChannel:channel]; +} + +// Called when a data buffer was successfully received. 
+- (void)dataChannel:(RTCDataChannel*)channel didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer { + NSString* type; + id data; + if (buffer.isBinary) { + type = @"binary"; + data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; + } else { + type = @"text"; + data = [[NSString alloc] initWithData:buffer.data encoding:NSUTF8StringEncoding]; + } + + [self sendEvent:@{ + @"event" : @"dataChannelReceiveMessage", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"type" : type, + @"data" : (data ? data : [NSNull null]) + } + withChannel:channel]; +} + +- (void)dataChannel:(RTCDataChannel*)channel didChangeBufferedAmount:(uint64_t)amount { + [self sendEvent:@{ + @"event" : @"dataChannelBufferedAmountChange", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"bufferedAmount" : [NSNumber numberWithLongLong:channel.bufferedAmount], + @"changedAmount" : [NSNumber numberWithLongLong:amount] + } + withChannel:channel]; +} + +@end diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.h b/common/darwin/Classes/FlutterRTCDesktopCapturer.h new file mode 100644 index 0000000000..75dea33345 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.h @@ -0,0 +1,22 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif +#import +#import + +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (DesktopCapturer) + +- (void)getDisplayMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result; + +- (void)getDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result; + +- (void)updateDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result; + +- (void)getDesktopSourceThumbnail:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result; + +@end \ No newline at end of file diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.m b/common/darwin/Classes/FlutterRTCDesktopCapturer.m new file mode 100644 index 0000000000..fd88a8b689 --- 
/dev/null +++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.m @@ -0,0 +1,429 @@ +#import + +#import "FlutterRTCDesktopCapturer.h" + +#if TARGET_OS_IPHONE +#import +#import "FlutterBroadcastScreenCapturer.h" +#import "FlutterRPScreenRecorder.h" +#endif + +#import "VideoProcessingAdapter.h" +#import "LocalVideoTrack.h" + +#if TARGET_OS_OSX +RTCDesktopMediaList* _screen = nil; +RTCDesktopMediaList* _window = nil; +NSArray* _captureSources; +#endif + +@implementation FlutterWebRTCPlugin (DesktopCapturer) + +- (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result { + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + RTCVideoSource* videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES]; + NSString* trackUUID = [[NSUUID UUID] UUIDString]; + VideoProcessingAdapter *videoProcessingAdapter = [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource]; + +#if TARGET_OS_IPHONE + BOOL useBroadcastExtension = false; + BOOL presentBroadcastPicker = false; + + id videoConstraints = constraints[@"video"]; + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.deviceId + useBroadcastExtension = + [((NSDictionary*)videoConstraints)[@"deviceId"] hasPrefix:@"broadcast"]; + presentBroadcastPicker = + useBroadcastExtension && + ![((NSDictionary*)videoConstraints)[@"deviceId"] hasSuffix:@"-manual"]; + } + + id screenCapturer; + + if (useBroadcastExtension) { + screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoProcessingAdapter]; + } else { + screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:[videoProcessingAdapter source]]; + } + + [screenCapturer startCapture]; + NSLog(@"start %@ capture", useBroadcastExtension ? 
@"broadcast" : @"replykit"); + + self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) { + NSLog(@"stop %@ capture, trackID %@", useBroadcastExtension ? @"broadcast" : @"replykit", + trackUUID); + [screenCapturer stopCaptureWithCompletionHandler:handler]; + }; + + if (presentBroadcastPicker) { + NSString* extension = + [[[NSBundle mainBundle] infoDictionary] valueForKey:kRTCScreenSharingExtension]; + + RPSystemBroadcastPickerView* picker = [[RPSystemBroadcastPickerView alloc] init]; + picker.showsMicrophoneButton = false; + if (extension) { + picker.preferredExtension = extension; + } else { + NSLog(@"Not able to find the %@ key", kRTCScreenSharingExtension); + } + SEL selector = NSSelectorFromString(@"buttonPressed:"); + if ([picker respondsToSelector:selector]) { + [picker performSelector:selector withObject:nil]; + } + } +#endif + +#if TARGET_OS_OSX + /* example for constraints: + { + 'audio': false, + 'video": { + 'deviceId': {'exact': sourceId}, + 'mandatory': { + 'frameRate': 30.0 + }, + } + } + */ + NSString* sourceId = nil; + BOOL useDefaultScreen = NO; + NSInteger fps = 30; + id videoConstraints = constraints[@"video"]; + if ([videoConstraints isKindOfClass:[NSNumber class]] && [videoConstraints boolValue] == YES) { + useDefaultScreen = YES; + } else if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + NSDictionary* deviceId = videoConstraints[@"deviceId"]; + if (deviceId != nil && [deviceId isKindOfClass:[NSDictionary class]]) { + if (deviceId[@"exact"] != nil) { + sourceId = deviceId[@"exact"]; + if (sourceId == nil) { + result(@{@"error" : @"No deviceId.exact found"}); + return; + } + } + } else { + // fall back to default screen if no deviceId is specified + useDefaultScreen = YES; + } + id mandatory = videoConstraints[@"mandatory"]; + if (mandatory != nil && [mandatory isKindOfClass:[NSDictionary class]]) { + id frameRate = mandatory[@"frameRate"]; + if (frameRate != nil && [frameRate isKindOfClass:[NSNumber class]]) { + 
fps = [frameRate integerValue]; + } + } + } + RTCDesktopCapturer* desktopCapturer; + RTCDesktopSource* source = nil; + + if (useDefaultScreen) { + desktopCapturer = [[RTCDesktopCapturer alloc] initWithDefaultScreen:self + captureDelegate:videoProcessingAdapter]; + } else { + source = [self getSourceById:sourceId]; + if (source == nil) { + result(@{@"error" : [NSString stringWithFormat:@"No source found for id: %@", sourceId]}); + return; + } + desktopCapturer = [[RTCDesktopCapturer alloc] initWithSource:source + delegate:self + captureDelegate:videoProcessingAdapter]; + } + [desktopCapturer startCaptureWithFPS:fps]; + NSLog(@"start desktop capture: sourceId: %@, type: %@, fps: %lu", sourceId, + source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", fps); + + self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) { + NSLog(@"stop desktop capture: sourceId: %@, type: %@, trackID %@", sourceId, + source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", trackUUID); + [desktopCapturer stopCapture]; + handler(); + }; +#endif + + RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource + trackId:trackUUID]; + [mediaStream addVideoTrack:videoTrack]; + + LocalVideoTrack *localVideoTrack = [[LocalVideoTrack alloc] initWithTrack:videoTrack videoProcessing:videoProcessingAdapter]; + + [self.localTracks setObject:localVideoTrack forKey:trackUUID]; + + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCVideoTrack* track in mediaStream.videoTracks) { + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result( + @{@"streamId" : mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks}); +} + +- 
(void)getDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"getDesktopSources"); + + NSArray* types = [argsMap objectForKey:@"types"]; + if (types == nil) { + result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]); + return; + } + + if (![self buildDesktopSourcesListWithTypes:types forceReload:YES result:result]) { + NSLog(@"getDesktopSources failed."); + return; + } + + NSMutableArray* sources = [NSMutableArray array]; + NSEnumerator* enumerator = [_captureSources objectEnumerator]; + RTCDesktopSource* object; + while ((object = enumerator.nextObject) != nil) { + /*NSData *data = nil; + if([object thumbnail]) { + data = [[NSData alloc] init]; + NSImage *resizedImg = [self resizeImage:[object thumbnail] forSize:NSMakeSize(320, 180)]; + data = [resizedImg TIFFRepresentation]; + }*/ + [sources addObject:@{ + @"id" : object.sourceId, + @"name" : object.name, + @"thumbnailSize" : @{@"width" : @0, @"height" : @0}, + @"type" : object.sourceType == RTCDesktopSourceTypeScreen ? 
@"screen" : @"window", + //@"thumbnail": data, + }]; + } + result(@{@"sources" : sources}); +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +- (void)getDesktopSourceThumbnail:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"getDesktopSourceThumbnail"); + NSString* sourceId = argsMap[@"sourceId"]; + RTCDesktopSource* object = [self getSourceById:sourceId]; + if (object == nil) { + result(@{@"error" : @"No source found"}); + return; + } + NSImage* image = [object UpdateThumbnail]; + if (image != nil) { + NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)]; + NSData* data = [resizedImg TIFFRepresentation]; + result(data); + } else { + result(@{@"error" : @"No thumbnail found"}); + } + +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +- (void)updateDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"updateDesktopSources"); + NSArray* types = [argsMap objectForKey:@"types"]; + if (types == nil) { + result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]); + return; + } + if (![self buildDesktopSourcesListWithTypes:types forceReload:NO result:result]) { + NSLog(@"updateDesktopSources failed."); + return; + } + result(@{@"result" : @YES}); +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +#if TARGET_OS_OSX +- (NSImage*)resizeImage:(NSImage*)sourceImage forSize:(CGSize)targetSize { + CGSize imageSize = sourceImage.size; + CGFloat width = imageSize.width; + CGFloat height = imageSize.height; + CGFloat targetWidth = targetSize.width; + CGFloat targetHeight = targetSize.height; + CGFloat scaleFactor = 0.0; + CGFloat scaledWidth = targetWidth; + CGFloat scaledHeight = targetHeight; + CGPoint thumbnailPoint = CGPointMake(0.0, 0.0); + + if 
(CGSizeEqualToSize(imageSize, targetSize) == NO) { + CGFloat widthFactor = targetWidth / width; + CGFloat heightFactor = targetHeight / height; + + // scale to fit the longer + scaleFactor = (widthFactor > heightFactor) ? widthFactor : heightFactor; + scaledWidth = ceil(width * scaleFactor); + scaledHeight = ceil(height * scaleFactor); + + // center the image + if (widthFactor > heightFactor) { + thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5; + } else if (widthFactor < heightFactor) { + thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5; + } + } + + NSImage* newImage = [[NSImage alloc] initWithSize:NSMakeSize(scaledWidth, scaledHeight)]; + CGRect thumbnailRect = {thumbnailPoint, {scaledWidth, scaledHeight}}; + NSRect imageRect = NSMakeRect(0.0, 0.0, width, height); + + [newImage lockFocus]; + [sourceImage drawInRect:thumbnailRect fromRect:imageRect operation:NSCompositingOperationCopy fraction:1.0]; + [newImage unlockFocus]; + + return newImage; +} + +- (RTCDesktopSource*)getSourceById:(NSString*)sourceId { + NSEnumerator* enumerator = [_captureSources objectEnumerator]; + RTCDesktopSource* object; + while ((object = enumerator.nextObject) != nil) { + if ([sourceId isEqualToString:object.sourceId]) { + return object; + } + } + return nil; +} + +- (BOOL)buildDesktopSourcesListWithTypes:(NSArray*)types + forceReload:(BOOL)forceReload + result:(FlutterResult)result { + BOOL captureWindow = NO; + BOOL captureScreen = NO; + _captureSources = [NSMutableArray array]; + + NSEnumerator* typesEnumerator = [types objectEnumerator]; + NSString* type; + while ((type = typesEnumerator.nextObject) != nil) { + if ([type isEqualToString:@"screen"]) { + captureScreen = YES; + } else if ([type isEqualToString:@"window"]) { + captureWindow = YES; + } else { + result([FlutterError errorWithCode:@"ERROR" message:@"Invalid type" details:nil]); + return NO; + } + } + + if (!captureWindow && !captureScreen) { + result([FlutterError errorWithCode:@"ERROR" + message:@"At least one 
type is required" + details:nil]); + return NO; + } + + if (captureWindow) { + if (!_window) + _window = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeWindow delegate:self]; + [_window UpdateSourceList:forceReload updateAllThumbnails:YES]; + NSArray* sources = [_window getSources]; + _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources]; + } + if (captureScreen) { + if (!_screen) + _screen = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeScreen delegate:self]; + [_screen UpdateSourceList:forceReload updateAllThumbnails:YES]; + NSArray* sources = [_screen getSources]; + _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources]; + } + NSLog(@"captureSources: %lu", [_captureSources count]); + return YES; +} + +#pragma mark - RTCDesktopMediaListDelegate delegate + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceAdded: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + NSImage* image = [source UpdateThumbnail]; + NSData* data = [[NSData alloc] init]; + if (image != nil) { + NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)]; + data = [resizedImg TIFFRepresentation]; + } + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceAdded", + @"id" : source.sourceId, + @"name" : source.name, + @"thumbnailSize" : @{@"width" : @0, @"height" : @0}, + @"type" : source.sourceType == RTCDesktopSourceTypeScreen ? 
@"screen" : @"window", + @"thumbnail" : data + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceRemoved: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceRemoved", + @"id" : source.sourceId, + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceNameChanged: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceNameChanged", + @"id" : source.sourceId, + @"name" : source.name, + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceThumbnailChanged: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + NSImage* resizedImg = [self resizeImage:[source thumbnail] forSize:NSMakeSize(320, 180)]; + NSData* data = [resizedImg TIFFRepresentation]; + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceThumbnailChanged", + @"id" : source.sourceId, + @"thumbnail" : data + }); + } +} + +#pragma mark - RTCDesktopCapturerDelegate delegate + +- (void)didSourceCaptureStart:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureStart"); +} + +- (void)didSourceCapturePaused:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCapturePaused"); +} + +- (void)didSourceCaptureStop:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureStop"); +} + +- (void)didSourceCaptureError:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureError"); +} + +#endif + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.h b/common/darwin/Classes/FlutterRTCFrameCapturer.h new file mode 
100644 index 0000000000..7cc0ff28c2 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1,17 @@ +#import + +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +@interface FlutterRTCFrameCapturer : NSObject + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result; + ++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame; + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.m b/common/darwin/Classes/FlutterRTCFrameCapturer.m new file mode 100644 index 0000000000..fe748b1223 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1,175 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import "FlutterRTCFrameCapturer.h" + +@import CoreImage; +@import CoreVideo; + +@implementation FlutterRTCFrameCapturer { + RTCVideoTrack* _track; + NSString* _path; + FlutterResult _result; + bool _gotFrame; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result { + self = [super init]; + if (self) { + _gotFrame = false; + _track = track; + _path = path; + _result = result; + [track addRenderer:self]; + } + return self; +} + +- (void)setSize:(CGSize)size { +} + +- (void)renderFrame:(nullable RTCVideoFrame*)frame { + if (_gotFrame || frame == nil) + return; + _gotFrame = true; + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef; + bool shouldRelease; + if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; + shouldRelease = true; + } else { + pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer; + shouldRelease = false; + } + CIImage* ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; + CGRect outputSize; + if (@available(iOS 11, macOS 10.13, *)) { + switch (frame.rotation) { + case RTCVideoRotation_90: + ciImage = [ciImage 
imageByApplyingCGOrientation:kCGImagePropertyOrientationRight]; + outputSize = CGRectMake(0, 0, frame.height, frame.width); + break; + case RTCVideoRotation_180: + ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationDown]; + outputSize = CGRectMake(0, 0, frame.width, frame.height); + break; + case RTCVideoRotation_270: + ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationLeft]; + outputSize = CGRectMake(0, 0, frame.height, frame.width); + break; + default: + outputSize = CGRectMake(0, 0, frame.width, frame.height); + break; + } + } else { + outputSize = CGRectMake(0, 0, frame.width, frame.height); + } + CIContext* tempContext = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize]; + NSData* imageData; +#if TARGET_OS_IPHONE + UIImage* uiImage = [UIImage imageWithCGImage:cgImage]; + if ([[_path pathExtension] isEqualToString:@"jpg"]) { + imageData = UIImageJPEGRepresentation(uiImage, 1.0f); + } else { + imageData = UIImagePNGRepresentation(uiImage); + } +#else + NSBitmapImageRep* newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; + [newRep setSize:NSSizeToCGSize(outputSize.size)]; + NSDictionary* quality = @{NSImageCompressionFactor : @1.0f}; + if ([[_path pathExtension] isEqualToString:@"jpg"]) { + imageData = [newRep representationUsingType:NSBitmapImageFileTypeJPEG properties:quality]; + } else { + imageData = [newRep representationUsingType:NSBitmapImageFileTypePNG properties:quality]; + } +#endif + CGImageRelease(cgImage); + if (shouldRelease) + CVPixelBufferRelease(pixelBufferRef); + if (imageData && [imageData writeToFile:_path atomically:NO]) { + NSLog(@"File writed successfully to %@", _path); + _result(nil); + } else { + NSLog(@"Failed to write to file"); + _result([FlutterError errorWithCode:@"CaptureFrameFailed" + message:@"Failed to write image data to file" + details:nil]); + } + dispatch_async(dispatch_get_main_queue(), ^{ + 
[self->_track removeRenderer:self]; + self->_track = nil; + }); +} + ++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame { + id i420Buffer = [frame.buffer toI420]; + CVPixelBufferRef outputPixelBuffer; + size_t w = (size_t)roundf(i420Buffer.width); + size_t h = (size_t)roundf(i420Buffer.height); + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, w, h, kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), &outputPixelBuffer); + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + [RTCYUVHelper I420ToARGB:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstARGB:dst + dstStrideARGB:(int)bytesPerRow + width:i420Buffer.width + 
height:i420Buffer.height]; + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + [RTCYUVHelper I420ToBGRA:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstBGRA:dst + dstStrideBGRA:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + } + } + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); + return outputPixelBuffer; +} + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCryptor.h b/common/darwin/Classes/FlutterRTCFrameCryptor.h new file mode 100644 index 0000000000..0e7a2f6007 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCryptor.h @@ -0,0 +1,51 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import + +#import "FlutterWebRTCPlugin.h" + +@interface RTCFrameCryptor (Flutter) +@property(nonatomic, strong, nullable) FlutterEventSink eventSink; +@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel; +@end + + +@interface FlutterWebRTCPlugin (FrameCryptor) + +- (void)handleFrameCryptorMethodCall:(nonnull FlutterMethodCall*)call result:(nonnull FlutterResult)result; + +- (void)frameCryptorFactoryCreateFrameCryptor:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorSetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorGetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorSetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorGetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorDispose:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)frameCryptorFactoryCreateKeyProvider:(nonnull NSDictionary*)constraints + result:(nonnull 
FlutterResult)result; + +- (void)keyProviderSetKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)keyProviderRatchetKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +- (void)keyProviderDispose:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result; + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCryptor.m b/common/darwin/Classes/FlutterRTCFrameCryptor.m new file mode 100644 index 0000000000..b75afc12aa --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCryptor.m @@ -0,0 +1,603 @@ +#import "FlutterRTCFrameCryptor.h" + +#import + +@implementation RTCFrameCryptor (Flutter) + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (FrameCryptor) + +- (void)handleFrameCryptorMethodCall:(nonnull FlutterMethodCall*)call + result:(nonnull FlutterResult)result { + NSDictionary* constraints = call.arguments; + NSString* method = call.method; + if ([method isEqualToString:@"frameCryptorFactoryCreateFrameCryptor"]) { + [self frameCryptorFactoryCreateFrameCryptor:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorSetKeyIndex"]) { + 
[self frameCryptorSetKeyIndex:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorGetKeyIndex"]) { + [self frameCryptorGetKeyIndex:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorSetEnabled"]) { + [self frameCryptorSetEnabled:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorGetEnabled"]) { + [self frameCryptorGetEnabled:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorDispose"]) { + [self frameCryptorDispose:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorFactoryCreateKeyProvider"]) { + [self frameCryptorFactoryCreateKeyProvider:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetSharedKey"]) { + [self keyProviderSetSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderRatchetSharedKey"]) { + [self keyProviderRatchetSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderExportSharedKey"]) { + [self keyProviderExportSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetKey"]) { + [self keyProviderSetKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderRatchetKey"]) { + [self keyProviderRatchetKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderExportKey"]) { + [self keyProviderExportKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetSifTrailer"]) { + [self keyProviderSetSifTrailer:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderDispose"]) { + [self keyProviderDispose:constraints result:result]; + } else { + result(FlutterMethodNotImplemented); + } +} + +- (RTCCryptorAlgorithm)getAlgorithm:(NSNumber*)algorithm { + switch ([algorithm intValue]) { + case 0: + return RTCCryptorAlgorithmAesGcm; + default: + return RTCCryptorAlgorithmAesGcm; + } +} + +- 
(void)frameCryptorFactoryCreateFrameCryptor:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* peerConnectionId = constraints[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSNumber* algorithm = constraints[@"algorithm"]; + if (algorithm == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid algorithm" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSString* keyProviderId = constraints[@"keyProviderId"]; + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid keyProviderId" + details:nil]); + return; + } + + RTCFrameCryptorKeyProvider* keyProvider = self.keyProviders[keyProviderId]; + if (keyProvider == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid keyProvider" + details:nil]); + return; + } + + NSString* type = constraints[@"type"]; + NSString* rtpSenderId = constraints[@"rtpSenderId"]; + NSString* rtpReceiverId = constraints[@"rtpReceiverId"]; + + if ([type isEqualToString:@"sender"]) { + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:rtpSenderId]; + if (sender == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + + RTCFrameCryptor* frameCryptor = + [[RTCFrameCryptor alloc] 
initWithFactory:self.peerConnectionFactory + rtpSender:sender + participantId:participantId + algorithm:[self getAlgorithm:algorithm] + keyProvider:keyProvider]; + NSString* frameCryptorId = [[NSUUID UUID] UUIDString]; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/frameCryptorEvent%@", + frameCryptorId] + binaryMessenger:self.messenger]; + + frameCryptor.eventChannel = eventChannel; + [eventChannel setStreamHandler:frameCryptor]; + frameCryptor.delegate = self; + + self.frameCryptors[frameCryptorId] = frameCryptor; + result(@{@"frameCryptorId" : frameCryptorId}); + } else if ([type isEqualToString:@"receiver"]) { + RTCRtpReceiver* receiver = [self getRtpReceiverById:peerConnection Id:rtpReceiverId]; + if (receiver == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: receiver not found!"] + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = + [[RTCFrameCryptor alloc] initWithFactory:self.peerConnectionFactory + rtpReceiver:receiver + participantId:participantId + algorithm:[self getAlgorithm:algorithm] + keyProvider:keyProvider]; + NSString* frameCryptorId = [[NSUUID UUID] UUIDString]; + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/frameCryptorEvent%@", + frameCryptorId] + binaryMessenger:self.messenger]; + + frameCryptor.eventChannel = eventChannel; + [eventChannel setStreamHandler:frameCryptor]; + frameCryptor.delegate = self; + self.frameCryptors[frameCryptorId] = frameCryptor; + result(@{@"frameCryptorId" : frameCryptorId}); + } else { + result([FlutterError errorWithCode:@"InvalidArgument" message:@"Invalid type" details:nil]); + return; + } +} + +- (void)frameCryptorSetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = 
constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + [frameCryptor setKeyIndex:[keyIndex intValue]]; + result(@{@"result" : @YES}); +} + +- (void)frameCryptorGetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetKeyIndexFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetKeyIndexFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + result(@{@"keyIndex" : [NSNumber numberWithInt:frameCryptor.keyIndex]}); +} + +- (void)frameCryptorSetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + + NSNumber* enabled = constraints[@"enabled"]; + 
if (enabled == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid enabled" + details:nil]); + return; + } + frameCryptor.enabled = [enabled boolValue]; + result(@{@"result" : enabled}); +} + +- (void)frameCryptorGetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetEnabledFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetEnabledFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + result(@{@"enabled" : [NSNumber numberWithBool:frameCryptor.enabled]}); +} + +- (void)frameCryptorDispose:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorDisposeFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorDisposeFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + [self.frameCryptors removeObjectForKey:frameCryptorId]; + frameCryptor.enabled = NO; + result(@{@"result" : @"success"}); +} + +- (void)frameCryptorFactoryCreateKeyProvider:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* keyProviderId = [[NSUUID UUID] UUIDString]; + + id keyProviderOptions = constraints[@"keyProviderOptions"]; + if (keyProviderOptions == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid keyProviderOptions" + details:nil]); + return; 
+ } + + NSNumber* sharedKey = keyProviderOptions[@"sharedKey"]; + if (sharedKey == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid sharedKey" + details:nil]); + return; + } + + FlutterStandardTypedData* ratchetSalt = keyProviderOptions[@"ratchetSalt"]; + if (ratchetSalt == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid ratchetSalt" + details:nil]); + return; + } + + NSNumber* ratchetWindowSize = keyProviderOptions[@"ratchetWindowSize"]; + if (ratchetWindowSize == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid ratchetWindowSize" + details:nil]); + return; + } + + NSNumber* failureTolerance = keyProviderOptions[@"failureTolerance"]; + + FlutterStandardTypedData* uncryptedMagicBytes = keyProviderOptions[@"uncryptedMagicBytes"]; + + NSNumber* keyRingSize = keyProviderOptions[@"keyRingSize"]; + + NSNumber* discardFrameWhenCryptorNotReady = keyProviderOptions[@"discardFrameWhenCryptorNotReady"]; + + RTCFrameCryptorKeyProvider* keyProvider = + [[RTCFrameCryptorKeyProvider alloc] initWithRatchetSalt:ratchetSalt.data + ratchetWindowSize:[ratchetWindowSize intValue] + sharedKeyMode:[sharedKey boolValue] + uncryptedMagicBytes: uncryptedMagicBytes != nil ? uncryptedMagicBytes.data : nil + failureTolerance:failureTolerance != nil ? [failureTolerance intValue] : -1 + keyRingSize:keyRingSize != nil ? [keyRingSize intValue] : 0 + discardFrameWhenCryptorNotReady:discardFrameWhenCryptorNotReady != nil ? 
[discardFrameWhenCryptorNotReady boolValue] : NO]; + self.keyProviders[keyProviderId] = keyProvider; + result(@{@"keyProviderId" : keyProviderId}); +} + +-(nullable RTCFrameCryptorKeyProvider *) getKeyProviderForId:(NSString*)keyProviderId result:(nonnull FlutterResult)result { + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProviderId" + details:nil]); + return nil; + } + RTCFrameCryptorKeyProvider* keyProvider = self.keyProviders[keyProviderId]; + if (keyProvider == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProvider" + details:nil]); + return nil; + } + return keyProvider; +} + +- (void)keyProviderSetSharedKey:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + FlutterStandardTypedData* key = constraints[@"key"]; + if (key == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid key" + details:nil]); + return; + } + + [keyProvider setSharedKey:key.data withIndex:[keyIndex intValue]]; + result(@{@"result" : @YES}); +} + +- (void)keyProviderRatchetSharedKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetSharedKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSData* newKey = [keyProvider 
ratchetSharedKey:[keyIndex intValue]]; + result(@{@"result" : newKey}); +} + + +- (void)keyProviderExportSharedKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderExportSharedKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSData* key = [keyProvider exportSharedKey:[keyIndex intValue]]; + result(@{@"result" : key}); +} + +- (void)keyProviderSetKey:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + FlutterStandardTypedData* key = constraints[@"key"]; + if (key == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid key" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + [keyProvider setKey:key.data withIndex:[keyIndex intValue] forParticipant:participantId]; + result(@{@"result" : @YES}); +} + +- (void)keyProviderRatchetKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if 
(keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSData* newKey = [keyProvider ratchetKey:participantId withIndex:[keyIndex intValue]]; + result(@{@"result" : newKey}); +} + +- (void)keyProviderExportKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderExportKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderExportKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSData* key = [keyProvider exportKey:participantId withIndex:[keyIndex intValue]]; + result(@{@"result" : key}); +} + +- (void)keyProviderSetSifTrailer:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider * keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] result:result]; + if(keyProvider == nil) { + return; + } + + FlutterStandardTypedData* sifTrailer = constraints[@"sifTrailer"]; + if (sifTrailer == nil) { + result([FlutterError errorWithCode:@"keyProviderSetSifTrailerFailed" + message:@"Invalid key" + details:nil]); + return; + } + + [keyProvider setSifTrailer:sifTrailer.data]; + result(nil); +} + +- (void)keyProviderDispose:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + NSString* 
keyProviderId = constraints[@"keyProviderId"]; + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProviderId" + details:nil]); + return; + } + [self.keyProviders removeObjectForKey:keyProviderId]; + result(@{@"result" : @"success"}); +} + +- (NSString*)stringFromState:(FrameCryptionState)state { + switch (state) { + case FrameCryptionStateNew: + return @"new"; + case FrameCryptionStateOk: + return @"ok"; + case FrameCryptionStateEncryptionFailed: + return @"encryptionFailed"; + case FrameCryptionStateDecryptionFailed: + return @"decryptionFailed"; + case FrameCryptionStateMissingKey: + return @"missingKey"; + case FrameCryptionStateKeyRatcheted: + return @"keyRatcheted"; + case FrameCryptionStateInternalError: + return @"internalError"; + default: + return @"unknown"; + } +} + +#pragma mark - RTCFrameCryptorDelegate methods + +- (void)frameCryptor:(RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor + didStateChangeWithParticipantId:(NSString*)participantId + withState:(FrameCryptionState)stateChanged { + if (frameCryptor.eventSink) { + postEvent(frameCryptor.eventSink, @{ + @"event" : @"frameCryptionStateChanged", + @"participantId" : participantId, + @"state" : [self stringFromState:stateChanged] + }); + } +} + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.h b/common/darwin/Classes/FlutterRTCMediaRecorder.h new file mode 100644 index 0000000000..eac82e8b4d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaRecorder.h @@ -0,0 +1,24 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif +#import + +@import Foundation; +@import AVFoundation; + +@interface FlutterRTCMediaRecorder : NSObject + +@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack; +@property(nonatomic, strong) NSURL* _Nonnull output; +@property(nonatomic, strong) AVAssetWriter* _Nullable assetWriter; +@property(nonatomic, strong) AVAssetWriterInput* _Nullable writerInput; + +- 
(instancetype _Nonnull)initWithVideoTrack:(RTCVideoTrack* _Nullable)video + audioTrack:(RTCAudioTrack* _Nullable)audio + outputFile:(NSURL* _Nonnull)out; + +- (void)stop:(_Nonnull FlutterResult)result; + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.m b/common/darwin/Classes/FlutterRTCMediaRecorder.m new file mode 100644 index 0000000000..7661aae519 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaRecorder.m @@ -0,0 +1,168 @@ +#import +#import "FlutterRTCMediaRecorder.h" +#import "FlutterRTCAudioSink.h" +#import "FlutterRTCFrameCapturer.h" + +@import AVFoundation; + +@implementation FlutterRTCMediaRecorder { + int framesCount; + bool isInitialized; + CGSize _renderSize; + FlutterRTCAudioSink* _audioSink; + AVAssetWriterInput* _audioWriter; + int64_t _startTime; +} + +- (instancetype)initWithVideoTrack:(RTCVideoTrack *)video audioTrack:(RTCAudioTrack *)audio outputFile:(NSURL *)out { + self = [super init]; + isInitialized = false; + self.videoTrack = video; + self.output = out; + [video addRenderer:self]; + framesCount = 0; + if (audio != nil) + _audioSink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:audio]; + else + NSLog(@"Audio track is nil"); + _startTime = -1; + return self; +} + +- (void)initialize:(CGSize)size { + _renderSize = size; + NSDictionary *videoSettings = @{ + AVVideoCompressionPropertiesKey: @{AVVideoAverageBitRateKey: @(6*1024*1024)}, + AVVideoCodecKey: AVVideoCodecTypeH264, + AVVideoHeightKey: @(size.height), + AVVideoWidthKey: @(size.width), + }; + self.writerInput = [[AVAssetWriterInput alloc] + initWithMediaType:AVMediaTypeVideo + outputSettings:videoSettings]; + self.writerInput.expectsMediaDataInRealTime = true; + self.writerInput.mediaTimeScale = 30; + + if (_audioSink != nil) { + AudioChannelLayout acl; + bzero(&acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + NSDictionary* audioSettings = @{ + AVFormatIDKey: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], + AVNumberOfChannelsKey: 
@1, + AVSampleRateKey: @44100.0, + AVChannelLayoutKey: [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)], + AVEncoderBitRateKey: @64000, + }; + _audioWriter = [[AVAssetWriterInput alloc] + initWithMediaType:AVMediaTypeAudio + outputSettings:audioSettings + sourceFormatHint:_audioSink.format]; + _audioWriter.expectsMediaDataInRealTime = true; + } + + NSError *error; + self.assetWriter = [[AVAssetWriter alloc] + initWithURL:self.output + fileType:AVFileTypeMPEG4 + error:&error]; + if (error != nil) + NSLog(@"%@",[error localizedDescription]); + self.assetWriter.shouldOptimizeForNetworkUse = true; + [self.assetWriter addInput:self.writerInput]; + if (_audioWriter != nil) { + [self.assetWriter addInput:_audioWriter]; + _audioSink.bufferCallback = ^(CMSampleBufferRef buffer){ + if (self->_audioWriter.readyForMoreMediaData) { + if ([self->_audioWriter appendSampleBuffer:buffer]) + NSLog(@"Audio frame appended"); + else + NSLog(@"Audioframe not appended %@", self.assetWriter.error); + } + }; + } + [self.assetWriter startWriting]; + [self.assetWriter startSessionAtSourceTime:kCMTimeZero]; + + isInitialized = true; +} + +- (void)setSize:(CGSize)size { +} + +- (void)renderFrame:(nullable RTCVideoFrame *)frame { + if (frame == nil) { + return; + } + if (!isInitialized) { + [self initialize:CGSizeMake((CGFloat) frame.width, (CGFloat) frame.height)]; + } + if (!self.writerInput.readyForMoreMediaData) { + NSLog(@"Drop frame, not ready"); + return; + } + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef; + BOOL shouldRelease = false; + if ([buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; + } else { + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; + shouldRelease = true; + } + CMVideoFormatDescriptionRef formatDescription; + OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBufferRef, &formatDescription); + + CMSampleTimingInfo 
timingInfo; + + timingInfo.decodeTimeStamp = kCMTimeInvalid; + if (_startTime == -1) { + _startTime = frame.timeStampNs / 1000; + } + int64_t frameTime = (frame.timeStampNs / 1000) - _startTime; + timingInfo.presentationTimeStamp = CMTimeMake(frameTime, 1000000); + framesCount++; + + CMSampleBufferRef outBuffer; + + status = CMSampleBufferCreateReadyWithImageBuffer( + kCFAllocatorDefault, + pixelBufferRef, + formatDescription, + &timingInfo, + &outBuffer + ); + + if (![self.writerInput appendSampleBuffer:outBuffer]) { + NSLog(@"Frame not appended %@", self.assetWriter.error); + } + #if TARGET_OS_IPHONE + if (shouldRelease) { + CVPixelBufferRelease(pixelBufferRef); + } + #endif +} + +- (void)stop:(FlutterResult _Nonnull) result { + if (_audioSink != nil) { + _audioSink.bufferCallback = nil; + [_audioSink close]; + } + [self.videoTrack removeRenderer:self]; + [self.writerInput markAsFinished]; + [_audioWriter markAsFinished]; + dispatch_async(dispatch_get_main_queue(), ^{ + [self.assetWriter finishWritingWithCompletionHandler:^{ + NSError* error = self.assetWriter.error; + if (error == nil) { + result(nil); + } else { + result([FlutterError errorWithCode:@"Failed to save recording" + message:[error localizedDescription] + details:nil]); + } + }]; + }); +} + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.h b/common/darwin/Classes/FlutterRTCMediaStream.h new file mode 100644 index 0000000000..95a6c960b0 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaStream.h @@ -0,0 +1,23 @@ +#import +#import "FlutterWebRTCPlugin.h" + +@interface RTCMediaStreamTrack (Flutter) +@property(nonatomic, strong, nonnull) id settings; +@end + +@interface FlutterWebRTCPlugin (RTCMediaStream) + +- (void)getUserMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result; + +- (void)createLocalMediaStream:(nonnull FlutterResult)result; + +- (void)getSources:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackCaptureFrame:(nonnull 
RTCMediaStreamTrack*)track + toPath:(nonnull NSString*)path + result:(nonnull FlutterResult)result; + +- (void)selectAudioInput:(nonnull NSString*)deviceId result:(nullable FlutterResult)result; + +- (void)selectAudioOutput:(nonnull NSString*)deviceId result:(nullable FlutterResult)result; +@end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.m b/common/darwin/Classes/FlutterRTCMediaStream.m new file mode 100644 index 0000000000..5fb81eda23 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaStream.m @@ -0,0 +1,990 @@ +#import +#import "AudioUtils.h" +#import "CameraUtils.h" +#import "FlutterRTCFrameCapturer.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCPeerConnection.h" +#import "VideoProcessingAdapter.h" +#import "LocalVideoTrack.h" +#import "LocalAudioTrack.h" + +@implementation RTCMediaStreamTrack (Flutter) + +- (id)settings { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setSettings:(id)settings { + objc_setAssociatedObject(self, @selector(settings), settings, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} +@end + +@implementation AVCaptureDevice (Flutter) + +- (NSString*)positionString { + switch (self.position) { + case AVCaptureDevicePositionUnspecified: + return @"unspecified"; + case AVCaptureDevicePositionBack: + return @"back"; + case AVCaptureDevicePositionFront: + return @"front"; + } + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCMediaStream) + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} + */ +typedef void (^NavigatorUserMediaErrorCallback)(NSString* errorType, NSString* errorMessage); + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} + */ +typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream* mediaStream); + +- (NSDictionary*)defaultVideoConstraints { + return @{@"minWidth" : @"1280", @"minHeight" : @"720", @"minFrameRate" : @"30"}; +} + +- (NSDictionary*)defaultAudioConstraints { + return 
@{}; +} + + +- (RTCMediaConstraints*)defaultMediaStreamConstraints { + RTCMediaConstraints* constraints = + [[RTCMediaConstraints alloc] initWithMandatoryConstraints:[self defaultVideoConstraints] + optionalConstraints:nil]; + return constraints; +} + + +- (NSArray *) captureDevices { + if (@available(iOS 13.0, macOS 10.15, macCatalyst 14.0, tvOS 17.0, *)) { + NSArray *deviceTypes = @[ +#if TARGET_OS_IPHONE + AVCaptureDeviceTypeBuiltInTripleCamera, + AVCaptureDeviceTypeBuiltInDualCamera, + AVCaptureDeviceTypeBuiltInDualWideCamera, + AVCaptureDeviceTypeBuiltInWideAngleCamera, + AVCaptureDeviceTypeBuiltInTelephotoCamera, + AVCaptureDeviceTypeBuiltInUltraWideCamera, +#else + AVCaptureDeviceTypeBuiltInWideAngleCamera, +#endif + ]; + +#if !defined(TARGET_OS_IPHONE) + if (@available(macOS 13.0, *)) { + deviceTypes = [deviceTypes arrayByAddingObject:AVCaptureDeviceTypeDeskViewCamera]; + } +#endif + + if (@available(iOS 17.0, macOS 14.0, tvOS 17.0, *)) { + deviceTypes = [deviceTypes arrayByAddingObjectsFromArray: @[ + AVCaptureDeviceTypeContinuityCamera, + AVCaptureDeviceTypeExternal, + ]]; + } + + return [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified].devices; + } + return @[]; +} + +/** + * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the audio-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCAudioTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. 
+ * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCAudioTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. + */ +- (void)getUserAudio:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + id audioConstraints = constraints[@"audio"]; + NSString* audioDeviceId = @""; + RTCMediaConstraints *rtcConstraints; + if ([audioConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.audio.deviceId + NSString* deviceId = audioConstraints[@"deviceId"]; + + if (deviceId) { + audioDeviceId = deviceId; + } + + rtcConstraints = [self parseMediaConstraints:audioConstraints]; + // constraints.audio.optional.sourceId + id optionalConstraints = audioConstraints[@"optional"]; + if (optionalConstraints && [optionalConstraints isKindOfClass:[NSArray class]] && + !deviceId) { + NSArray* options = optionalConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString* sourceId = ((NSDictionary*)item)[@"sourceId"]; + if (sourceId) { + audioDeviceId = sourceId; + } + } + } + } + } else { + rtcConstraints = [self parseMediaConstraints:[self defaultAudioConstraints]]; + } + +#if !defined(TARGET_OS_IPHONE) + if (audioDeviceId != nil) { + [self selectAudioInput:audioDeviceId result:nil]; + } +#endif + + NSString* trackId = [[NSUUID UUID] UUIDString]; + RTCAudioSource *audioSource = [self.peerConnectionFactory audioSourceWithConstraints:rtcConstraints]; + RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithSource:audioSource trackId:trackId]; + LocalAudioTrack *localAudioTrack = [[LocalAudioTrack alloc] initWithTrack:audioTrack]; + + audioTrack.settings = @{ + @"deviceId" : audioDeviceId, + @"kind" : 
@"audioinput", + @"autoGainControl" : @YES, + @"echoCancellation" : @YES, + @"noiseSuppression" : @YES, + @"channelCount" : @1, + @"latency" : @0, + }; + + [mediaStream addAudioTrack:audioTrack]; + + [self.localTracks setObject:localAudioTrack forKey:trackId]; + + [self ensureAudioSession]; + + successCallback(mediaStream); +} + +// TODO: Use RCTConvert for constraints ... +- (void)getUserMedia:(NSDictionary*)constraints result:(FlutterResult)result { + // Initialize RTCMediaStream with a unique label in order to allow multiple + // RTCMediaStream instances initialized by multiple getUserMedia calls to be + // added to 1 RTCPeerConnection instance. As suggested by + // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good + // practice, use a UUID (conforming to RFC4122). + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + [self getUserMedia:constraints + successCallback:^(RTCMediaStream* mediaStream) { + NSString* mediaStreamId = mediaStream.streamId; + + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCAudioTrack* track in mediaStream.audioTracks) { + [audioTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live", + @"settings" : track.settings + }]; + } + + for (RTCVideoTrack* track in mediaStream.videoTracks) { + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live", + @"settings" : track.settings + }]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result(@{ + @"streamId" : mediaStreamId, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks + }); + } + errorCallback:^(NSString* errorType, 
NSString* errorMessage) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] + message:errorMessage + details:nil]); + } + mediaStream:mediaStream]; +} + +/** + * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which + * satisfies specific constraints and adds it to a specific + * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track + * of the respective media type and the specified {@code constraints} specify + * that a track of the respective media type is required; otherwise, reports + * success for the specified {@code mediaStream} to a specific + * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media + * type-specific iteration of or successfully concludes the + * {@code getUserMedia()} algorithm. The method will be recursively invoked to + * conclude the whole {@code getUserMedia()} algorithm either with (successful) + * satisfaction of the specified {@code constraints} or with failure. + * + * @param constraints The {@code MediaStreamConstraints} which specifies the + * requested media types and which the new {@code RTCAudioTrack} or + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm. + */ +- (void)getUserMedia:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + // If mediaStream contains no audioTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local audio content. 
+ if (mediaStream.audioTracks.count == 0) { + // constraints.audio + id audioConstraints = constraints[@"audio"]; + BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; + if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { + [self requestAccessForMediaType:AVMediaTypeAudio + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } + } + + // If mediaStream contains no videoTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local video content. + if (mediaStream.videoTracks.count == 0) { + // constraints.video + id videoConstraints = constraints[@"video"]; + if (videoConstraints) { + BOOL requestAccessForVideo = [videoConstraints isKindOfClass:[NSNumber class]] + ? [videoConstraints boolValue] + : [videoConstraints isKindOfClass:[NSDictionary class]]; +#if !TARGET_IPHONE_SIMULATOR + if (requestAccessForVideo) { + [self requestAccessForMediaType:AVMediaTypeVideo + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } +#endif + } + } + + // There are audioTracks and/or videoTracks in mediaStream as requested by + // constraints so the getUserMedia() is to conclude with success. 
+ successCallback(mediaStream); +} + +- (int)getConstrainInt:(NSDictionary*)constraints forKey:(NSString*)key { + if (![constraints isKindOfClass:[NSDictionary class]]) { + return 0; + } + + id constraint = constraints[key]; + if ([constraint isKindOfClass:[NSNumber class]]) { + return [constraint intValue]; + } else if ([constraint isKindOfClass:[NSString class]]) { + int possibleValue = [constraint intValue]; + if (possibleValue != 0) { + return possibleValue; + } + } else if ([constraint isKindOfClass:[NSDictionary class]]) { + id idealConstraint = constraint[@"ideal"]; + if ([idealConstraint isKindOfClass:[NSString class]]) { + int possibleValue = [idealConstraint intValue]; + if (possibleValue != 0) { + return possibleValue; + } + } + } + + return 0; +} + +/** + * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the video-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCVideoTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. 
+ */ +- (void)getUserVideo:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + id videoConstraints = constraints[@"video"]; + AVCaptureDevice* videoDevice; + NSString* videoDeviceId = nil; + NSString* facingMode = nil; + NSArray* captureDevices = [self captureDevices]; + + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.deviceId + NSString* deviceId = videoConstraints[@"deviceId"]; + + if (deviceId) { + for (AVCaptureDevice *device in captureDevices) { + if( [deviceId isEqualToString:device.uniqueID]) { + videoDevice = device; + videoDeviceId = deviceId; + } + } + } + + // constraints.video.optional + id optionalVideoConstraints = videoConstraints[@"optional"]; + if (optionalVideoConstraints && [optionalVideoConstraints isKindOfClass:[NSArray class]] && + !videoDevice) { + NSArray* options = optionalVideoConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString* sourceId = ((NSDictionary*)item)[@"sourceId"]; + if (sourceId) { + for (AVCaptureDevice *device in captureDevices) { + if( [sourceId isEqualToString:device.uniqueID]) { + videoDevice = device; + videoDeviceId = sourceId; + } + } + if (videoDevice) { + break; + } + } + } + } + } + + if (!videoDevice) { + // constraints.video.facingMode + // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode + facingMode = videoConstraints[@"facingMode"]; + if (facingMode && [facingMode isKindOfClass:[NSString class]]) { + AVCaptureDevicePosition position; + if ([facingMode isEqualToString:@"environment"]) { + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionBack; + } else if ([facingMode isEqualToString:@"user"]) { + self._usingFrontCamera = YES; + position = AVCaptureDevicePositionFront; + } else { + // If the specified facingMode value is not supported, 
fall back to + // the default video device. + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionUnspecified; + } + videoDevice = [self findDeviceForPosition:position]; + } + } + } + + if ([videoConstraints isKindOfClass:[NSNumber class]]) { + videoConstraints = @{@"mandatory": [self defaultVideoConstraints]}; + } + + NSInteger targetWidth = 0; + NSInteger targetHeight = 0; + NSInteger targetFps = 0; + + if (!videoDevice) { + videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + + int possibleWidth = [self getConstrainInt:videoConstraints forKey:@"width"]; + if (possibleWidth != 0) { + targetWidth = possibleWidth; + } + + int possibleHeight = [self getConstrainInt:videoConstraints forKey:@"height"]; + if (possibleHeight != 0) { + targetHeight = possibleHeight; + } + + int possibleFps = [self getConstrainInt:videoConstraints forKey:@"frameRate"]; + if (possibleFps != 0) { + targetFps = possibleFps; + } + + id mandatory = + [videoConstraints isKindOfClass:[NSDictionary class]] ? 
videoConstraints[@"mandatory"] : nil; + + // constraints.video.mandatory + if (mandatory && [mandatory isKindOfClass:[NSDictionary class]]) { + id widthConstraint = mandatory[@"minWidth"]; + if ([widthConstraint isKindOfClass:[NSString class]] || + [widthConstraint isKindOfClass:[NSNumber class]]) { + int possibleWidth = [widthConstraint intValue]; + if (possibleWidth != 0) { + targetWidth = possibleWidth; + } + } + id heightConstraint = mandatory[@"minHeight"]; + if ([heightConstraint isKindOfClass:[NSString class]] || + [heightConstraint isKindOfClass:[NSNumber class]]) { + int possibleHeight = [heightConstraint intValue]; + if (possibleHeight != 0) { + targetHeight = possibleHeight; + } + } + id fpsConstraint = mandatory[@"minFrameRate"]; + if ([fpsConstraint isKindOfClass:[NSString class]] || + [fpsConstraint isKindOfClass:[NSNumber class]]) { + int possibleFps = [fpsConstraint intValue]; + if (possibleFps != 0) { + targetFps = possibleFps; + } + } + } + + if (videoDevice) { + RTCVideoSource* videoSource = [self.peerConnectionFactory videoSource]; +#if TARGET_OS_OSX + if (self.videoCapturer) { + [self.videoCapturer stopCapture]; + } +#endif + + VideoProcessingAdapter *videoProcessingAdapter = [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource]; + self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoProcessingAdapter]; + + AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice + targetWidth:targetWidth + targetHeight:targetHeight]; + + CMVideoDimensions selectedDimension = CMVideoFormatDescriptionGetDimensions(selectedFormat.formatDescription); + NSInteger selectedWidth = (NSInteger) selectedDimension.width; + NSInteger selectedHeight = (NSInteger) selectedDimension.height; + NSInteger selectedFps = [self selectFpsForFormat:selectedFormat targetFps:targetFps]; + + self._lastTargetFps = selectedFps; + self._lastTargetWidth = targetWidth; + self._lastTargetHeight = targetHeight; + + NSLog(@"target 
format %ldx%ld, targetFps: %ld, selected format: %ldx%ld, selected fps %ld", targetWidth, targetHeight, targetFps, selectedWidth, selectedHeight, selectedFps); + + if ([videoDevice lockForConfiguration:NULL]) { + @try { + videoDevice.activeVideoMaxFrameDuration = CMTimeMake(1, (int32_t)selectedFps); + videoDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)selectedFps); + } @catch (NSException* exception) { + NSLog(@"Failed to set active frame rate!\n User info:%@", exception.userInfo); + } + [videoDevice unlockForConfiguration]; + } + + [self.videoCapturer startCaptureWithDevice:videoDevice + format:selectedFormat + fps:selectedFps + completionHandler:^(NSError* error) { + if (error) { + NSLog(@"Start capture error: %@", [error localizedDescription]); + } + }]; + + NSString* trackUUID = [[NSUUID UUID] UUIDString]; + RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource + trackId:trackUUID]; + LocalVideoTrack *localVideoTrack = [[LocalVideoTrack alloc] initWithTrack:videoTrack videoProcessing:videoProcessingAdapter]; + + __weak RTCCameraVideoCapturer* capturer = self.videoCapturer; + self.videoCapturerStopHandlers[videoTrack.trackId] = ^(CompletionHandler handler) { + NSLog(@"Stop video capturer, trackID %@", videoTrack.trackId); + [capturer stopCaptureWithCompletionHandler:handler]; + }; + + if (!videoDeviceId) { + videoDeviceId = videoDevice.uniqueID; + } + + if (!facingMode) { + facingMode = videoDevice.position == AVCaptureDevicePositionBack ? @"environment" + : videoDevice.position == AVCaptureDevicePositionFront ? 
@"user" + : @"unspecified"; + } + + videoTrack.settings = @{ + @"deviceId" : videoDeviceId, + @"kind" : @"videoinput", + @"width" : [NSNumber numberWithInteger:selectedWidth], + @"height" : [NSNumber numberWithInteger:selectedHeight], + @"frameRate" : [NSNumber numberWithInteger:selectedFps], + @"facingMode" : facingMode, + }; + + [mediaStream addVideoTrack:videoTrack]; + + [self.localTracks setObject:localVideoTrack forKey:trackUUID]; + + successCallback(mediaStream); + } else { + // According to step 6.2.3 of the getUserMedia() algorithm, if there is no + // source, fail with a new OverconstrainedError. + errorCallback(@"OverconstrainedError", /* errorMessage */ nil); + } +} + +- (void)mediaStreamRelease:(RTCMediaStream*)stream { + if (stream) { + for (RTCVideoTrack* track in stream.videoTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack* track in stream.audioTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:stream.streamId]; + } +} + +/** + * Obtains local media content of a specific type. Requests access for the + * specified {@code mediaType} if necessary. In other words, implements a media + * type-specific iteration of the {@code getUserMedia()} algorithm. + * + * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} + * which specifies the type of the local media content to obtain. + * @param constraints The {@code MediaStreamConstraints} which are to be + * satisfied by the obtained local media content. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is to collect the + * obtained local media content of the specified {@code mediaType}. 
+ */ +- (void)requestAccessForMediaType:(NSString*)mediaType + constraints:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + // According to step 6.2.1 of the getUserMedia() algorithm, if there is no + // source, fail "with a new DOMException object whose name attribute has the + // value NotFoundError." + // XXX The following approach does not work for audio in Simulator. That is + // because audio capture is done using AVAudioSession which does not use + // AVCaptureDevice there. Anyway, Simulator will not (visually) request access + // for audio. + if (mediaType == AVMediaTypeVideo && [self captureDevices].count == 0) { + // Since successCallback and errorCallback are asynchronously invoked + // elsewhere, make sure that the invocation here is consistent. + dispatch_async(dispatch_get_main_queue(), ^{ + errorCallback(@"DOMException", @"NotFoundError"); + }); + return; + } + +#if TARGET_OS_OSX + if (@available(macOS 10.14, *)) { +#endif + [AVCaptureDevice requestAccessForMediaType:mediaType + completionHandler:^(BOOL granted) { + dispatch_async(dispatch_get_main_queue(), ^{ + if (granted) { + NavigatorUserMediaSuccessCallback scb = + ^(RTCMediaStream* mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } else { + // According to step 10 Permission Failure of the getUserMedia() + // algorithm, if the user has denied permission, fail "with a new + // DOMException object whose name attribute has the value + 
// NotAllowedError." + errorCallback(@"DOMException", @"NotAllowedError"); + } + }); + }]; +#if TARGET_OS_OSX + } else { + // Fallback on earlier versions + NavigatorUserMediaSuccessCallback scb = ^(RTCMediaStream* mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } +#endif +} + +- (void)createLocalMediaStream:(FlutterResult)result { + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId" : [mediaStream streamId]}); +} + +- (void)getSources:(FlutterResult)result { + NSMutableArray* sources = [NSMutableArray array]; + NSArray* videoDevices = [self captureDevices]; + for (AVCaptureDevice* device in videoDevices) { + [sources addObject:@{ + @"facing" : device.positionString, + @"deviceId" : device.uniqueID, + @"label" : device.localizedName, + @"kind" : @"videoinput", + }]; + } +#if TARGET_OS_IPHONE + + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + for (AVAudioSessionPortDescription* port in session.session.availableInputs) { + // NSLog(@"input portName: %@, type %@", port.portName,port.portType); + [sources addObject:@{ + @"deviceId" : port.UID, + @"label" : port.portName, + @"groupId" : port.portType, + @"kind" : @"audioinput", + }]; + } + + for (AVAudioSessionPortDescription* port in session.currentRoute.outputs) { + // NSLog(@"output portName: %@, type %@", port.portName,port.portType); + if (session.currentRoute.outputs.count == 1 && ![port.UID 
isEqualToString:@"Speaker"]) { + [sources addObject:@{ + @"deviceId" : @"Speaker", + @"label" : @"Speaker", + @"groupId" : @"Speaker", + @"kind" : @"audiooutput", + }]; + } + [sources addObject:@{ + @"deviceId" : port.UID, + @"label" : port.portName, + @"groupId" : port.portType, + @"kind" : @"audiooutput", + }]; + } +#endif +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + + NSArray* inputDevices = [audioDeviceModule inputDevices]; + for (RTCIODevice* device in inputDevices) { + [sources addObject:@{ + @"deviceId" : device.deviceId, + @"label" : device.name, + @"kind" : @"audioinput", + }]; + } + + NSArray* outputDevices = [audioDeviceModule outputDevices]; + for (RTCIODevice* device in outputDevices) { + [sources addObject:@{ + @"deviceId" : device.deviceId, + @"label" : device.name, + @"kind" : @"audiooutput", + }]; + } +#endif + result(@{@"sources" : sources}); +} + +- (void)selectAudioInput:(NSString*)deviceId result:(FlutterResult)result { +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + NSArray* inputDevices = [audioDeviceModule inputDevices]; + for (RTCIODevice* device in inputDevices) { + if ([deviceId isEqualToString:device.deviceId]) { + [audioDeviceModule setInputDevice:device]; + if (result) + result(nil); + return; + } + } +#endif +#if TARGET_OS_IPHONE + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + for (AVAudioSessionPortDescription* port in session.session.availableInputs) { + if ([port.UID isEqualToString:deviceId]) { + if (self.preferredInput != port.portType) { + self.preferredInput = port.portType; + [AudioUtils selectAudioInput:self.preferredInput]; + } + break; + } + } + if (result) + result(nil); +#endif + if (result) + result([FlutterError errorWithCode:@"selectAudioInputFailed" + message:[NSString stringWithFormat:@"Error: deviceId not found!"] + details:nil]); +} + +- 
(void)selectAudioOutput:(NSString*)deviceId result:(FlutterResult)result { +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + NSArray* outputDevices = [audioDeviceModule outputDevices]; + for (RTCIODevice* device in outputDevices) { + if ([deviceId isEqualToString:device.deviceId]) { + [audioDeviceModule setOutputDevice:device]; + result(nil); + return; + } + } +#endif +#if TARGET_OS_IPHONE + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + NSError* setCategoryError = nil; + + if ([deviceId isEqualToString:@"Speaker"]) { + [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker + error:&setCategoryError]; + } else { + [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None + error:&setCategoryError]; + } + + if (setCategoryError == nil) { + result(nil); + return; + } + + result([FlutterError + errorWithCode:@"selectAudioOutputFailed" + message:[NSString + stringWithFormat:@"Error: %@", [setCategoryError localizedFailureReason]] + details:nil]); + // A FlutterResult must be invoked exactly once: without this return the + // "deviceId not found" error below would also fire on iOS, calling the + // result a second time. + return; + +#endif + result([FlutterError errorWithCode:@"selectAudioOutputFailed" + message:[NSString stringWithFormat:@"Error: deviceId not found!"] + details:nil]); +} + +- (void)mediaStreamTrackRelease:(RTCMediaStream*)mediaStream track:(RTCMediaStreamTrack*)track { + // what's different to mediaStreamTrackStop? only call mediaStream explicitly? 
+ if (mediaStream && track) { + track.isEnabled = NO; + // FIXME this is called when track is removed from the MediaStream, + // but it doesn't mean it can not be added back using MediaStream.addTrack + // TODO: [self.localTracks removeObjectForKey:trackID]; + if ([track.kind isEqualToString:@"audio"]) { + [mediaStream removeAudioTrack:(RTCAudioTrack*)track]; + } else if ([track.kind isEqualToString:@"video"]) { + [mediaStream removeVideoTrack:(RTCVideoTrack*)track]; + } + } +} + +- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + result(@NO); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + result(@NO); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +} + +- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(FlutterResult)result { + // Every failure path must still complete the FlutterResult, otherwise the + // Dart-side Future never resolves. + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" + message:@"Video capturer is null. Can't set torch" + details:nil]); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" + message:@"Video capturer is missing an input. Can't set torch" + details:nil]); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" + message:@"Current capture device does not support torch. Can't set torch" + details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" + message:error.localizedDescription + details:nil]); + return; + } + + device.torchMode = torch ? 
AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"Not supported on macOS. Can't set zoom"); + return; +#endif +#if TARGET_OS_IPHONE + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't set zoom"); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set zoom"); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#endif +} + +- (void)mediaStreamTrackCaptureFrame:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result { + self.frameCapturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track + toPath:path + result:result]; +} + +- (void)mediaStreamTrackStop:(RTCMediaStreamTrack*)track { + if (track) { + track.isEnabled = NO; + [self.localTracks removeObjectForKey:track.trackId]; + } +} + +- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray* captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice* device in captureDevices) { + if (device.position == position) { + return device; + } + } + if(captureDevices.count > 0) { + return captureDevices[0]; + } + return nil; +} + +- 
(AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight { + NSArray* formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat* selectedFormat = nil; + long currentDiff = INT_MAX; + for (AVCaptureDeviceFormat* format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); +#if TARGET_OS_IPHONE + if (@available(iOS 13.0, *)) { + if(format.isMultiCamSupported != AVCaptureMultiCamSession.multiCamSupported) { + continue; + } + } +#endif + //NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", format.videoSupportedFrameRateRanges, dimension.width, dimension.height); + long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + currentDiff = diff; + } else if (diff == currentDiff && + pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, targetFps); +} + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.h b/common/darwin/Classes/FlutterRTCPeerConnection.h new file mode 100644 index 0000000000..bd86076209 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1,65 @@ +#import "FlutterWebRTCPlugin.h" + +@interface RTCPeerConnection (Flutter) +@property(nonatomic, strong, nonnull) NSMutableDictionary* dataChannels; +@property(nonatomic, strong, nonnull) + NSMutableDictionary* 
remoteStreams; +@property(nonatomic, strong, nonnull) + NSMutableDictionary* remoteTracks; +@property(nonatomic, strong, nonnull) NSString* flutterId; +@property(nonatomic, strong, nullable) FlutterEventSink eventSink; +@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel; +@end + +@interface FlutterWebRTCPlugin (RTCPeerConnection) + +- (void)peerConnectionCreateOffer:(nonnull NSDictionary*)constraints + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionCreateAnswer:(nonnull NSDictionary*)constraints + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionSetLocalDescription:(nonnull RTCSessionDescription*)sdp + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionSetRemoteDescription:(nonnull RTCSessionDescription*)sdp + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionAddICECandidate:(nonnull RTCIceCandidate*)candidate + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionGetStats:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (void)peerConnectionGetStatsForTrackId:(nonnull NSString*)trackID + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result; + +- (nonnull RTCMediaConstraints*)parseMediaConstraints:(nonnull NSDictionary*)constraints; + +- (void)peerConnectionSetConfiguration:(nonnull RTCConfiguration*)configuration + peerConnection:(nonnull RTCPeerConnection*)peerConnection; + +- (void)peerConnectionGetRtpReceiverCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result; + +- (void)peerConnectionGetRtpSenderCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull 
FlutterResult)result; + +- (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result; + +- (nullable NSString*)stringForSignalingState:(RTCSignalingState)state; + +- (nullable NSString*)stringForICEGatheringState:(RTCIceGatheringState)state; + +- (nullable NSString*)stringForICEConnectionState:(RTCIceConnectionState)state; + +- (nullable NSString*)stringForPeerConnectionState:(RTCPeerConnectionState)state; + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.m b/common/darwin/Classes/FlutterRTCPeerConnection.m new file mode 100644 index 0000000000..d37eafde10 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1,850 @@ +#import "FlutterRTCPeerConnection.h" +#import +#import "AudioUtils.h" +#import "FlutterRTCDataChannel.h" +#import "FlutterWebRTCPlugin.h" + +#import + +@implementation RTCPeerConnection (Flutter) + +@dynamic eventSink; + +- (NSString*)flutterId { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterId:(NSString*)flutterId { + objc_setAssociatedObject(self, @selector(flutterId), flutterId, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)dataChannels { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setDataChannels:(NSMutableDictionary*)dataChannels { + objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)remoteStreams { + 
return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteStreams:(NSMutableDictionary*)remoteStreams { + objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)remoteTracks { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteTracks:(NSMutableDictionary*)remoteTracks { + objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCPeerConnection) + +- (void)peerConnectionSetConfiguration:(RTCConfiguration*)configuration + peerConnection:(RTCPeerConnection*)peerConnection { + [peerConnection setConfiguration:configuration]; +} + +- (void)peerConnectionCreateOffer:(NSDictionary*)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + offerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription* sdp, NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"CreateOfferFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + }]; +} + +- (void)peerConnectionCreateAnswer:(NSDictionary*)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + answerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription* sdp, NSError* error) { + if (error) { + 
result([FlutterError + errorWithCode:@"CreateAnswerFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + }]; +} + +- (void)peerConnectionSetLocalDescription:(RTCSessionDescription*)sdp + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + setLocalDescription:sdp + completionHandler:^(NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"SetLocalDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionSetRemoteDescription:(RTCSessionDescription*)sdp + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + setRemoteDescription:sdp + completionHandler:^(NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"SetRemoteDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionAddICECandidate:(RTCIceCandidate*)candidate + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + addIceCandidate:candidate + completionHandler:^(NSError* _Nullable error) { + if (error) { + result([FlutterError + errorWithCode:@"AddIceCandidateFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionClose:(RTCPeerConnection*)peerConnection { + [peerConnection close]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. 
+ NSMutableDictionary* dataChannels = peerConnection.dataChannels; + for (NSString* dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. + } + [dataChannels removeAllObjects]; +} + +- (void)peerConnectionGetStatsForTrackId:(nonnull NSString*)trackID + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result { + RTCRtpSender* sender = nil; + RTCRtpReceiver* receiver = nil; + + for (RTCRtpSender* s in peerConnection.senders) { + if (s.track != nil && [s.track.trackId isEqualToString:trackID]) { + sender = s; + } + } + + for (RTCRtpReceiver* r in peerConnection.receivers) { + if (r.track != nil && [r.track.trackId isEqualToString:trackID]) { + receiver = r; + } + } + + if (sender != nil) { + [peerConnection statisticsForSender:sender + completionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; + } else if (receiver != nil) { + [peerConnection statisticsForReceiver:receiver + completionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; + } else { + result([FlutterError errorWithCode:@"GetStatsFailed" + message:[NSString stringWithFormat:@"Error %@", @""] + details:nil]); + } +} + +- 
(void)peerConnectionGetStats:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result { + [peerConnection statisticsWithCompletionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; +} + +- (NSString*)stringForICEConnectionState:(RTCIceConnectionState)state { + switch (state) { + case RTCIceConnectionStateNew: + return @"new"; + case RTCIceConnectionStateChecking: + return @"checking"; + case RTCIceConnectionStateConnected: + return @"connected"; + case RTCIceConnectionStateCompleted: + return @"completed"; + case RTCIceConnectionStateFailed: + return @"failed"; + case RTCIceConnectionStateDisconnected: + return @"disconnected"; + case RTCIceConnectionStateClosed: + return @"closed"; + case RTCIceConnectionStateCount: + return @"count"; + } + return nil; +} + +- (NSString*)stringForICEGatheringState:(RTCIceGatheringState)state { + switch (state) { + case RTCIceGatheringStateNew: + return @"new"; + case RTCIceGatheringStateGathering: + return @"gathering"; + case RTCIceGatheringStateComplete: + return @"complete"; + } + return nil; +} + +- (NSString*)stringForSignalingState:(RTCSignalingState)state { + switch (state) { + case RTCSignalingStateStable: + return @"stable"; + case RTCSignalingStateHaveLocalOffer: + return @"have-local-offer"; + case RTCSignalingStateHaveLocalPrAnswer: + return @"have-local-pranswer"; + case RTCSignalingStateHaveRemoteOffer: + return @"have-remote-offer"; + case RTCSignalingStateHaveRemotePrAnswer: + return @"have-remote-pranswer"; + case RTCSignalingStateClosed: + return @"closed"; + } + return nil; +} + +- (NSString*)stringForPeerConnectionState:(RTCPeerConnectionState)state { 
+ switch (state) { + case RTCPeerConnectionStateNew: + return @"new"; + case RTCPeerConnectionStateConnecting: + return @"connecting"; + case RTCPeerConnectionStateConnected: + return @"connected"; + case RTCPeerConnectionStateDisconnected: + return @"disconnected"; + case RTCPeerConnectionStateFailed: + return @"failed"; + case RTCPeerConnectionStateClosed: + return @"closed"; + } + return nil; +} + +/** + * Parses the constraint keys and values of a specific JavaScript object into + * a specific NSMutableDictionary in a format suitable for the + * initialization of a RTCMediaConstraints instance. + * + * @param src The JavaScript object which defines constraint keys and values and + * which is to be parsed into the specified dst. + * @param dst The NSMutableDictionary into which the constraint keys + * and values defined by src are to be written in a format suitable for + * the initialization of a RTCMediaConstraints instance. + */ +- (void)parseJavaScriptConstraints:(NSDictionary*)src + intoWebRTCConstraints:(NSMutableDictionary*)dst { + for (id srcKey in src) { + id srcValue = src[srcKey]; + NSString* dstValue; + + if ([srcValue isKindOfClass:[NSNumber class]]) { + dstValue = [srcValue boolValue] ? @"true" : @"false"; + } else { + dstValue = [srcValue description]; + } + dst[[srcKey description]] = dstValue; + } +} + +/** + * Parses a JavaScript object into a new RTCMediaConstraints instance. + * + * @param constraints The JavaScript object to parse into a new + * RTCMediaConstraints instance. + * @returns A new RTCMediaConstraints instance initialized with the + * mandatory and optional constraint keys and values specified by + * constraints. 
+ */ +- (RTCMediaConstraints*)parseMediaConstraints:(NSDictionary*)constraints { + id mandatory = constraints[@"mandatory"]; + NSMutableDictionary* mandatory_ = [NSMutableDictionary new]; + + if ([mandatory isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary*)mandatory intoWebRTCConstraints:mandatory_]; + } + + id optional = constraints[@"optional"]; + NSMutableDictionary* optional_ = [NSMutableDictionary new]; + + if ([optional isKindOfClass:[NSArray class]]) { + for (id o in (NSArray*)optional) { + if ([o isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary*)o intoWebRTCConstraints:optional_]; + } + } + } + + return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ + optionalConstraints:optional_]; +} + +#pragma mark - RTCPeerConnectionDelegate methods +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeSignalingState:(RTCSignalingState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{@"event" : @"signalingState", @"state" : [self stringForSignalingState:newState]}); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + mediaStream:(RTCMediaStream*)stream + didAddTrack:(RTCVideoTrack*)track { + peerConnection.remoteTracks[track.trackId] = track; + NSString* streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onAddTrack", + @"streamId" : streamId, + @"trackId" : track.trackId, + @"track" : @{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + } + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + 
mediaStream:(RTCMediaStream*)stream + didRemoveTrack:(RTCVideoTrack*)track { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + NSString* streamId = stream.streamId; + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRemoveTrack", + @"streamId" : streamId, + @"trackId" : track.trackId, + @"track" : @{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + } + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didAddStream:(RTCMediaStream*)stream { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + BOOL hasAudio = NO; + for (RTCAudioTrack* track in stream.audioTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [audioTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + hasAudio = YES; + } + + for (RTCVideoTrack* track in stream.videoTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + } + + NSString* streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + if (hasAudio) { + [self ensureAudioSession]; + } + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onAddStream", + @"streamId" : streamId, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks, + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didRemoveStream:(RTCMediaStream*)stream { + NSArray* keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; + // 
We assume there can be only one object for 1 key + if (keysArray.count > 1) { + NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", + stream.streamId); + } + NSString* streamId = stream.streamId; + + for (RTCVideoTrack* track in stream.videoTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack* track in stream.audioTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRemoveStream", + @"streamId" : streamId, + }); + } +} + +- (void)peerConnectionShouldNegotiate:(RTCPeerConnection*)peerConnection { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRenegotiationNeeded", + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeIceConnectionState:(RTCIceConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"iceConnectionState", + @"state" : [self stringForICEConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeIceGatheringState:(RTCIceGatheringState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{@"event" : @"iceGatheringState", @"state" : [self stringForICEGatheringState:newState]}); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didGenerateIceCandidate:(RTCIceCandidate*)candidate { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onCandidate", + @"candidate" : @{ + @"candidate" : candidate.sdp, + @"sdpMLineIndex" : @(candidate.sdpMLineIndex), + @"sdpMid" : candidate.sdpMid + } + }); + } +} + +- 
(void)peerConnection:(RTCPeerConnection*)peerConnection + didOpenDataChannel:(RTCDataChannel*)dataChannel { + if (-1 == dataChannel.channelId) { + return; + } + + NSString* flutterChannelId = [[NSUUID UUID] UUIDString]; + NSNumber* dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; + dataChannel.peerConnectionId = peerConnection.flutterId; + dataChannel.delegate = self; + peerConnection.dataChannels[flutterChannelId] = dataChannel; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@", + peerConnection.flutterId, flutterChannelId] + binaryMessenger:self.messenger]; + + dataChannel.eventChannel = eventChannel; + dataChannel.flutterChannelId = flutterChannelId; + dataChannel.eventQueue = nil; + + dispatch_async(dispatch_get_main_queue(), ^{ + // setStreamHandler on main thread + [eventChannel setStreamHandler:dataChannel]; + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"didOpenDataChannel", + @"id" : dataChannelId, + @"label" : dataChannel.label, + @"flutterId" : flutterChannelId + }); + } + }); +} + +/** Called any time the PeerConnectionState changes. */ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeConnectionState:(RTCPeerConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"peerConnectionState", + @"state" : [self stringForPeerConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didStartReceivingOnTransceiver:(RTCRtpTransceiver*)transceiver { +} + +/** Called when a receiver and its track are created. 
*/ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didAddReceiver:(RTCRtpReceiver*)rtpReceiver + streams:(NSArray*)mediaStreams { + // For unified-plan + NSMutableArray* streams = [NSMutableArray array]; + for (RTCMediaStream* stream in mediaStreams) { + [streams addObject:[self mediaStreamToMap:stream ownerTag:peerConnection.flutterId]]; + } + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + NSMutableDictionary* event = [NSMutableDictionary dictionary]; + [event addEntriesFromDictionary:@{ + @"event" : @"onTrack", + @"track" : [self mediaTrackToMap:rtpReceiver.track], + @"receiver" : [self receiverToMap:rtpReceiver], + @"streams" : streams, + }]; + + if (peerConnection.configuration.sdpSemantics == RTCSdpSemanticsUnifiedPlan) { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if (transceiver.receiver != nil && + [transceiver.receiver.receiverId isEqualToString:rtpReceiver.receiverId]) { + [event setValue:[self transceiverToMap:transceiver] forKey:@"transceiver"]; + } + } + } + + peerConnection.remoteTracks[rtpReceiver.track.trackId] = rtpReceiver.track; + if (mediaStreams.count > 0) { + peerConnection.remoteStreams[mediaStreams[0].streamId] = mediaStreams[0]; + } + + if ([rtpReceiver.track.kind isEqualToString:@"audio"]) { + [self ensureAudioSession]; + } + postEvent(eventSink, event); + } +} + +/** Called when the receiver and its track are removed. */ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didRemoveReceiver:(RTCRtpReceiver*)rtpReceiver { +} + +/** Called when the selected ICE candidate pair is changed. 
*/ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeLocalCandidate:(RTCIceCandidate*)local + remoteCandidate:(RTCIceCandidate*)remote + lastReceivedMs:(int)lastDataReceivedMs + changeReason:(NSString*)reason { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onSelectedCandidatePairChanged", + @"local" : @{ + @"candidate" : local.sdp, + @"sdpMLineIndex" : @(local.sdpMLineIndex), + @"sdpMid" : local.sdpMid + }, + @"remote" : @{ + @"candidate" : remote.sdp, + @"sdpMLineIndex" : @(remote.sdpMLineIndex), + @"sdpMid" : remote.sdpMid + }, + @"reason" : reason, + @"lastDataReceivedMs" : @(lastDataReceivedMs) + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didRemoveIceCandidates:(NSArray*)candidates { +} + +NSString* mediaTypeFromString(NSString* kind) { + NSString* mediaType = kRTCMediaStreamTrackKindAudio; + if ([kind isEqualToString:@"audio"]) { + mediaType = kRTCMediaStreamTrackKindAudio; + } else if ([kind isEqualToString:@"video"]) { + mediaType = kRTCMediaStreamTrackKindVideo; + } + return mediaType; +} + +NSString* parametersToString(NSDictionary* parameters) { + NSMutableArray* kvs = [NSMutableArray array]; + for (NSString* key in parameters) { + if (key.length > 0) { + [kvs addObject:[NSString stringWithFormat:@"%@=%@", key, parameters[key]]]; + } else { + [kvs addObject:parameters[key]]; + } + } + return [kvs componentsJoinedByString:@";"]; +} + +NSDictionary* stringToParameters(NSString* str) { + NSMutableDictionary* parameters = [NSMutableDictionary dictionary]; + NSArray* kvs = [str componentsSeparatedByString:@";"]; + for (NSString* kv in kvs) { + NSArray* kvArr = [kv componentsSeparatedByString:@"="]; + if (kvArr.count == 2) { + parameters[kvArr[0]] = kvArr[1]; + } else if (kvArr.count == 1) { + parameters[@""] = kvArr[0]; + } + } + return parameters; +} + +- (void)peerConnectionGetRtpReceiverCapabilities:(nonnull NSDictionary*)argsMap + 
result:(nonnull FlutterResult)result { + NSString* kind = argsMap[@"kind"]; + RTCRtpCapabilities* caps = + [self.peerConnectionFactory rtpReceiverCapabilitiesForKind:mediaTypeFromString(kind)]; + NSMutableArray* codecsMap = [NSMutableArray array]; + for (RTCRtpCodecCapability* c in caps.codecs) { + if ([kind isEqualToString:@"audio"]) { + [codecsMap addObject:@{ + @"channels" : c.numChannels, + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } else if ([kind isEqualToString:@"video"]) { + [codecsMap addObject:@{ + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } + } + result(@{ + @"codecs" : codecsMap, + @"headerExtensions" : @[], + @"fecMechanisms" : @[], + }); +} + +- (void)peerConnectionGetRtpSenderCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result { + NSString* kind = argsMap[@"kind"]; + RTCRtpCapabilities* caps = + [self.peerConnectionFactory rtpSenderCapabilitiesForKind:mediaTypeFromString(kind)]; + NSMutableArray* codecsMap = [NSMutableArray array]; + for (RTCRtpCodecCapability* c in caps.codecs) { + if ([kind isEqualToString:@"audio"]) { + [codecsMap addObject:@{ + @"channels" : c.numChannels, + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } else if ([kind isEqualToString:@"video"]) { + [codecsMap addObject:@{ + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } + } + result(@{ + @"codecs" : codecsMap, + @"headerExtensions" : @[], + @"fecMechanisms" : @[], + }); +} + +-(RTC_OBJC_TYPE(RTCRtpCodecCapability) *) findCodecCapability:(NSString *)kind + codec:(NSString *)codec + parameters:(NSDictionary*)parameters { + RTCRtpCapabilities* caps = [self.peerConnectionFactory rtpSenderCapabilitiesForKind: [kind isEqualToString:@"video"]? 
kRTCMediaStreamTrackKindVideo : kRTCMediaStreamTrackKindAudio]; + for(RTCRtpCodecCapability* capCodec in caps.codecs) { + if([capCodec.name isEqualToString:codec] && [capCodec.kind isEqualToString:kind]) { + BOOL matched = YES; + for(NSString* key in capCodec.parameters) { + NSString *value = [capCodec.parameters objectForKey:key]; + NSString *value2 = [parameters objectForKey:key]; + if(![value isEqualToString:value2]) { + matched = NO; + } + } + if(matched) { + return capCodec; + } + } + } + return nil; +} + +- (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result { + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:@"transceiverSetCodecPreferencesFailed" + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError errorWithCode:@"transceiverSetCodecPreferencesFailed" + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + id codecs = argsMap[@"codecs"]; + NSMutableArray* codecCaps = [NSMutableArray array]; + for (id c in codecs) { + NSArray* kindAndName = [c[@"mimeType"] componentsSeparatedByString:@"/"]; + NSString* kind = [kindAndName[0] lowercaseString]; + NSString* name = kindAndName[1]; + NSLog(@"codec %@/%@", kind, name); + NSDictionary* parameters = nil; + if (c[@"sdpFmtpLine"] != nil && ![((NSString*)c[@"sdpFmtpLine"]) isEqualToString:@""]) { + parameters = stringToParameters((NSString*)c[@"sdpFmtpLine"]); + } + RTCRtpCodecCapability * codec = [self findCodecCapability:kind codec:name parameters:parameters]; + if(codec != nil) { + [codecCaps 
addObject:codec]; + } + } + [transcevier setCodecPreferences:codecCaps]; + result(nil); +} + +@end diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.h b/common/darwin/Classes/FlutterRTCVideoRenderer.h new file mode 100644 index 0000000000..f644d57f86 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1,33 @@ +#import "FlutterWebRTCPlugin.h" + +#import +#import +#import +#import + +@interface FlutterRTCVideoRenderer + : NSObject + +/** + * The {@link RTCVideoTrack}, if any, which this instance renders. + */ +@property(nonatomic, strong) RTCVideoTrack* videoTrack; +@property(nonatomic) int64_t textureId; +@property(nonatomic, weak) id registry; +@property(nonatomic, strong) FlutterEventSink eventSink; + +- (instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +- (void)dispose; + +@end + +@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack; + +@end diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.m b/common/darwin/Classes/FlutterRTCVideoRenderer.m new file mode 100644 index 0000000000..8760444d2e --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1,297 @@ +#import "FlutterRTCVideoRenderer.h" + +#import +#import +#import +#import +#import + +#import + +#import "FlutterWebRTCPlugin.h" +#import + +@implementation FlutterRTCVideoRenderer { + CGSize _frameSize; + CGSize _renderSize; + CVPixelBufferRef _pixelBufferRef; + RTCVideoRotation _rotation; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; + bool _frameAvailable; + os_unfair_lock _lock; +} + +@synthesize textureId = _textureId; +@synthesize registry = _registry; +@synthesize eventSink = _eventSink; +@synthesize videoTrack = _videoTrack; + +- 
(instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _isFirstFrameRendered = false; + _frameAvailable = false; + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + _registry = registry; + _pixelBufferRef = nil; + _eventSink = nil; + _rotation = -1; + _textureId = [registry registerTexture:self]; + /*Create Event Channel.*/ + _eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + return self; +} + +- (CVPixelBufferRef)copyPixelBuffer { + CVPixelBufferRef buffer = nil; + os_unfair_lock_lock(&_lock); + if (_pixelBufferRef != nil && _frameAvailable) { + buffer = CVBufferRetain(_pixelBufferRef); + _frameAvailable = false; + } + os_unfair_lock_unlock(&_lock); + return buffer; +} + +- (void)dispose { + os_unfair_lock_lock(&_lock); + [_registry unregisterTexture:_textureId]; + _textureId = -1; + if (_pixelBufferRef) { + CVBufferRelease(_pixelBufferRef); + _pixelBufferRef = nil; + } + _frameAvailable = false; + os_unfair_lock_unlock(&_lock); +} + +- (void)setVideoTrack:(RTCVideoTrack*)videoTrack { + RTCVideoTrack* oldValue = self.videoTrack; + if (oldValue != videoTrack) { + os_unfair_lock_lock(&_lock); + _videoTrack = videoTrack; + os_unfair_lock_unlock(&_lock); + _isFirstFrameRendered = false; + if (oldValue) { + [oldValue removeRenderer:self]; + } + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + if (videoTrack) { + [videoTrack addRenderer:self]; + } + } +} + +- (id)correctRotation:(const id)src + withRotation:(RTCVideoRotation)rotation { + int rotated_width = src.width; + int rotated_height = src.height; + + if (rotation == RTCVideoRotation_90 || rotation == RTCVideoRotation_270) { + int temp = rotated_width; + rotated_width = rotated_height; + rotated_height = temp; + } + + 
id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width + height:rotated_height]; + + [RTCYUVHelper I420Rotate:src.dataY + srcStrideY:src.strideY + srcU:src.dataU + srcStrideU:src.strideU + srcV:src.dataV + srcStrideV:src.strideV + dstY:(uint8_t*)buffer.dataY + dstStrideY:buffer.strideY + dstU:(uint8_t*)buffer.dataU + dstStrideU:buffer.strideU + dstV:(uint8_t*)buffer.dataV + dstStrideV:buffer.strideV + width:src.width + height:src.height + mode:rotation]; + + return buffer; +} + +- (void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer + withFrame:(RTCVideoFrame*)frame { + id i420Buffer = [self correctRotation:[frame.buffer toI420] + withRotation:frame.rotation]; + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + + [RTCYUVHelper I420ToARGB:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + 
srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstARGB:dst + dstStrideARGB:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + [RTCYUVHelper I420ToBGRA:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstBGRA:dst + dstStrideBGRA:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + } + } + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); +} + +#pragma mark - RTCVideoRenderer methods +- (void)renderFrame:(RTCVideoFrame*)frame { + + os_unfair_lock_lock(&_lock); + if(_videoTrack == nil) { + os_unfair_lock_unlock(&_lock); + return; + } + if(!_frameAvailable && _pixelBufferRef) { + [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; + if(_textureId != -1) { + [_registry textureFrameAvailable:_textureId]; + } + _frameAvailable = true; + } + os_unfair_lock_unlock(&_lock); + + __weak FlutterRTCVideoRenderer* weakSelf = self; + if (_renderSize.width != frame.width || _renderSize.height != frame.height) { + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer* strongSelf = weakSelf; + if (strongSelf.eventSink) { + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeVideoSize", + @"id" : @(strongSelf.textureId), + @"width" : @(frame.width), + @"height" : @(frame.height), + }); + } + }); + _renderSize = CGSizeMake(frame.width, frame.height); + } + + if (frame.rotation != _rotation) { + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer* strongSelf = weakSelf; + if (strongSelf.eventSink) { + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeRotation", + @"id" : @(strongSelf.textureId), + @"rotation" : @(frame.rotation), + }); + } + }); + + _rotation = frame.rotation; + } + + // Notify the Flutter new pixelBufferRef to be 
ready. + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer* strongSelf = weakSelf; + if (!strongSelf->_isFirstFrameRendered) { + if (strongSelf.eventSink) { + strongSelf.eventSink(@{@"event" : @"didFirstFrameRendered"}); + strongSelf->_isFirstFrameRendered = true; + } + } + }); +} + +/** + * Sets the size of the video frame to render. + * + * @param size The size of the video frame to render. + */ +- (void)setSize:(CGSize)size { + os_unfair_lock_lock(&_lock); + if (size.width != _frameSize.width || size.height != _frameSize.height) { + if (_pixelBufferRef) { + CVBufferRelease(_pixelBufferRef); + } + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); + _frameAvailable = false; + _frameSize = size; + } + os_unfair_lock_unlock(&_lock); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger { + return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; +} + +- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack { + renderer.videoTrack = videoTrack; +} +@end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h new file mode 100644 index 0000000000..ee39d6345f --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1,96 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX 
+#import +#endif + +#import +#import +#import "LocalTrack.h" + +@class FlutterRTCVideoRenderer; +@class FlutterRTCFrameCapturer; +@class FlutterRTCMediaRecorder; +@class AudioManager; + +void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event); + +typedef void (^CompletionHandler)(void); + +typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); + +@interface FlutterWebRTCPlugin : NSObject + +@property(nonatomic, strong) RTCPeerConnectionFactory* _Nullable peerConnectionFactory; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable peerConnections; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable localStreams; +@property(nonatomic, strong) NSMutableDictionary>* _Nullable localTracks; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable renders; +@property(nonatomic, strong) NSMutableDictionary* recorders; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable videoCapturerStopHandlers; + +@property(nonatomic, strong) + NSMutableDictionary* _Nullable frameCryptors; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable keyProviders; + +#if TARGET_OS_IPHONE +@property(nonatomic, retain) + UIViewController* _Nullable viewController; /*for broadcast or ReplayKit */ +#endif + +@property(nonatomic, strong) FlutterEventSink _Nullable eventSink; +@property(nonatomic, strong) NSObject* _Nonnull messenger; +@property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer; +@property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer; +@property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput; + +@property(nonatomic, strong) NSString* _Nonnull focusMode; +@property(nonatomic, strong) NSString* _Nonnull exposureMode; + +@property(nonatomic) BOOL _usingFrontCamera; +@property(nonatomic) NSInteger _lastTargetWidth; +@property(nonatomic) NSInteger _lastTargetHeight; +@property(nonatomic) NSInteger _lastTargetFps; + +@property(nonatomic, strong) AudioManager* 
_Nullable audioManager; + +- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId + peerConnectionId:(NSString* _Nullable)peerConnectionId; +- (RTCMediaStreamTrack* _Nullable)trackForId:(NSString* _Nonnull)trackId + peerConnectionId:(NSString* _Nullable)peerConnectionId; +- (NSString*)audioTrackIdForVideoTrackId:(NSString*)videoTrackId; +- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nullable)Id; +- (NSDictionary* _Nullable)mediaStreamToMap:(RTCMediaStream* _Nonnull)stream + ownerTag:(NSString* _Nullable)ownerTag; +- (NSDictionary* _Nullable)mediaTrackToMap:(RTCMediaStreamTrack* _Nonnull)track; +- (NSDictionary* _Nullable)receiverToMap:(RTCRtpReceiver* _Nonnull)receiver; +- (NSDictionary* _Nullable)transceiverToMap:(RTCRtpTransceiver* _Nonnull)transceiver; + +- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId; + +- (BOOL)hasLocalAudioTrack; +- (void)ensureAudioSession; +- (void)deactiveRtcAudioSession; + +- (RTCRtpReceiver* _Nullable)getRtpReceiverById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nonnull)Id; +- (RTCRtpSender* _Nullable)getRtpSenderById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nonnull)Id; + ++ (FlutterWebRTCPlugin* _Nullable)sharedSingleton; + +@end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m new file mode 100644 index 0000000000..73f01aebe1 --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1,2382 @@ +#import "FlutterWebRTCPlugin.h" +#import "AudioUtils.h" +#import "CameraUtils.h" +#import "FlutterRTCDataChannel.h" +#import "FlutterRTCDesktopCapturer.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCPeerConnection.h" +#import "FlutterRTCVideoRenderer.h" +#import "FlutterRTCFrameCryptor.h" +#if TARGET_OS_IPHONE +#import "FlutterRTCMediaRecorder.h" +#import "FlutterRTCVideoPlatformViewFactory.h" +#import 
"FlutterRTCVideoPlatformViewController.h" +#endif +#import "AudioManager.h" + +#import +#import +#import + +#import "LocalTrack.h" +#import "LocalAudioTrack.h" +#import "LocalVideoTrack.h" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wprotocol" + +@interface VideoEncoderFactory : RTCDefaultVideoEncoderFactory +@end + +@interface VideoDecoderFactory : RTCDefaultVideoDecoderFactory +@end + +@interface VideoEncoderFactorySimulcast : RTCVideoEncoderFactorySimulcast +@end + +NSArray* motifyH264ProfileLevelId( + NSArray* codecs) { + NSMutableArray* newCodecs = [[NSMutableArray alloc] init]; + NSInteger count = codecs.count; + for (NSInteger i = 0; i < count; i++) { + RTC_OBJC_TYPE(RTCVideoCodecInfo)* info = [codecs objectAtIndex:i]; + if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { + NSString* hexString = info.parameters[@"profile-level-id"]; + RTCH264ProfileLevelId* profileLevelId = + [[RTCH264ProfileLevelId alloc] initWithHexString:hexString]; + if (profileLevelId.level < RTCH264Level5_1) { + RTCH264ProfileLevelId* newProfileLevelId = + [[RTCH264ProfileLevelId alloc] initWithProfile:profileLevelId.profile + level:RTCH264Level5_1]; + // NSLog(@"profile-level-id: %@ => %@", hexString, [newProfileLevelId hexString]); + NSMutableDictionary* parametersCopy = [[NSMutableDictionary alloc] init]; + [parametersCopy addEntriesFromDictionary:info.parameters]; + [parametersCopy setObject:[newProfileLevelId hexString] forKey:@"profile-level-id"]; + [newCodecs insertObject:[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name + parameters:parametersCopy] + atIndex:i]; + } else { + [newCodecs insertObject:info atIndex:i]; + } + } else { + [newCodecs insertObject:info atIndex:i]; + } + } + return newCodecs; +} + +@implementation VideoEncoderFactory +- (NSArray*)supportedCodecs { + NSArray* codecs = [super supportedCodecs]; + return motifyH264ProfileLevelId(codecs); +} +@end + +@implementation VideoDecoderFactory +- (NSArray*)supportedCodecs { 
+ NSArray* codecs = [super supportedCodecs]; + return motifyH264ProfileLevelId(codecs); +} +@end + +@implementation VideoEncoderFactorySimulcast +- (NSArray*)supportedCodecs { + NSArray* codecs = [super supportedCodecs]; + return motifyH264ProfileLevelId(codecs); +} +@end + +void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event) { + dispatch_async(dispatch_get_main_queue(), ^{ + sink(event); + }); +} + +@implementation FlutterWebRTCPlugin { +#pragma clang diagnostic pop + FlutterMethodChannel* _methodChannel; + FlutterEventSink _eventSink; + FlutterEventChannel* _eventChannel; + id _registry; + id _messenger; + id _textures; + BOOL _speakerOn; + BOOL _speakerOnButPreferBluetooth; + AVAudioSessionPort _preferredInput; + AudioManager* _audioManager; +#if TARGET_OS_IPHONE + FLutterRTCVideoPlatformViewFactory *_platformViewFactory; +#endif +} + +static FlutterWebRTCPlugin *sharedSingleton; + ++ (FlutterWebRTCPlugin *)sharedSingleton +{ + @synchronized(self) + { + return sharedSingleton; + } +} + +@synthesize messenger = _messenger; +@synthesize eventSink = _eventSink; +@synthesize preferredInput = _preferredInput; +@synthesize audioManager = _audioManager; + ++ (void)registerWithRegistrar:(NSObject*)registrar { + FlutterMethodChannel* channel = + [FlutterMethodChannel methodChannelWithName:@"FlutterWebRTC.Method" + binaryMessenger:[registrar messenger]]; +#if TARGET_OS_IPHONE + UIViewController* viewController = (UIViewController*)registrar.messenger; +#endif + FlutterWebRTCPlugin* instance = + [[FlutterWebRTCPlugin alloc] initWithChannel:channel + registrar:registrar + messenger:[registrar messenger] +#if TARGET_OS_IPHONE + viewController:viewController +#endif + withTextures:[registrar textures]]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithChannel:(FlutterMethodChannel*)channel + registrar:(NSObject*)registrar + messenger:(NSObject*)messenger +#if TARGET_OS_IPHONE + 
viewController:(UIViewController*)viewController +#endif + withTextures:(NSObject*)textures { + + self = [super init]; + sharedSingleton = self; + + FlutterEventChannel* eventChannel = + [FlutterEventChannel eventChannelWithName:@"FlutterWebRTC.Event" binaryMessenger:messenger]; + [eventChannel setStreamHandler:self]; + + if (self) { + _methodChannel = channel; + _registry = registrar; + _textures = textures; + _messenger = messenger; + _speakerOn = NO; + _speakerOnButPreferBluetooth = NO; + _eventChannel = eventChannel; + _audioManager = AudioManager.sharedInstance; + +#if TARGET_OS_IPHONE + _preferredInput = AVAudioSessionPortHeadphones; + self.viewController = viewController; + _platformViewFactory = [[FLutterRTCVideoPlatformViewFactory alloc] initWithMessenger:messenger]; + [registrar registerViewFactory:_platformViewFactory withId:FLutterRTCVideoPlatformViewFactoryID]; +#endif + } + + NSDictionary* fieldTrials = @{kRTCFieldTrialUseNWPathMonitor : kRTCFieldTrialEnabledValue}; + RTCInitFieldTrialDictionary(fieldTrials); + + self.peerConnections = [NSMutableDictionary new]; + self.localStreams = [NSMutableDictionary new]; + self.localTracks = [NSMutableDictionary new]; + self.renders = [NSMutableDictionary new]; + self.frameCryptors = [NSMutableDictionary new]; + self.keyProviders = [NSMutableDictionary new]; + self.videoCapturerStopHandlers = [NSMutableDictionary new]; + self.recorders = [NSMutableDictionary new]; +#if TARGET_OS_IPHONE + self.focusMode = @"locked"; + self.exposureMode = @"locked"; + AVAudioSession* session = [AVAudioSession sharedInstance]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(didSessionRouteChange:) + name:AVAudioSessionRouteChangeNotification + object:session]; +#endif +#if TARGET_OS_OSX + [_peerConnectionFactory.audioDeviceModule setDevicesUpdatedHandler:^(void) { + NSLog(@"Handle Devices Updated!"); + if (self.eventSink) { + postEvent( self.eventSink, @{@"event" : @"onDeviceChange"}); + } + }]; 
+#endif + return self; +} + +- (void)detachFromEngineForRegistrar:(NSObject*)registrar { + for (RTCPeerConnection* peerConnection in _peerConnections.allValues) { + for (RTCDataChannel* dataChannel in peerConnection.dataChannels) { + dataChannel.eventSink = nil; + } + peerConnection.eventSink = nil; + } + _eventSink = nil; +} + +#pragma mark - FlutterStreamHandler methods + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} + +- (void)didSessionRouteChange:(NSNotification*)notification { +#if TARGET_OS_IPHONE + NSDictionary* interuptionDict = notification.userInfo; + NSInteger routeChangeReason = + [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; + if (self.eventSink && + (routeChangeReason == AVAudioSessionRouteChangeReasonNewDeviceAvailable || + routeChangeReason == AVAudioSessionRouteChangeReasonOldDeviceUnavailable || + routeChangeReason == AVAudioSessionRouteChangeReasonCategoryChange || + routeChangeReason == AVAudioSessionRouteChangeReasonOverride)) { + postEvent(self.eventSink, @{@"event" : @"onDeviceChange"}); + } +#endif +} + +- (void)initialize:(NSArray*)networkIgnoreMask +bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { + // RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); + if (!_peerConnectionFactory) { + VideoDecoderFactory* decoderFactory = [[VideoDecoderFactory alloc] init]; + VideoEncoderFactory* encoderFactory = [[VideoEncoderFactory alloc] init]; + + VideoEncoderFactorySimulcast* simulcastFactory = + [[VideoEncoderFactorySimulcast alloc] initWithPrimary:encoderFactory fallback:encoderFactory]; + + _peerConnectionFactory = + 
[[RTCPeerConnectionFactory alloc] initWithBypassVoiceProcessing:bypassVoiceProcessing + encoderFactory:simulcastFactory + decoderFactory:decoderFactory + audioProcessingModule:_audioManager.audioProcessingModule]; + + RTCPeerConnectionFactoryOptions *options = [[RTCPeerConnectionFactoryOptions alloc] init]; + for (NSString* adapter in networkIgnoreMask) + { + if ([@"adapterTypeEthernet" isEqualToString:adapter]) { + options.ignoreEthernetNetworkAdapter = YES; + } else if ([@"adapterTypeWifi" isEqualToString:adapter]) { + options.ignoreWiFiNetworkAdapter = YES; + } else if ([@"adapterTypeCellular" isEqualToString:adapter]) { + options.ignoreCellularNetworkAdapter = YES; + } else if ([@"adapterTypeVpn" isEqualToString:adapter]) { + options.ignoreVPNNetworkAdapter = YES; + } else if ([@"adapterTypeLoopback" isEqualToString:adapter]) { + options.ignoreLoopbackNetworkAdapter = YES; + } else if ([@"adapterTypeAny" isEqualToString:adapter]) { + options.ignoreEthernetNetworkAdapter = YES; + options.ignoreWiFiNetworkAdapter = YES; + options.ignoreCellularNetworkAdapter = YES; + options.ignoreVPNNetworkAdapter = YES; + options.ignoreLoopbackNetworkAdapter = YES; + } + } + + [_peerConnectionFactory setOptions: options]; + } +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { + if ([@"initialize" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* options = argsMap[@"options"]; + BOOL enableBypassVoiceProcessing = NO; + if(options[@"bypassVoiceProcessing"] != nil){ + enableBypassVoiceProcessing = ((NSNumber*)options[@"bypassVoiceProcessing"]).boolValue; + } + NSArray* networkIgnoreMask = [NSArray new]; + if (options[@"networkIgnoreMask"] != nil) { + networkIgnoreMask = ((NSArray*)options[@"networkIgnoreMask"]); + } + [self initialize:networkIgnoreMask bypassVoiceProcessing:enableBypassVoiceProcessing]; + result(@""); + } else if ([@"createPeerConnection" isEqualToString:call.method]) { + NSDictionary* 
argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + NSDictionary* constraints = argsMap[@"constraints"]; + + RTCPeerConnection* peerConnection = [self.peerConnectionFactory + peerConnectionWithConfiguration:[self RTCConfiguration:configuration] + constraints:[self parseMediaConstraints:constraints] + delegate:self]; + + peerConnection.remoteStreams = [NSMutableDictionary new]; + peerConnection.remoteTracks = [NSMutableDictionary new]; + peerConnection.dataChannels = [NSMutableDictionary new]; + + NSString* peerConnectionId = [[NSUUID UUID] UUIDString]; + peerConnection.flutterId = peerConnectionId; + + /*Create Event Channel.*/ + peerConnection.eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectionEvent%@", + peerConnectionId] + binaryMessenger:_messenger]; + [peerConnection.eventChannel setStreamHandler:peerConnection]; + + self.peerConnections[peerConnectionId] = peerConnection; + result(@{@"peerConnectionId" : peerConnectionId}); + } else if ([@"getUserMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getUserMedia:constraints result:result]; + } else if ([@"getDisplayMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getDisplayMedia:constraints result:result]; + } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { + [self createLocalMediaStream:result]; + } else if ([@"getSources" isEqualToString:call.method]) { + [self getSources:result]; + } else if ([@"selectAudioInput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId = argsMap[@"deviceId"]; + [self selectAudioInput:deviceId result:result]; + } else if ([@"selectAudioOutput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId 
= argsMap[@"deviceId"]; + [self selectAudioOutput:deviceId result:result]; + } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + [self mediaStreamGetTracks:streamId result:result]; + } else if ([@"createOffer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createAnswer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionCreateAnswer:constraints peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream* stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection && stream) { + [peerConnection addStream:stream]; + result(@""); + } else { + result([FlutterError + 
errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString + stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"removeStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream* stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection && stream) { + [peerConnection removeStream:stream]; + result(nil); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString + stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"captureFrame" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"setLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary* descriptionMap = argsMap[@"description"]; + NSString* sdp = 
descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType + sdp:sdp]; + if (peerConnection) { + [self peerConnectionSetLocalDescription:description + peerConnection:peerConnection + result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary* descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType + sdp:sdp]; + + if (peerConnection) { + [self peerConnectionSetRemoteDescription:description + peerConnection:peerConnection + result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"sendDtmf" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* tone = argsMap[@"tone"]; + int duration = ((NSNumber*)argsMap[@"duration"]).intValue; + int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; + + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCRtpSender* audioSender = nil; + for (RTCRtpSender* rtpSender in peerConnection.senders) { + if ([[[rtpSender track] kind] isEqualToString:@"audio"]) { + 
audioSender = rtpSender; + } + } + if (audioSender) { + NSOperationQueue* queue = [[NSOperationQueue alloc] init]; + [queue addOperationWithBlock:^{ + double durationMs = duration / 1000.0; + double interToneGapMs = interToneGap / 1000.0; + [audioSender.dtmfSender insertDtmf:(NSString*)tone + duration:(NSTimeInterval)durationMs + interToneGap:(NSTimeInterval)interToneGapMs]; + NSLog(@"DTMF Tone played "); + }]; + } + + result(@{@"result" : @"success"}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addCandidate" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* candMap = argsMap[@"candidate"]; + NSString* sdp = candMap[@"candidate"]; + id sdpMLineIndexValue = candMap[@"sdpMLineIndex"]; + int sdpMLineIndex = 0; + if (![sdpMLineIndexValue isKindOfClass:[NSNull class]]) { + sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; + } + NSString* sdpMid = candMap[@"sdpMid"]; + + RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp + sdpMLineIndex:sdpMLineIndex + sdpMid:sdpMid]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection) { + [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getStats" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + id trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + if (trackId != nil 
&& trackId != [NSNull null]) { + return [self peerConnectionGetStatsForTrackId:trackId + peerConnection:peerConnection + result:result]; + } else { + return [self peerConnectionGetStats:peerConnection result:result]; + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createDataChannel" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* label = argsMap[@"label"]; + NSDictionary* dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; + [self createDataChannel:peerConnectionId + label:label + config:[self RTCDataChannelConfiguration:dataChannelDict] + messenger:_messenger + result:result]; + } else if ([@"dataChannelSend" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + NSString* type = argsMap[@"type"]; + id data = argsMap[@"data"]; + + [self dataChannelSend:peerConnectionId dataChannelId:dataChannelId data:data type:type]; + result(nil); + } else if ([@"dataChannelGetBufferedAmount" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + + [self dataChannelGetBufferedAmount:peerConnectionId dataChannelId:dataChannelId result:result]; + } + else if ([@"dataChannelClose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + [self dataChannelClose:peerConnectionId dataChannelId:dataChannelId]; + result(nil); + } else if ([@"streamDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = 
call.arguments; + NSString* streamId = argsMap[@"streamId"]; + RTCMediaStream* stream = self.localStreams[streamId]; + BOOL shouldCallResult = YES; + if (stream) { + for (RTCVideoTrack* track in stream.videoTracks) { + [_localTracks removeObjectForKey:track.trackId]; + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:videoTrack.trackId]; + if(renderer != nil) { + renderer.videoTrack = nil; + } + CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[videoTrack.trackId]; + if (stopHandler) { + shouldCallResult = NO; + stopHandler(^{ + NSLog(@"video capturer stopped, trackID = %@", videoTrack.trackId); + self.videoCapturer = nil; + result(nil); + }); + [self.videoCapturerStopHandlers removeObjectForKey:videoTrack.trackId]; + } + } + for (RTCAudioTrack* track in stream.audioTracks) { + [_localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:streamId]; + [self deactiveRtcAudioSession]; + } + if (shouldCallResult) { + // do not call if will be called in stopCapturer above. 
+ result(nil); + } + } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* enabled = argsMap[@"enabled"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil) { + track.isEnabled = enabled.boolValue; + } + result(nil); + } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + + RTCMediaStream* stream = self.localStreams[streamId]; + if (stream) { + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (track != nil) { + if ([track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + [stream addAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + [stream addVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" + message:nil + details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" + message:nil + details:nil]); + } + result(nil); + } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStream* stream = self.localStreams[streamId]; + if (stream) { + id track = self.localTracks[trackId]; + if (track != nil) { + if ([track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; + [stream removeAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = 
((LocalVideoTrack*)track).videoTrack; + [stream removeVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" + message:nil + details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" + message:nil + details:nil]); + } + result(nil); + } else if ([@"trackDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + BOOL audioTrack = NO; + for (NSString* streamId in self.localStreams) { + RTCMediaStream* stream = [self.localStreams objectForKey:streamId]; + for (RTCAudioTrack* track in stream.audioTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream removeAudioTrack:track]; + audioTrack = YES; + } + } + for (RTCVideoTrack* track in stream.videoTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream removeVideoTrack:track]; + CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[track.trackId]; + if (stopHandler) { + stopHandler(^{ + NSLog(@"video capturer stopped, trackID = %@", track.trackId); + }); + [self.videoCapturerStopHandlers removeObjectForKey:track.trackId]; + } + } + } + } + [_localTracks removeObjectForKey:trackId]; + if (audioTrack) { + [self ensureAudioSession]; + } + FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:trackId]; + if(renderer != nil) { + renderer.videoTrack = nil; + } + result(nil); + } else if ([@"restartIce" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (!peerConnection) { + result([FlutterError errorWithCode:@"restartIce: peerConnection is nil" + message:nil + details:nil]); + } else { + [peerConnection restartIce]; + result(nil); + } + } else if ([@"peerConnectionClose" isEqualToString:call.method] || + [@"peerConnectionDispose" 
isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [peerConnection close]; + [self.peerConnections removeObjectForKey:peerConnectionId]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary* dataChannels = peerConnection.dataChannels; + for (NSString* dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. + } + [dataChannels removeAllObjects]; + } + [self deactiveRtcAudioSession]; + result(nil); + } else if ([@"createVideoRenderer" isEqualToString:call.method]) { + FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures + messenger:_messenger]; + self.renders[@(render.textureId)] = render; + result(@{@"textureId" : @(render.textureId)}); + } else if ([@"videoRendererDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer* render = self.renders[textureId]; + if(render != nil) { + render.videoTrack = nil; + [render dispose]; + [self.renders removeObjectForKey:textureId]; + } + result(nil); + } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer* render = self.renders[textureId]; + NSString* streamId = argsMap[@"streamId"]; + NSString* ownerTag = argsMap[@"ownerTag"]; + NSString* trackId = argsMap[@"trackId"]; + if (!render) { + result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" + 
message:nil + details:nil]); + return; + } + RTCMediaStream* stream = nil; + RTCVideoTrack* videoTrack = nil; + if ([ownerTag isEqualToString:@"local"]) { + stream = _localStreams[streamId]; + } + if (!stream) { + stream = [self streamForId:streamId peerConnectionId:ownerTag]; + } + if (stream) { + NSArray* videoTracks = stream ? stream.videoTracks : nil; + videoTrack = videoTracks && videoTracks.count ? videoTracks[0] : nil; + for (RTCVideoTrack* track in videoTracks) { + if ([track.trackId isEqualToString:trackId]) { + videoTrack = track; + } + } + if (!videoTrack) { + NSLog(@"Not found video track for RTCMediaStream: %@", streamId); + } + } + [self rendererSetSrcObject:render stream:videoTrack]; + result(nil); + } +#if TARGET_OS_IPHONE + else if ([@"videoPlatformViewRendererSetSrcObject" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* viewId = argsMap[@"viewId"]; + FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId]; + NSString* streamId = argsMap[@"streamId"]; + NSString* ownerTag = argsMap[@"ownerTag"]; + NSString* trackId = argsMap[@"trackId"]; + if (!render) { + result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" + message:nil + details:nil]); + return; + } + RTCMediaStream* stream = nil; + RTCVideoTrack* videoTrack = nil; + if ([ownerTag isEqualToString:@"local"]) { + stream = _localStreams[streamId]; + } + if (!stream) { + stream = [self streamForId:streamId peerConnectionId:ownerTag]; + } + if (stream) { + NSArray* videoTracks = stream ? stream.videoTracks : nil; + videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; + for (RTCVideoTrack* track in videoTracks) { + if ([track.trackId isEqualToString:trackId]) { + videoTrack = track; + } + } + if (!videoTrack) { + NSLog(@"Not found video track for RTCMediaStream: %@", streamId); + } + } + render.videoTrack = videoTrack; + result(nil); + } else if ([@"videoPlatformViewRendererDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* viewId = argsMap[@"viewId"]; + FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId]; + if(render != nil) { + render.videoTrack = nil; + [_platformViewFactory.renders removeObjectForKey:viewId]; + } + result(nil); + } +#endif + else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [self mediaStreamTrackHasTorch:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + BOOL torch = [argsMap[@"torch"] boolValue]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " 
+ stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetZoom" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + double zoomLevel = [argsMap[@"zoomLevel"] doubleValue]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [self mediaStreamTrackSetZoom:videoTrack zoomLevel:zoomLevel result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* focusMode = argsMap[@"focusMode"]; + id track = self.localTracks[trackId]; + if (track != nil && focusMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusMode:videoTrack focusMode:focusMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusPoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* focusPoint = argsMap[@"focusPoint"]; + id track = self.localTracks[trackId]; + if (track != nil && focusPoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = 
(RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusPoint:videoTrack focusPoint:focusPoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposureMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* exposureMode = argsMap[@"exposureMode"]; + id track = self.localTracks[trackId]; + if (track != nil && exposureMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposureMode:videoTrack exposureMode:exposureMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposurePoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* exposurePoint = argsMap[@"exposurePoint"]; + id track = self.localTracks[trackId]; + if (track != nil && exposurePoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposurePoint:videoTrack exposurePoint:exposurePoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if 
([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSwitchCamera:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"setVolume" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* volume = argsMap[@"volume"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + RTCAudioSource* audioSource = audioTrack.source; + audioSource.volume = [volume doubleValue]; + } + result(nil); + } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* mute = argsMap[@"mute"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; + audioTrack.isEnabled = !mute.boolValue; + } + result(nil); + } +#if TARGET_OS_IPHONE + else if ([@"enableSpeakerphone" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* enable = argsMap[@"enable"]; + _speakerOn = enable.boolValue; + _speakerOnButPreferBluetooth = NO; + [AudioUtils setSpeakerphoneOn:_speakerOn]; + postEvent(self.eventSink, @{@"event" : 
@"onDeviceChange"}); + result(nil); + } + else if ([@"ensureAudioSession" isEqualToString:call.method]) { + [self ensureAudioSession]; + result(nil); + } + else if ([@"enableSpeakerphoneButPreferBluetooth" isEqualToString:call.method]) { + _speakerOn = YES; + _speakerOnButPreferBluetooth = YES; + [AudioUtils setSpeakerphoneOnButPreferBluetooth]; + result(nil); + } + else if([@"setAppleAudioConfiguration" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + [AudioUtils setAppleAudioConfiguration:configuration]; + result(nil); + } +#endif + else if ([@"getLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCSessionDescription* sdp = peerConnection.localDescription; + if (nil == sdp) { + result(nil); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCSessionDescription* sdp = peerConnection.remoteDescription; + if (nil == sdp) { + result(nil); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not 
found!"] + details:nil]); + } + } else if ([@"setConfiguration" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* configuration = argsMap[@"configuration"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] + peerConnection:peerConnection]; + result(nil); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection addTrack:track streamIds:streamIds]; + if (sender == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection.addTrack failed!"] + details:nil]); + return; + } + + result([self rtpSenderToMap:sender]); + } else if ([@"removeTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* 
peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + result(@{@"result" : @([peerConnection removeTrack:sender])}); + } else if ([@"addTransceiver" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* transceiverInit = argsMap[@"transceiverInit"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* mediaType = argsMap[@"mediaType"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transceiver = nil; + BOOL hasAudio = NO; + if (trackId != nil) { + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit* init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverWithTrack:track init:init]; + } else { + transceiver = [peerConnection addTransceiverWithTrack:track]; + } + if ([track.kind isEqualToString:@"audio"]) { + hasAudio = YES; + } + } else if (mediaType != nil) { + RTCRtpMediaType rtpMediaType = [self stringToRtpMediaType:mediaType]; + if 
(transceiverInit != nil) { + RTCRtpTransceiverInit* init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverOfType:(rtpMediaType) init:init]; + } else { + transceiver = [peerConnection addTransceiverOfType:rtpMediaType]; + } + if (rtpMediaType == RTCRtpMediaTypeAudio) { + hasAudio = YES; + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: Incomplete parameters!"] + details:nil]); + return; + } + + if (transceiver == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: can't addTransceiver!"] + details:nil]); + return; + } + + result([self transceiverToMap:transceiver]); + } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* direction = argsMap[@"direction"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + [transcevier setDirection:[self stringToTransceiverDirection:direction] error:nil]; + result(nil); + } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method] || + [@"rtpTransceiverGetDirection" isEqualToString:call.method]) { + NSDictionary* argsMap = 
call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + + if ([@"rtpTransceiverGetDirection" isEqualToString:call.method]) { + result(@{@"result" : [self transceiverDirectionString:transcevier.direction]}); + } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]) { + RTCRtpTransceiverDirection directionOut = transcevier.direction; + if ([transcevier currentDirection:&directionOut]) { + result(@{@"result" : [self transceiverDirectionString:directionOut]}); + } else { + result(nil); + } + } + } else if ([@"rtpTransceiverStop" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transcevier == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString 
stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + [transcevier stopInternal]; + result(nil); + } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSDictionary* parameters = argsMap[@"parameters"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setParameters:[self updateRtpParameters:sender.parameters with:parameters]]; + + result(@{@"result" : @(YES)}); + } else if ([@"rtpSenderReplaceTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack* 
track = nil; + if ([trackId length] > 0) { + track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack* track = nil; + if ([trackId length] > 0) { + track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetStreams" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + 
errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setStreamIds:streamIds]; + result(nil); + } else if ([@"getSenders" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* senders = [NSMutableArray array]; + for (RTCRtpSender* sender in peerConnection.senders) { + [senders addObject:[self rtpSenderToMap:sender]]; + } + + result(@{@"senders" : senders}); + } else if ([@"getReceivers" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* receivers = [NSMutableArray array]; + for (RTCRtpReceiver* receiver in peerConnection.receivers) { + [receivers addObject:[self receiverToMap:receiver]]; + } + + result(@{@"receivers" : receivers}); + } else if ([@"getTransceivers" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* 
peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* transceivers = [NSMutableArray array]; + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + [transceivers addObject:[self transceiverToMap:transceiver]]; + } + + result(@{@"transceivers" : transceivers}); + } else if ([@"getDesktopSources" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self getDesktopSources:argsMap result:result]; + } else if ([@"updateDesktopSources" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self updateDesktopSources:argsMap result:result]; + } else if ([@"getDesktopSourceThumbnail" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self getDesktopSourceThumbnail:argsMap result:result]; + } else if ([@"setCodecPreferences" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self transceiverSetCodecPreferences:argsMap result:result]; + } else if ([@"getRtpReceiverCapabilities" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self peerConnectionGetRtpReceiverCapabilities:argsMap result:result]; + } else if ([@"getRtpSenderCapabilities" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self peerConnectionGetRtpSenderCapabilities:argsMap result:result]; + } else if ([@"getSignalingState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self 
stringForSignalingState:peerConnection.signalingState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getIceGatheringState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForICEGatheringState:peerConnection.iceGatheringState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getIceConnectionState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForICEConnectionState:peerConnection.iceConnectionState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getConnectionState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForPeerConnectionState:peerConnection.connectionState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } +#if TARGET_OS_IOS + } else if 
// Tears the plugin down: releases every locally created track and stream,
// then closes all open peer connections before dropping the factory.
- (void)dealloc {
  [_localTracks removeAllObjects];
  _localTracks = nil;
  [_localStreams removeAllObjects];
  _localStreams = nil;

  for (NSString* peerConnectionId in _peerConnections) {
    RTCPeerConnection* peerConnection = _peerConnections[peerConnectionId];
    // Detach the delegate first so close does not deliver events into a
    // half-torn-down plugin instance.
    peerConnection.delegate = nil;
    [peerConnection close];
  }
  [_peerConnections removeAllObjects];
  _peerConnectionFactory = nil;
}

/// YES when at least one locally captured audio track is registered.
/// Used to decide whether the audio session must allow recording.
- (BOOL)hasLocalAudioTrack {
  // Iterate values directly instead of allKeys + objectForKey: — same
  // behavior, one dictionary lookup less per element.
  for (id track in _localTracks.allValues) {
    if ([track isKindOfClass:[LocalAudioTrack class]]) {
      return YES;
    }
  }
  return NO;
}

/// (Re)activates the audio session, enabling recording only when a local
/// audio track exists. Compiled to a no-op on non-iPhone targets.
- (void)ensureAudioSession {
#if TARGET_OS_IPHONE
  [AudioUtils ensureAudioSessionWithRecording:[self hasLocalAudioTrack]];
#endif
}

/// Deactivates the WebRTC audio session once nothing needs it anymore
/// (no local audio tracks and no live peer connections).
- (void)deactiveRtcAudioSession {
#if TARGET_OS_IPHONE
  if (![self hasLocalAudioTrack] && self.peerConnections.count == 0) {
    [AudioUtils deactiveRtcAudioSession];
  }
#endif
}

/// Serializes a local track into the map shape the Dart side expects.
/// Extracted: the original duplicated this literal for audio and video.
- (NSDictionary*)localTrackDescription:(RTCMediaStreamTrack*)track {
  return @{
    @"enabled" : @(track.isEnabled),
    @"id" : track.trackId,
    @"kind" : track.kind,
    @"label" : track.trackId,
    @"readyState" : @"live",
    @"remote" : @(NO)
  };
}

/// Answers the "mediaStreamGetTracks" channel call: registers every track of
/// the stream in localTracks and replies with their serialized descriptions,
/// or nil when the stream id is unknown.
- (void)mediaStreamGetTracks:(NSString*)streamId result:(FlutterResult)result {
  RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""];
  if (!stream) {
    result(nil);
    return;
  }

  NSMutableArray* audioTracks = [NSMutableArray array];
  NSMutableArray* videoTracks = [NSMutableArray array];

  for (RTCAudioTrack* track in stream.audioTracks) {
    // Consistency fix: the original used self.localTracks for audio but the
    // bare _localTracks ivar for video; use the property accessor for both.
    [self.localTracks setObject:[[LocalAudioTrack alloc] initWithTrack:track]
                         forKey:track.trackId];
    [audioTracks addObject:[self localTrackDescription:track]];
  }

  for (RTCVideoTrack* track in stream.videoTracks) {
    [self.localTracks setObject:[[LocalVideoTrack alloc] initWithTrack:track]
                         forKey:track.trackId];
    [videoTracks addObject:[self localTrackDescription:track]];
  }

  result(@{@"audioTracks" : audioTracks, @"videoTracks" : videoTracks});
}

/// Looks a stream up by id: in the named peer connection's remote streams
/// when peerConnectionId is non-empty, otherwise across every peer
/// connection; falls back to locally created streams.
- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString*)peerConnectionId {
  RTCMediaStream* stream = nil;
  if (peerConnectionId.length > 0) {
    RTCPeerConnection* peerConnection = [_peerConnections objectForKey:peerConnectionId];
    stream = peerConnection.remoteStreams[streamId];
  } else {
    for (RTCPeerConnection* peerConnection in _peerConnections.allValues) {
      stream = peerConnection.remoteStreams[streamId];
      if (stream) {
        break;
      }
    }
  }
  if (!stream) {
    stream = _localStreams[streamId];
  }
  return stream;
}
/// Finds a remote (receiver-side) track by id across every peer connection.
/// Checks each connection's cached remoteTracks map first, then falls back to
/// scanning the transceivers' receivers. Returns nil when no match exists.
- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId {
  RTCMediaStreamTrack* mediaStreamTrack = nil;
  for (NSString* currentId in _peerConnections.allKeys) {
    RTCPeerConnection* peerConnection = _peerConnections[currentId];
    mediaStreamTrack = peerConnection.remoteTracks[trackId];
    if (!mediaStreamTrack) {
      for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) {
        if (transceiver.receiver.track != nil &&
            [transceiver.receiver.track.trackId isEqual:trackId]) {
          mediaStreamTrack = transceiver.receiver.track;
          break;
        }
      }
    }
    if (mediaStreamTrack) {
      break;
    }
  }

  return mediaStreamTrack;
}

/// Given a remote video track id, returns the id of the first audio track
/// received by the same peer connection (used to pair audio with video when
/// recording). Returns nil when no such pair is found.
- (NSString*)audioTrackIdForVideoTrackId:(NSString*)videoTrackId {
  NSString* audioTrackId = nil;

  // Iterate through all peerConnections.
  for (NSString* peerConnectionId in self.peerConnections) {
    RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];

    // Find the receiver carrying the requested video track.
    for (RTCRtpReceiver* receiver in peerConnection.receivers) {
      // Fix: use the typed `track` property instead of KVC
      // ([receiver valueForKey:@"track"]) — same value, compile-time checked.
      RTCMediaStreamTrack* track = receiver.track;
      if ([track.kind isEqualToString:@"video"] && [track.trackId isEqualToString:videoTrackId]) {
        // Found the video track; take the first audio receiver's track id
        // from the same connection, if any.
        for (RTCRtpReceiver* audioReceiver in peerConnection.receivers) {
          RTCMediaStreamTrack* audioTrack = audioReceiver.track;
          if ([audioTrack.kind isEqualToString:@"audio"]) {
            audioTrackId = audioTrack.trackId;
            break;
          }
        }
        break;
      }
    }

    // Stop scanning other connections once a pair was found.
    if (audioTrackId != nil) {
      break;
    }
  }

  return audioTrackId;
}
/// Resolves a track id to its RTCMediaStreamTrack.
///
/// Local tracks win; otherwise the peer connections' remote-track caches and,
/// as a fallback, their transceiver receivers are searched. When
/// peerConnectionId is non-nil the remote search is restricted to that one
/// connection. Returns nil when nothing matches.
- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId peerConnectionId:(NSString*)peerConnectionId {
  id localTrack = _localTracks[trackId];
  if (localTrack) {
    return [localTrack track];
  }

  for (NSString* currentId in _peerConnections.allKeys) {
    if (peerConnectionId && ![currentId isEqualToString:peerConnectionId]) {
      continue;
    }
    RTCPeerConnection* peerConnection = _peerConnections[currentId];
    RTCMediaStreamTrack* remoteTrack = peerConnection.remoteTracks[trackId];
    if (!remoteTrack) {
      for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) {
        if (transceiver.receiver.track != nil &&
            [transceiver.receiver.track.trackId isEqual:trackId]) {
          remoteTrack = transceiver.receiver.track;
          break;
        }
      }
    }
    if (remoteTrack) {
      return remoteTrack;
    }
  }
  return nil;
}

/// Builds an RTCIceServer from its JSON description
/// ({url|urls, username, credential}). Returns nil on malformed input.
- (RTCIceServer*)RTCIceServer:(id)json {
  if (!json) {
    NSLog(@"a valid iceServer value");
    return nil;
  }

  if (![json isKindOfClass:[NSDictionary class]]) {
    NSLog(@"must be an object");
    return nil;
  }

  NSArray* urls;
  if ([json[@"url"] isKindOfClass:[NSString class]]) {
    // TODO: 'url' is non-standard
    urls = @[ json[@"url"] ];
  } else if ([json[@"urls"] isKindOfClass:[NSString class]]) {
    urls = @[ json[@"urls"] ];
  } else {
    urls = (NSArray*)json[@"urls"];
  }

  if (json[@"username"] != nil || json[@"credential"] != nil) {
    return [[RTCIceServer alloc] initWithURLStrings:urls
                                           username:json[@"username"]
                                         credential:json[@"credential"]];
  }

  return [[RTCIceServer alloc] initWithURLStrings:urls];
}

/// Translates the Dart-side configuration map into an RTCConfiguration.
/// Unknown or malformed entries are ignored, so callers always get a usable
/// (possibly default) configuration back.
- (nonnull RTCConfiguration*)RTCConfiguration:(id)json {
  RTCConfiguration* config = [[RTCConfiguration alloc] init];

  if (!json) {
    return config;
  }

  if (![json isKindOfClass:[NSDictionary class]]) {
    NSLog(@"must be an object");
    return config;
  }

  if (json[@"audioJitterBufferMaxPackets"] != nil &&
      [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) {
    config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue];
  }

  if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) {
    NSString* bundlePolicy = json[@"bundlePolicy"];
    if ([bundlePolicy isEqualToString:@"balanced"]) {
      config.bundlePolicy = RTCBundlePolicyBalanced;
    } else if ([bundlePolicy isEqualToString:@"max-compat"]) {
      config.bundlePolicy = RTCBundlePolicyMaxCompat;
    } else if ([bundlePolicy isEqualToString:@"max-bundle"]) {
      config.bundlePolicy = RTCBundlePolicyMaxBundle;
    }
  }

  // BUG FIX: the "private api" section below previously re-read this key as
  // json[@"iceConnectionReceivingTimeout"] (copy-paste), overwriting the
  // correct value set here — including with 0 when that other key was absent.
  // The duplicated re-parsing of this and the two keys above/below has been
  // removed; this single, correctly keyed assignment remains.
  if (json[@"iceBackupCandidatePairPingInterval"] != nil &&
      [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) {
    config.iceBackupCandidatePairPingInterval =
        [json[@"iceBackupCandidatePairPingInterval"] intValue];
  }

  if (json[@"iceConnectionReceivingTimeout"] != nil &&
      [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) {
    config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue];
  }

  if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) {
    NSMutableArray* iceServers = [NSMutableArray new];
    for (id server in json[@"iceServers"]) {
      RTCIceServer* convert = [self RTCIceServer:server];
      if (convert != nil) {
        [iceServers addObject:convert];
      }
    }
    config.iceServers = iceServers;
  }

  if (json[@"iceTransportPolicy"] != nil &&
      [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) {
    NSString* iceTransportPolicy = json[@"iceTransportPolicy"];
    if ([iceTransportPolicy isEqualToString:@"all"]) {
      config.iceTransportPolicy = RTCIceTransportPolicyAll;
    } else if ([iceTransportPolicy isEqualToString:@"none"]) {
      config.iceTransportPolicy = RTCIceTransportPolicyNone;
    } else if ([iceTransportPolicy isEqualToString:@"nohost"]) {
      config.iceTransportPolicy = RTCIceTransportPolicyNoHost;
    } else if ([iceTransportPolicy isEqualToString:@"relay"]) {
      config.iceTransportPolicy = RTCIceTransportPolicyRelay;
    }
  }

  if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) {
    NSString* rtcpMuxPolicy = json[@"rtcpMuxPolicy"];
    if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) {
      config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
    } else if ([rtcpMuxPolicy isEqualToString:@"require"]) {
      config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire;
    }
  }

  if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) {
    NSString* sdpSemantics = json[@"sdpSemantics"];
    if ([sdpSemantics isEqualToString:@"plan-b"]) {
      config.sdpSemantics = RTCSdpSemanticsPlanB;
    } else if ([sdpSemantics isEqualToString:@"unified-plan"]) {
      config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
    }
  }

  if (json[@"maxIPv6Networks"] != nil && [json[@"maxIPv6Networks"] isKindOfClass:[NSNumber class]]) {
    NSNumber* maxIPv6Networks = json[@"maxIPv6Networks"];
    config.maxIPv6Networks = [maxIPv6Networks intValue];
  }

  // === below is private api in webrtc ===
  if (json[@"tcpCandidatePolicy"] != nil &&
      [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) {
    NSString* tcpCandidatePolicy = json[@"tcpCandidatePolicy"];
    if ([tcpCandidatePolicy isEqualToString:@"enabled"]) {
      config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled;
    } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) {
      config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
    }
  }

  // candidateNetworkPolicy (private api)
  if (json[@"candidateNetworkPolicy"] != nil &&
      [json[@"candidateNetworkPolicy"] isKindOfClass:[NSString class]]) {
    NSString* candidateNetworkPolicy = json[@"candidateNetworkPolicy"];
    if ([candidateNetworkPolicy isEqualToString:@"all"]) {
      config.candidateNetworkPolicy = RTCCandidateNetworkPolicyAll;
    } else if ([candidateNetworkPolicy isEqualToString:@"low_cost"]) {
      config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
    }
  }

  // KeyType (private api)
  if (json[@"keyType"] != nil && [json[@"keyType"] isKindOfClass:[NSString class]]) {
    NSString* keyType = json[@"keyType"];
    if ([keyType isEqualToString:@"RSA"]) {
      config.keyType = RTCEncryptionKeyTypeRSA;
    } else if ([keyType isEqualToString:@"ECDSA"]) {
      config.keyType = RTCEncryptionKeyTypeECDSA;
    }
  }

  // continualGatheringPolicy (private api)
  if (json[@"continualGatheringPolicy"] != nil &&
      [json[@"continualGatheringPolicy"] isKindOfClass:[NSString class]]) {
    NSString* continualGatheringPolicy = json[@"continualGatheringPolicy"];
    if ([continualGatheringPolicy isEqualToString:@"gather_once"]) {
      config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherOnce;
    } else if ([continualGatheringPolicy isEqualToString:@"gather_continually"]) {
      config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually;
    }
  }

  // audioJitterBufferFastAccelerate (private api)
  if (json[@"audioJitterBufferFastAccelerate"] != nil &&
      [json[@"audioJitterBufferFastAccelerate"] isKindOfClass:[NSNumber class]]) {
    NSNumber* audioJitterBufferFastAccelerate = json[@"audioJitterBufferFastAccelerate"];
    config.audioJitterBufferFastAccelerate = [audioJitterBufferFastAccelerate boolValue];
  }

  // pruneTurnPorts (private api)
  if (json[@"pruneTurnPorts"] != nil && [json[@"pruneTurnPorts"] isKindOfClass:[NSNumber class]]) {
    NSNumber* pruneTurnPorts = json[@"pruneTurnPorts"];
    config.shouldPruneTurnPorts = [pruneTurnPorts boolValue];
  }

  // presumeWritableWhenFullyRelayed (private api)
  if (json[@"presumeWritableWhenFullyRelayed"] != nil &&
      [json[@"presumeWritableWhenFullyRelayed"] isKindOfClass:[NSNumber class]]) {
    NSNumber* presumeWritableWhenFullyRelayed = json[@"presumeWritableWhenFullyRelayed"];
    config.shouldPresumeWritableWhenFullyRelayed = [presumeWritableWhenFullyRelayed boolValue];
  }

  // cryptoOptions (private api)
  if (json[@"cryptoOptions"] != nil &&
      [json[@"cryptoOptions"] isKindOfClass:[NSDictionary class]]) {
    id options = json[@"cryptoOptions"];
    BOOL srtpEnableGcmCryptoSuites = NO;
    BOOL sframeRequireFrameEncryption = NO;
    BOOL srtpEnableEncryptedRtpHeaderExtensions = NO;
    BOOL srtpEnableAes128Sha1_32CryptoCipher = NO;

    if (options[@"enableGcmCryptoSuites"] != nil &&
        [options[@"enableGcmCryptoSuites"] isKindOfClass:[NSNumber class]]) {
      NSNumber* value = options[@"enableGcmCryptoSuites"];
      srtpEnableGcmCryptoSuites = [value boolValue];
    }

    if (options[@"requireFrameEncryption"] != nil &&
        [options[@"requireFrameEncryption"] isKindOfClass:[NSNumber class]]) {
      NSNumber* value = options[@"requireFrameEncryption"];
      sframeRequireFrameEncryption = [value boolValue];
    }

    if (options[@"enableEncryptedRtpHeaderExtensions"] != nil &&
        [options[@"enableEncryptedRtpHeaderExtensions"] isKindOfClass:[NSNumber class]]) {
      NSNumber* value = options[@"enableEncryptedRtpHeaderExtensions"];
      srtpEnableEncryptedRtpHeaderExtensions = [value boolValue];
    }

    if (options[@"enableAes128Sha1_32CryptoCipher"] != nil &&
        [options[@"enableAes128Sha1_32CryptoCipher"] isKindOfClass:[NSNumber class]]) {
      NSNumber* value = options[@"enableAes128Sha1_32CryptoCipher"];
      srtpEnableAes128Sha1_32CryptoCipher = [value boolValue];
    }

    config.cryptoOptions = [[RTCCryptoOptions alloc]
             initWithSrtpEnableGcmCryptoSuites:srtpEnableGcmCryptoSuites
           srtpEnableAes128Sha1_32CryptoCipher:srtpEnableAes128Sha1_32CryptoCipher
        srtpEnableEncryptedRtpHeaderExtensions:srtpEnableEncryptedRtpHeaderExtensions
                  sframeRequireFrameEncryption:sframeRequireFrameEncryption];
  }

  return config;
}

/// Builds an RTCDataChannelConfiguration from its JSON description.
/// Returns nil when json is absent or not a dictionary.
- (RTCDataChannelConfiguration*)RTCDataChannelConfiguration:(id)json {
  if (!json) {
    return nil;
  }
  if ([json isKindOfClass:[NSDictionary class]]) {
    RTCDataChannelConfiguration* init = [RTCDataChannelConfiguration new];

    if (json[@"id"]) {
      [init setChannelId:(int)[json[@"id"] integerValue]];
    }
    if (json[@"ordered"]) {
      init.isOrdered = [json[@"ordered"] boolValue];
    }
    if (json[@"maxRetransmits"]) {
      init.maxRetransmits = [json[@"maxRetransmits"] intValue];
    }
    if (json[@"negotiated"]) {
      init.isNegotiated = [json[@"negotiated"] boolValue];
    }
    if (json[@"protocol"]) {
      init.protocol = json[@"protocol"];
    }
    return init;
  }
  return nil;
}

/// Converts a {left, top, width, height} map into a CGRect.
- (CGRect)parseRect:(NSDictionary*)rect {
  return CGRectMake(
      [[rect valueForKey:@"left"] doubleValue], [[rect valueForKey:@"top"] doubleValue],
      [[rect valueForKey:@"width"] doubleValue], [[rect valueForKey:@"height"] doubleValue]);
}

/// Serializes a DTMF sender for the Dart side; gap/duration are converted
/// from milliseconds to seconds.
/// NOTE(review): the protocol qualifier on the parameter appears to have been
/// stripped by formatting (dot syntax is not valid on bare `id`); restored as
/// id<RTCDtmfSender> — confirm against the original header.
- (NSDictionary*)dtmfSenderToMap:(id<RTCDtmfSender>)dtmf Id:(NSString*)Id {
  return @{
    @"dtmfSenderId" : Id,
    @"interToneGap" : @(dtmf.interToneGap / 1000.0),
    @"duration" : @(dtmf.duration / 1000.0),
  };
}
@(parameters.rtcp.isReducedSize), + }; + + NSMutableArray* headerExtensions = [NSMutableArray array]; + for (RTCRtpHeaderExtension* headerExtension in parameters.headerExtensions) { + [headerExtensions addObject:@{ + @"uri" : headerExtension.uri, + @"encrypted" : @(headerExtension.encrypted), + @"id" : @(headerExtension.id), + }]; + } + + NSMutableArray* encodings = [NSMutableArray array]; + for (RTCRtpEncodingParameters* encoding in parameters.encodings) { + // non-nil values + NSMutableDictionary* obj = [@{@"active" : @(encoding.isActive)} mutableCopy]; + // optional values + if (encoding.rid != nil) + [obj setObject:encoding.rid forKey:@"rid"]; + if (encoding.minBitrateBps != nil) + [obj setObject:encoding.minBitrateBps forKey:@"minBitrate"]; + if (encoding.maxBitrateBps != nil) + [obj setObject:encoding.maxBitrateBps forKey:@"maxBitrate"]; + if (encoding.maxFramerate != nil) + [obj setObject:encoding.maxFramerate forKey:@"maxFramerate"]; + if (encoding.numTemporalLayers != nil) + [obj setObject:encoding.numTemporalLayers forKey:@"numTemporalLayers"]; + if (encoding.scaleResolutionDownBy != nil) + [obj setObject:encoding.scaleResolutionDownBy forKey:@"scaleResolutionDownBy"]; + if (encoding.ssrc != nil) + [obj setObject:encoding.ssrc forKey:@"ssrc"]; + + [encodings addObject:obj]; + } + + NSMutableArray* codecs = [NSMutableArray array]; + for (RTCRtpCodecParameters* codec in parameters.codecs) { + [codecs addObject:@{ + @"name" : codec.name, + @"payloadType" : @(codec.payloadType), + @"clockRate" : codec.clockRate, + @"numChannels" : codec.numChannels ? 
codec.numChannels : @(1), + @"parameters" : codec.parameters, + @"kind" : codec.kind + }]; + } + + NSString *degradationPreference = @"balanced"; + if(parameters.degradationPreference != nil) { + if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceMaintainFramerate ) { + degradationPreference = @"maintain-framerate"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceMaintainResolution) { + degradationPreference = @"maintain-resolution"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceBalanced) { + degradationPreference = @"balanced"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceDisabled) { + degradationPreference = @"disabled"; + } + } + + return @{ + @"transactionId" : parameters.transactionId, + @"rtcp" : rtcp, + @"headerExtensions" : headerExtensions, + @"encodings" : encodings, + @"codecs" : codecs, + @"degradationPreference" : degradationPreference, + }; +} + +- (NSString*)streamTrackStateToString:(RTCMediaStreamTrackState)state { + switch (state) { + case RTCMediaStreamTrackStateLive: + return @"live"; + case RTCMediaStreamTrackStateEnded: + return @"ended"; + default: + break; + } + return @""; +} + +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream*)stream ownerTag:(NSString*)ownerTag { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + [videoTracks addObject:[self mediaTrackToMap:track]]; + } + + return @{ + @"streamId" : stream.streamId, + @"ownerTag" : ownerTag, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks, + + }; +} + +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track { + if (track == nil) + return @{}; + NSDictionary* params = @{ + @"enabled" : 
@(track.isEnabled), + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"readyState" : [self streamTrackStateToString:track.readyState], + @"remote" : @(YES) + }; + return params; +} + +- (NSDictionary*)rtpSenderToMap:(RTCRtpSender*)sender { + NSDictionary* params = @{ + @"senderId" : sender.senderId, + @"ownsTrack" : @(YES), + @"rtpParameters" : [self rtpParametersToMap:sender.parameters], + @"track" : [self mediaTrackToMap:sender.track], + @"dtmfSender" : [self dtmfSenderToMap:sender.dtmfSender Id:sender.senderId] + }; + return params; +} + +- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver { + NSDictionary* params = @{ + @"receiverId" : receiver.receiverId, + @"rtpParameters" : [self rtpParametersToMap:receiver.parameters], + @"track" : [self mediaTrackToMap:receiver.track], + }; + return params; +} + +- (RTCRtpTransceiver*)getRtpTransceiverById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + NSString *mid = transceiver.mid ? 
transceiver.mid : @""; + if ([mid isEqualToString:Id]) { + return transceiver; + } + } + return nil; +} + +- (RTCRtpSender*)getRtpSenderById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpSender* sender in peerConnection.senders) { + if ([sender.senderId isEqualToString:Id]) { + return sender; + } + } + return nil; +} + +- (RTCRtpReceiver*)getRtpReceiverById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpReceiver* receiver in peerConnection.receivers) { + if ([receiver.receiverId isEqualToString:Id]) { + return receiver; + } + } + return nil; +} + +- (RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { + RTCRtpEncodingParameters* encoding = [[RTCRtpEncodingParameters alloc] init]; + encoding.isActive = YES; + encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; + encoding.numTemporalLayers = [NSNumber numberWithInt:1]; +#if TARGET_OS_IPHONE + encoding.networkPriority = RTCPriorityLow; + encoding.bitratePriority = 1.0; +#endif + [encoding setRid:map[@"rid"]]; + + if (map[@"active"] != nil) { + [encoding setIsActive:((NSNumber*)map[@"active"]).boolValue]; + } + + if (map[@"minBitrate"] != nil) { + [encoding setMinBitrateBps:(NSNumber*)map[@"minBitrate"]]; + } + + if (map[@"maxBitrate"] != nil) { + [encoding setMaxBitrateBps:(NSNumber*)map[@"maxBitrate"]]; + } + + if (map[@"maxFramerate"] != nil) { + [encoding setMaxFramerate:(NSNumber*)map[@"maxFramerate"]]; + } + + if (map[@"numTemporalLayers"] != nil) { + [encoding setNumTemporalLayers:(NSNumber*)map[@"numTemporalLayers"]]; + } + + if (map[@"scaleResolutionDownBy"] != nil) { + [encoding setScaleResolutionDownBy:(NSNumber*)map[@"scaleResolutionDownBy"]]; + } + + if (map[@"scalabilityMode"] != nil) { + [encoding setScalabilityMode:(NSString*)map[@"scalabilityMode"]]; + } + + return encoding; +} + +- (RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { + NSArray* streamIds = map[@"streamIds"]; + NSArray* encodingsParams = map[@"sendEncodings"]; 
+ NSString* direction = map[@"direction"]; + + RTCRtpTransceiverInit* init = [[RTCRtpTransceiverInit alloc] init]; + + if (direction != nil) { + init.direction = [self stringToTransceiverDirection:direction]; + } + + if (streamIds != nil) { + init.streamIds = streamIds; + } + + if (encodingsParams != nil) { + NSMutableArray* sendEncodings = [[NSMutableArray alloc] init]; + for (NSDictionary* map in encodingsParams) { + [sendEncodings addObject:[self mapToEncoding:map]]; + } + [init setSendEncodings:sendEncodings]; + } + return init; +} + +- (RTCRtpMediaType)stringToRtpMediaType:(NSString*)type { + if ([type isEqualToString:@"audio"]) { + return RTCRtpMediaTypeAudio; + } else if ([type isEqualToString:@"video"]) { + return RTCRtpMediaTypeVideo; + } else if ([type isEqualToString:@"data"]) { + return RTCRtpMediaTypeData; + } + return RTCRtpMediaTypeAudio; +} + +- (RTCRtpTransceiverDirection)stringToTransceiverDirection:(NSString*)type { + if ([type isEqualToString:@"sendrecv"]) { + return RTCRtpTransceiverDirectionSendRecv; + } else if ([type isEqualToString:@"sendonly"]) { + return RTCRtpTransceiverDirectionSendOnly; + } else if ([type isEqualToString:@"recvonly"]) { + return RTCRtpTransceiverDirectionRecvOnly; + } else if ([type isEqualToString:@"inactive"]) { + return RTCRtpTransceiverDirectionInactive; + } + return RTCRtpTransceiverDirectionInactive; +} + +- (RTCRtpParameters*)updateRtpParameters:(RTCRtpParameters*)parameters + with:(NSDictionary*)newParameters { + // current encodings + NSArray* currentEncodings = parameters.encodings; + // new encodings + NSArray* newEncodings = [newParameters objectForKey:@"encodings"]; + + NSString *degradationPreference = [newParameters objectForKey:@"degradationPreference"]; + + if( degradationPreference != nil) { + if( [degradationPreference isEqualToString:@"maintain-framerate"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceMaintainFramerate]; + } else if ([degradationPreference 
isEqualToString:@"maintain-resolution"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceMaintainResolution]; + } else if ([degradationPreference isEqualToString:@"balanced"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceBalanced]; + } else if ([degradationPreference isEqualToString:@"disabled"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceDisabled]; + } + } + + for (int i = 0; i < [newEncodings count]; i++) { + RTCRtpEncodingParameters* currentParams = nil; + NSDictionary* newParams = [newEncodings objectAtIndex:i]; + NSString* rid = [newParams objectForKey:@"rid"]; + + // update by matching RID + if ([rid isKindOfClass:[NSString class]] && [rid length] != 0) { + // try to find current encoding with same rid + NSUInteger result = + [currentEncodings indexOfObjectPassingTest:^BOOL(RTCRtpEncodingParameters* _Nonnull obj, + NSUInteger idx, BOOL* _Nonnull stop) { + // stop if found object with matching rid + return (*stop = ([rid isEqualToString:obj.rid])); + }]; + + if (result != NSNotFound) { + currentParams = [currentEncodings objectAtIndex:result]; + } + } + + // fall back to update by index + if (currentParams == nil && i < [currentEncodings count]) { + currentParams = [currentEncodings objectAtIndex:i]; + } + + if (currentParams != nil) { + // update values + NSNumber* active = [newParams objectForKey:@"active"]; + if (active != nil) + currentParams.isActive = [active boolValue]; + NSNumber* maxBitrate = [newParams objectForKey:@"maxBitrate"]; + if (maxBitrate != nil) + currentParams.maxBitrateBps = maxBitrate; + NSNumber* minBitrate = [newParams objectForKey:@"minBitrate"]; + if (minBitrate != nil) + currentParams.minBitrateBps = minBitrate; + NSNumber* maxFramerate = [newParams objectForKey:@"maxFramerate"]; + if (maxFramerate != nil) + currentParams.maxFramerate = maxFramerate; + NSNumber* numTemporalLayers = [newParams 
objectForKey:@"numTemporalLayers"]; + if (numTemporalLayers != nil) + currentParams.numTemporalLayers = numTemporalLayers; + NSNumber* scaleResolutionDownBy = [newParams objectForKey:@"scaleResolutionDownBy"]; + if (scaleResolutionDownBy != nil) + currentParams.scaleResolutionDownBy = scaleResolutionDownBy; + } + } + + return parameters; +} + +- (NSString*)transceiverDirectionString:(RTCRtpTransceiverDirection)direction { + switch (direction) { + case RTCRtpTransceiverDirectionSendRecv: + return @"sendrecv"; + case RTCRtpTransceiverDirectionSendOnly: + return @"sendonly"; + case RTCRtpTransceiverDirectionRecvOnly: + return @"recvonly"; + case RTCRtpTransceiverDirectionInactive: + return @"inactive"; + case RTCRtpTransceiverDirectionStopped: + return @"stopped"; + break; + } + return nil; +} + +- (NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { + NSString* mid = transceiver.mid ? transceiver.mid : @""; + NSDictionary* params = @{ + @"transceiverId" : mid, + @"mid" : mid, + @"direction" : [self transceiverDirectionString:transceiver.direction], + @"sender" : [self rtpSenderToMap:transceiver.sender], + @"receiver" : [self receiverToMap:transceiver.receiver] + }; + return params; +} + +- (FlutterRTCVideoRenderer *)findRendererByTrackId:(NSString *)trackId { + for (FlutterRTCVideoRenderer *renderer in self.renders.allValues) { + if (renderer.videoTrack != nil && [renderer.videoTrack.trackId isEqualToString:trackId]) { + return renderer; + } + } + return nil; +} +@end diff --git a/common/darwin/Classes/LocalAudioTrack.h b/common/darwin/Classes/LocalAudioTrack.h new file mode 100644 index 0000000000..7cd1861a06 --- /dev/null +++ b/common/darwin/Classes/LocalAudioTrack.h @@ -0,0 +1,19 @@ +#import +#import "AudioProcessingAdapter.h" +#import "LocalTrack.h" + +@interface LocalAudioTrack : NSObject + +- (_Nonnull instancetype)initWithTrack:(RTCAudioTrack* _Nonnull)track; + +@property(nonatomic, strong) RTCAudioTrack* _Nonnull audioTrack; + +- 
(void)addRenderer:(_Nonnull id)renderer; + +- (void)removeRenderer:(_Nonnull id)renderer; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +@end diff --git a/common/darwin/Classes/LocalAudioTrack.m b/common/darwin/Classes/LocalAudioTrack.m new file mode 100644 index 0000000000..a080d4f090 --- /dev/null +++ b/common/darwin/Classes/LocalAudioTrack.m @@ -0,0 +1,38 @@ +#import "LocalAudioTrack.h" +#import "AudioManager.h" + +@implementation LocalAudioTrack { + RTCAudioTrack* _track; +} + +@synthesize audioTrack = _track; + +- (instancetype)initWithTrack:(RTCAudioTrack*)track { + self = [super init]; + if (self) { + _track = track; + } + return self; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +- (void)addRenderer:(id)renderer { + [AudioManager.sharedInstance addLocalAudioRenderer:renderer]; +} + +- (void)removeRenderer:(id)renderer { + [AudioManager.sharedInstance removeLocalAudioRenderer:renderer]; +} + +- (void)addProcessing:(_Nonnull id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter addProcessing:processor]; +} + +- (void)removeProcessing:(_Nonnull id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter removeProcessing:processor]; +} + +@end diff --git a/common/darwin/Classes/LocalTrack.h b/common/darwin/Classes/LocalTrack.h new file mode 100644 index 0000000000..34f2e1e29e --- /dev/null +++ b/common/darwin/Classes/LocalTrack.h @@ -0,0 +1,7 @@ +#import + +@protocol LocalTrack + +- (RTCMediaStreamTrack*)track; + +@end diff --git a/common/darwin/Classes/LocalVideoTrack.h b/common/darwin/Classes/LocalVideoTrack.h new file mode 100644 index 0000000000..e28ee76248 --- /dev/null +++ b/common/darwin/Classes/LocalVideoTrack.h @@ -0,0 +1,24 @@ +#import +#import "LocalTrack.h" +#import "VideoProcessingAdapter.h" + +@interface LocalVideoTrack : NSObject + +- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track; + +- (_Nonnull 
instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track + videoProcessing:(VideoProcessingAdapter* _Nullable)processing; + +@property(nonatomic, strong) RTCVideoTrack* _Nonnull videoTrack; + +@property(nonatomic, strong) VideoProcessingAdapter* _Nonnull processing; + +- (void)addRenderer:(_Nonnull id)renderer; + +- (void)removeRenderer:(_Nonnull id)renderer; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +@end diff --git a/common/darwin/Classes/LocalVideoTrack.m b/common/darwin/Classes/LocalVideoTrack.m new file mode 100644 index 0000000000..02ca7c6c40 --- /dev/null +++ b/common/darwin/Classes/LocalVideoTrack.m @@ -0,0 +1,47 @@ +#import "LocalVideoTrack.h" + +@implementation LocalVideoTrack { + RTCVideoTrack* _track; + VideoProcessingAdapter* _processing; +} + +@synthesize videoTrack = _track; +@synthesize processing = _processing; + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + videoProcessing:(VideoProcessingAdapter*)processing { + self = [super init]; + if (self) { + _track = track; + _processing = processing; + } + return self; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track { + return [self initWithTrack:track videoProcessing:nil]; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +/** Register a renderer that will render all frames received on this track. */ +- (void)addRenderer:(id)renderer { + [_track addRenderer:renderer]; +} + +/** Deregister a renderer. 
*/ +- (void)removeRenderer:(id)renderer { + [_track removeRenderer:renderer]; +} + +- (void)addProcessing:(id)processor { + [_processing addProcessing:processor]; +} + +- (void)removeProcessing:(id)processor { + [_processing removeProcessing:processor]; +} + +@end diff --git a/common/darwin/Classes/RTCAudioSource+Private.h b/common/darwin/Classes/RTCAudioSource+Private.h new file mode 100644 index 0000000000..6e45d12fbf --- /dev/null +++ b/common/darwin/Classes/RTCAudioSource+Private.h @@ -0,0 +1,14 @@ +#ifdef __cplusplus +#import "WebRTC/RTCAudioSource.h" +#include "media_stream_interface.h" + +@interface RTCAudioSource () + +/** + * The AudioSourceInterface object passed to this RTCAudioSource during + * construction. + */ +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioSource; + +@end +#endif diff --git a/common/darwin/Classes/VideoProcessingAdapter.h b/common/darwin/Classes/VideoProcessingAdapter.h new file mode 100644 index 0000000000..c953316eec --- /dev/null +++ b/common/darwin/Classes/VideoProcessingAdapter.h @@ -0,0 +1,18 @@ +#import +#import + +@protocol ExternalVideoProcessingDelegate +- (RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)onFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)frame; +@end + +@interface VideoProcessingAdapter : NSObject + +- (_Nonnull instancetype)initWithRTCVideoSource:(RTCVideoSource* _Nonnull)source; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +- (RTCVideoSource* _Nonnull) source; + +@end diff --git a/common/darwin/Classes/VideoProcessingAdapter.m b/common/darwin/Classes/VideoProcessingAdapter.m new file mode 100644 index 0000000000..5b784b8111 --- /dev/null +++ b/common/darwin/Classes/VideoProcessingAdapter.m @@ -0,0 +1,55 @@ +#import "VideoProcessingAdapter.h" +#import + +@implementation VideoProcessingAdapter { + RTCVideoSource* _videoSource; + CGSize _frameSize; + NSArray>* _processors; + os_unfair_lock _lock; +} + +- 
(instancetype)initWithRTCVideoSource:(RTCVideoSource*)source { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _videoSource = source; + _processors = [NSArray> new]; + } + return self; +} + +- (RTCVideoSource* _Nonnull) source { + return _videoSource; +} + +- (void)addProcessing:(id)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors arrayByAddingObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]]; + os_unfair_lock_unlock(&_lock); +} + +- (void)setSize:(CGSize)size { + _frameSize = size; +} + +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + frame = [processor onFrame:frame]; + } + [_videoSource capturer:capturer didCaptureVideoFrame:frame]; + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/common/darwin/Classes/audio_sink_bridge.cpp b/common/darwin/Classes/audio_sink_bridge.cpp new file mode 100644 index 0000000000..16ce8fa841 --- /dev/null +++ b/common/darwin/Classes/audio_sink_bridge.cpp @@ -0,0 +1,27 @@ +#include "media_stream_interface.h" +#include "FlutterRTCAudioSink-Interface.h" + +class AudioSinkBridge : public webrtc::AudioTrackSinkInterface { +private: + void* sink; + +public: + AudioSinkBridge(void* sink1) { + sink = sink1; + } + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) override + { + RTCAudioSinkCallback(sink, + audio_data, + bits_per_sample, + sample_rate, + number_of_channels, + number_of_frames + ); + }; + int NumPreferredChannels() const override { return 1; } +}; diff --git 
a/common/darwin/Classes/media_stream_interface.h b/common/darwin/Classes/media_stream_interface.h new file mode 100644 index 0000000000..e25553f9fa --- /dev/null +++ b/common/darwin/Classes/media_stream_interface.h @@ -0,0 +1,199 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +/// Source https://webrtc.googlesource.com/src/+/master/api/media_stream_interface.h + +#ifdef __cplusplus +#ifndef API_MEDIA_STREAM_INTERFACE_H_ +#define API_MEDIA_STREAM_INTERFACE_H_ + +#include +#include +#include +#include +#include + +namespace webrtc { + + // Generic observer interface. + class ObserverInterface { + public: + virtual void OnChanged() = 0; + protected: + virtual ~ObserverInterface() {} + }; + class NotifierInterface { + public: + virtual void RegisterObserver(ObserverInterface* observer) = 0; + virtual void UnregisterObserver(ObserverInterface* observer) = 0; + virtual ~NotifierInterface() {} + }; + + enum class RefCountReleaseStatus { kDroppedLastRef, kOtherRefsRemained }; + // Interfaces where refcounting is part of the public api should + // inherit this abstract interface. The implementation of these + // methods is usually provided by the RefCountedObject template class, + // applied as a leaf in the inheritance tree. + class RefCountInterface { + public: + virtual void AddRef() const = 0; + virtual RefCountReleaseStatus Release() const = 0; + // Non-public destructor, because Release() has exclusive responsibility for + // destroying the object. + protected: + virtual ~RefCountInterface() {} + }; + + // Base class for sources. A MediaStreamTrack has an underlying source that + // provides media. 
A source can be shared by multiple tracks. + class MediaSourceInterface : public RefCountInterface, + public NotifierInterface { + public: + enum SourceState { kInitializing, kLive, kEnded, kMuted }; + virtual SourceState state() const = 0; + virtual bool remote() const = 0; + protected: + ~MediaSourceInterface() override = default; + }; + + // Interface for receiving audio data from a AudioTrack. + class AudioTrackSinkInterface { + public: + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) { + + }; + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + void* absolute_capture_timestamp_ms) { + // TODO(bugs.webrtc.org/10739): Deprecate the old OnData and make this one + // pure virtual. + return OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, + number_of_frames); + } + virtual int NumPreferredChannels() const { return -1; } + protected: + virtual ~AudioTrackSinkInterface() {} + }; + // AudioSourceInterface is a reference counted source used for AudioTracks. + // The same source can be used by multiple AudioTracks. + class AudioSourceInterface : public MediaSourceInterface { + public: + class AudioObserver { + public: + virtual void OnSetVolume(double volume) = 0; + protected: + virtual ~AudioObserver() {} + }; + // TODO(deadbeef): Makes all the interfaces pure virtual after they're + // implemented in chromium. + // Sets the volume of the source. |volume| is in the range of [0, 10]. + // TODO(tommi): This method should be on the track and ideally volume should + // be applied in the track in a way that does not affect clones of the track. + virtual void SetVolume(double volume) {} + // Registers/unregisters observers to the audio source. 
+ virtual void RegisterAudioObserver(AudioObserver* observer) {} + virtual void UnregisterAudioObserver(AudioObserver* observer) {} + // TODO(tommi): Make pure virtual. + virtual void AddSink(AudioTrackSinkInterface* sink) {} + virtual void RemoveSink(AudioTrackSinkInterface* sink) {} + // Returns options for the AudioSource. + // (for some of the settings this approach is broken, e.g. setting + // audio network adaptation on the source is the wrong layer of abstraction). +// virtual const AudioOptions options() const; + }; +} +namespace rtc { + + template + class scoped_refptr { + public: + typedef T element_type; + scoped_refptr() : ptr_(nullptr) {} + scoped_refptr(std::nullptr_t) : ptr_(nullptr) {} // NOLINT(runtime/explicit) + explicit scoped_refptr(T* p) : ptr_(p) { + if (ptr_) + ptr_->AddRef(); + } + scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) { + if (ptr_) + ptr_->AddRef(); + } + template + scoped_refptr(const scoped_refptr& r) : ptr_(r.get()) { + if (ptr_) + ptr_->AddRef(); + } + // Move constructors. + scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + template + scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + ~scoped_refptr() { + if (ptr_) + ptr_->Release(); + } + T* get() const { return ptr_; } + explicit operator bool() const { return ptr_ != nullptr; } + T& operator*() const { return *ptr_; } + T* operator->() const { return ptr_; } + // Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a + // null pointer, all without touching the reference count of the underlying + // pointed-to object. The object is still reference counted, and the caller of + // release() is now the proud owner of one reference, so it is responsible for + // calling Release() once on the object when no longer using it. 
+ T* release() { + T* retVal = ptr_; + ptr_ = nullptr; + return retVal; + } + scoped_refptr& operator=(T* p) { + // AddRef first so that self assignment should work + if (p) + p->AddRef(); + if (ptr_) + ptr_->Release(); + ptr_ = p; + return *this; + } + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.ptr_; + } + template + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.get(); + } + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + template + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + void swap(T** pp) noexcept { + T* p = ptr_; + ptr_ = *pp; + *pp = p; + } + void swap(scoped_refptr& r) noexcept { swap(&r.ptr_); } + protected: + T* ptr_; + }; +}; + +#endif // API_MEDIA_STREAM_INTERFACE_H_ +#endif // __cplusplus diff --git a/doc/patchs/m63_patch_for_flutter_webrtc_plugin_v0.patch b/doc/patchs/m63_patch_for_flutter_webrtc_plugin_v0.patch deleted file mode 100644 index 13b66c61c2..0000000000 --- a/doc/patchs/m63_patch_for_flutter_webrtc_plugin_v0.patch +++ /dev/null @@ -1,582 +0,0 @@ -diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn -index 5b7ffb738..42c09f1be 100644 ---- a/sdk/android/BUILD.gn -+++ b/sdk/android/BUILD.gn -@@ -346,7 +346,6 @@ rtc_shared_library("libjingle_peerconnection_so") { - - suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ] - configs += [ "//build/config/android:hide_all_but_jni" ] -- - deps = [ - ":libjingle_peerconnection_jni", - ":libjingle_peerconnection_metrics_default_jni", -diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java -index 881255d9a..38a21a176 100644 ---- a/sdk/android/api/org/webrtc/PeerConnection.java -+++ b/sdk/android/api/org/webrtc/PeerConnection.java -@@ -81,6 +81,12 @@ public class PeerConnection { - /** Triggered when a remote peer close a stream. 
*/ - public void onRemoveStream(MediaStream stream); - -+ /** Triggered when media is received on a new track from remote stream. */ -+ public void onAddTrack(MediaStream stream, MediaStreamTrack track); -+ -+ /** Triggered when a remote stream close a track. */ -+ public void onRemoveTrack(MediaStream stream, MediaStreamTrack track); -+ - /** Triggered when a remote peer opens a DataChannel. */ - public void onDataChannel(DataChannel dataChannel); - -@@ -91,7 +97,7 @@ public class PeerConnection { - * Triggered when a new track is signaled by the remote peer, as a result of - * setRemoteDescription. - */ -- public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams); -+ public void onAddRtpReceiver(RtpReceiver receiver, MediaStream[] mediaStreams); - } - - /** Java version of PeerConnectionInterface.IceServer. */ -diff --git a/sdk/android/src/java/org/webrtc/EglBase10.java b/sdk/android/src/java/org/webrtc/EglBase10.java -index 8cbe0c022..8e48fedec 100644 ---- a/sdk/android/src/java/org/webrtc/EglBase10.java -+++ b/sdk/android/src/java/org/webrtc/EglBase10.java -@@ -25,7 +25,7 @@ import javax.microedition.khronos.egl.EGLSurface; - * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay, - * and an EGLSurface. - */ --class EglBase10 extends EglBase { -+public class EglBase10 extends EglBase { - // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION. - private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; - -diff --git a/sdk/android/src/java/org/webrtc/EglBase14.java b/sdk/android/src/java/org/webrtc/EglBase14.java -index 8c3305664..b7d6bff48 100644 ---- a/sdk/android/src/java/org/webrtc/EglBase14.java -+++ b/sdk/android/src/java/org/webrtc/EglBase14.java -@@ -25,7 +25,7 @@ import android.view.Surface; - * and an EGLSurface. 
- */ - @TargetApi(18) --class EglBase14 extends EglBase { -+public class EglBase14 extends EglBase { - private static final String TAG = "EglBase14"; - private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2; - private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT; -diff --git a/sdk/android/src/jni/pc/peerconnectionobserver_jni.cc b/sdk/android/src/jni/pc/peerconnectionobserver_jni.cc -index 4160f5ace..69ac6a72d 100644 ---- a/sdk/android/src/jni/pc/peerconnectionobserver_jni.cc -+++ b/sdk/android/src/jni/pc/peerconnectionobserver_jni.cc -@@ -167,6 +167,18 @@ void PeerConnectionObserverJni::OnAddStream( - stream_observers_.push_back(std::move(observer)); - } - -+void PeerConnectionObserverJni::OnAddMediaStreamTrackToJavaObject(jobject j_stream, jobject j_track) -+{ -+ // Notify PeerConnection.Observer.onAddTrack -+ ScopedLocalRefFrame local_ref_frame(jni()); -+ jmethodID m = -+ GetMethodID(jni(), *j_observer_class_, "onAddTrack", -+ "(Lorg/webrtc/MediaStream;Lorg/webrtc/MediaStreamTrack;)V"); -+ jni()->CallVoidMethod(*j_observer_global_, m, j_stream, -+ j_track); -+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod"; -+} -+ - void PeerConnectionObserverJni::AddNativeAudioTrackToJavaStream( - rtc::scoped_refptr track, - jobject j_stream) { -@@ -187,6 +199,8 @@ void PeerConnectionObserverJni::AddNativeAudioTrackToJavaStream( - jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track); - CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod"; - RTC_CHECK(added); -+ -+ OnAddMediaStreamTrackToJavaObject(j_stream, j_track); - } - - void PeerConnectionObserverJni::AddNativeVideoTrackToJavaStream( -@@ -209,11 +223,14 @@ void PeerConnectionObserverJni::AddNativeVideoTrackToJavaStream( - jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track); - CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod"; - RTC_CHECK(added); -+ -+ OnAddMediaStreamTrackToJavaObject(j_stream, 
j_track); - } - - void PeerConnectionObserverJni::RemoveAndDisposeNativeTrackFromJavaTrackList( - MediaStreamTrackInterface* track, -- jobject j_tracks) { -+ jobject j_tracks, -+ jobject j_stream) { - Iterable iterable_tracks(jni(), j_tracks); - for (auto it = iterable_tracks.begin(); it != iterable_tracks.end(); ++it) { - MediaStreamTrackInterface* native_track = -@@ -221,6 +238,13 @@ void PeerConnectionObserverJni::RemoveAndDisposeNativeTrackFromJavaTrackList( - jni()->GetLongField(*it, j_native_track_id_)); - CHECK_EXCEPTION(jni()) << "error during GetLongField"; - if (native_track == track) { -+ { -+ jmethodID m = -+ GetMethodID(jni(), *j_observer_class_, "onRemoveTrack", -+ "(Lorg/webrtc/MediaStream;Lorg/webrtc/MediaStreamTrack;)V"); -+ jni()->CallVoidMethod(*j_observer_global_, m, j_stream, *it); -+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod"; -+ } - jni()->CallVoidMethod(*it, j_track_dispose_id_); - it.Remove(); - return; -@@ -256,7 +280,7 @@ void PeerConnectionObserverJni::OnAudioTrackRemovedFromStream( - jfieldID audio_tracks_id = GetFieldID( - jni(), *j_media_stream_class_, "audioTracks", "Ljava/util/LinkedList;"); - jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id); -- RemoveAndDisposeNativeTrackFromJavaTrackList(track, audio_tracks); -+ RemoveAndDisposeNativeTrackFromJavaTrackList(track, audio_tracks, j_stream); - } - - void PeerConnectionObserverJni::OnVideoTrackRemovedFromStream( -@@ -267,7 +291,7 @@ void PeerConnectionObserverJni::OnVideoTrackRemovedFromStream( - jfieldID video_tracks_id = GetFieldID( - jni(), *j_media_stream_class_, "videoTracks", "Ljava/util/LinkedList;"); - jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id); -- RemoveAndDisposeNativeTrackFromJavaTrackList(track, video_tracks); -+ RemoveAndDisposeNativeTrackFromJavaTrackList(track, video_tracks, j_stream); - } - - void PeerConnectionObserverJni::OnRemoveStream( -@@ -331,7 +355,7 @@ void PeerConnectionObserverJni::OnAddTrack( - 
- jobjectArray j_stream_array = NativeToJavaMediaStreamArray(jni(), streams); - jmethodID m = -- GetMethodID(jni(), *j_observer_class_, "onAddTrack", -+ GetMethodID(jni(), *j_observer_class_, "onAddRtpReceiver", - "(Lorg/webrtc/RtpReceiver;[Lorg/webrtc/MediaStream;)V"); - jni()->CallVoidMethod(*j_observer_global_, m, j_rtp_receiver, j_stream_array); - CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod"; -diff --git a/sdk/android/src/jni/pc/peerconnectionobserver_jni.h b/sdk/android/src/jni/pc/peerconnectionobserver_jni.h -index a48828b20..492ca43b9 100644 ---- a/sdk/android/src/jni/pc/peerconnectionobserver_jni.h -+++ b/sdk/android/src/jni/pc/peerconnectionobserver_jni.h -@@ -94,7 +94,10 @@ class PeerConnectionObserverJni : public PeerConnectionObserver, - // DCHECKs if the track isn't found. - void RemoveAndDisposeNativeTrackFromJavaTrackList( - MediaStreamTrackInterface* track, -- jobject j_tracks); -+ jobject j_tracks, -+ jobject j_stream); -+ -+ void OnAddMediaStreamTrackToJavaObject(jobject j_stream, jobject j_track); - - // Callbacks invoked when a native stream changes, and the Java stream needs - // to be updated; MediaStreamObserver is used to make this simpler. 
-diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm -index b004191f8..fbd6083a7 100644 ---- a/sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm -+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm -@@ -39,6 +39,14 @@ - (void)adaptOutputFormatToWidth:(int)width - self.capturer->AdaptOutputFormat(width, height, fps); - } - -+- (BOOL)IsRunning{ -+ return _capturer->IsRunning(); -+} -+ -+- (void)Stop{ -+ _capturer->Stop(); -+} -+ - - (BOOL)canUseBackCamera { - return self.capturer->CanUseBackCamera(); - } -diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h -index e1017f5e8..879f3e1dc 100644 ---- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h -+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection+Private.h -@@ -11,6 +11,7 @@ - #import "WebRTC/RTCPeerConnection.h" - - #include "api/peerconnectioninterface.h" -+#include "pc/mediastreamobserver.h" - - NS_ASSUME_NONNULL_BEGIN - -@@ -20,7 +21,9 @@ namespace webrtc { - * These objects are created by RTCPeerConnectionFactory to wrap an - * id and call methods on that interface. - */ --class PeerConnectionDelegateAdapter : public PeerConnectionObserver { -+typedef std::map NativeToObjcStreamsMap; -+class PeerConnectionDelegateAdapter : public PeerConnectionObserver, -+ public sigslot::has_slots<> { - - public: - PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection); -@@ -48,9 +51,25 @@ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { - - void OnIceCandidatesRemoved( - const std::vector& candidates) override; -+ -+ // Callbacks invoked when a native stream changes, and the Java stream needs -+ // to be updated; MediaStreamObserver is used to make this simpler. 
-+ void OnAudioTrackAddedToStream(AudioTrackInterface* track, -+ MediaStreamInterface* stream); -+ -+ void OnVideoTrackAddedToStream(VideoTrackInterface* track, -+ MediaStreamInterface* stream); -+ -+ void OnAudioTrackRemovedFromStream(AudioTrackInterface* track, -+ MediaStreamInterface* stream); -+ -+ void OnVideoTrackRemovedFromStream(VideoTrackInterface* track, -+ MediaStreamInterface* stream); - - private: - __weak RTCPeerConnection *peer_connection_; -+ std::vector> stream_observers_; -+ NativeToObjcStreamsMap remote_streams_; - }; - - } // namespace webrtc -diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm -index e443e850d..abdaf34c5 100644 ---- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm -+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm -@@ -17,9 +17,11 @@ - #import "RTCLegacyStatsReport+Private.h" - #import "RTCMediaConstraints+Private.h" - #import "RTCMediaStream+Private.h" -+#import "RTCMediaStreamTrack+Private.h" - #import "RTCPeerConnectionFactory+Private.h" - #import "RTCRtpReceiver+Private.h" - #import "RTCRtpSender+Private.h" -+#import "RTCVideoTrack+Private.h" - #import "RTCSessionDescription+Private.h" - #import "WebRTC/RTCLogging.h" - -@@ -27,6 +29,7 @@ - - #include "api/jsepicecandidate.h" - #include "rtc_base/checks.h" -+#include "rtc_base/ptr_util.h" - - NSString * const kRTCPeerConnectionErrorDomain = - @"org.webrtc.RTCPeerConnection"; -@@ -127,21 +130,120 @@ void OnFailure(const std::string& error) override { - - void PeerConnectionDelegateAdapter::OnAddStream( - rtc::scoped_refptr stream) { -+ -+ // Create an observer to update the Java stream when the native stream's set -+ // of tracks changes. 
-+ auto observer = rtc::MakeUnique(stream); -+ observer->SignalAudioTrackRemoved.connect( -+ this, &PeerConnectionDelegateAdapter::OnAudioTrackRemovedFromStream); -+ observer->SignalVideoTrackRemoved.connect( -+ this, &PeerConnectionDelegateAdapter::OnVideoTrackRemovedFromStream); -+ observer->SignalAudioTrackAdded.connect( -+ this, &PeerConnectionDelegateAdapter::OnAudioTrackAddedToStream); -+ observer->SignalVideoTrackAdded.connect( -+ this, &PeerConnectionDelegateAdapter::OnVideoTrackAddedToStream); -+ -+ stream_observers_.push_back(std::move(observer)); -+ - RTCMediaStream *mediaStream = - [[RTCMediaStream alloc] initWithNativeMediaStream:stream]; -+ - RTCPeerConnection *peer_connection = peer_connection_; - [peer_connection.delegate peerConnection:peer_connection - didAddStream:mediaStream]; -+ for (NSUInteger i = 0; i < mediaStream.audioTracks.count; i++) { -+ RTCMediaStreamTrack *mediaStreamTrack = (RTCMediaStreamTrack *)mediaStream.audioTracks[i]; -+ [peer_connection.delegate peerConnection:peer_connection -+ mediaStream:mediaStream -+ didAddTrack:mediaStreamTrack]; -+ } -+ -+ for (NSUInteger i = 0; i < mediaStream.videoTracks.count; i++) { -+ RTCMediaStreamTrack *mediaStreamTrack = (RTCMediaStreamTrack *)mediaStream.videoTracks[i]; -+ [peer_connection.delegate peerConnection:peer_connection -+ mediaStream:mediaStream -+ didAddTrack:mediaStreamTrack]; -+ } - } - - void PeerConnectionDelegateAdapter::OnRemoveStream( - rtc::scoped_refptr stream) { -+ -+ // Remove the observer first, so it doesn't react to events during deletion. 
-+ stream_observers_.erase( -+ std::remove_if(stream_observers_.begin(), -+ stream_observers_.end(), -+ [stream](const std::unique_ptr& observer) { -+ return observer->stream() == stream; -+ }), -+ stream_observers_.end()); - RTCMediaStream *mediaStream = - [[RTCMediaStream alloc] initWithNativeMediaStream:stream]; - RTCPeerConnection *peer_connection = peer_connection_; - [peer_connection.delegate peerConnection:peer_connection - didRemoveStream:mediaStream]; - } -+ -+void PeerConnectionDelegateAdapter::OnAudioTrackAddedToStream(AudioTrackInterface* track, -+ MediaStreamInterface* stream) { -+ RTCMediaStream *mediaStream = -+ [[RTCMediaStream alloc] initWithNativeMediaStream:stream]; -+ -+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio; -+ RTCMediaStreamTrack *mediaStreamTrack = -+ [[RTCMediaStreamTrack alloc] initWithNativeTrack:track type:type]; -+ -+ RTCPeerConnection *peer_connection = peer_connection_; -+ [peer_connection.delegate peerConnection:peer_connection -+ mediaStream:mediaStream -+ didAddTrack:mediaStreamTrack]; -+} -+ -+void PeerConnectionDelegateAdapter::OnVideoTrackAddedToStream(VideoTrackInterface* track, -+ MediaStreamInterface* stream){ -+ -+ RTCMediaStream *mediaStream = -+ [[RTCMediaStream alloc] initWithNativeMediaStream:stream]; -+ -+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo; -+ RTCMediaStreamTrack *mediaStreamTrack = -+ [[RTCMediaStreamTrack alloc] initWithNativeTrack:track type:type]; -+ -+ RTCPeerConnection *peer_connection = peer_connection_; -+ [peer_connection.delegate peerConnection:peer_connection -+ mediaStream:mediaStream -+ didAddTrack:mediaStreamTrack]; -+} -+ -+void PeerConnectionDelegateAdapter::OnAudioTrackRemovedFromStream(AudioTrackInterface* track, -+ MediaStreamInterface* stream){ -+ RTCMediaStream *mediaStream = -+ [[RTCMediaStream alloc] initWithNativeMediaStream:stream]; -+ -+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio; -+ RTCMediaStreamTrack *mediaStreamTrack = -+ 
[[RTCMediaStreamTrack alloc] initWithNativeTrack:track type:type]; -+ -+ RTCPeerConnection *peer_connection = peer_connection_; -+ [peer_connection.delegate peerConnection:peer_connection -+ mediaStream:mediaStream -+ didRemoveTrack:mediaStreamTrack]; -+} -+ -+void PeerConnectionDelegateAdapter::OnVideoTrackRemovedFromStream(VideoTrackInterface* track, -+ MediaStreamInterface* stream){ -+ RTCMediaStream *mediaStream = -+ [[RTCMediaStream alloc] initWithNativeMediaStream:stream]; -+ -+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo; -+ RTCMediaStreamTrack *mediaStreamTrack = -+ [[RTCMediaStreamTrack alloc] initWithNativeTrack:track type:type]; -+ -+ RTCPeerConnection *peer_connection = peer_connection_; -+ [peer_connection.delegate peerConnection:peer_connection -+ mediaStream:mediaStream -+ didRemoveTrack:mediaStreamTrack]; -+} - - void PeerConnectionDelegateAdapter::OnDataChannel( - rtc::scoped_refptr data_channel) { -diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm -index 19dd2452c..77282f9a9 100644 ---- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm -+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm -@@ -9,12 +9,43 @@ - */ - - #import "RTCVideoFrame+Private.h" -+#import "RTCI420Buffer+Private.h" - - #import "WebRTC/RTCVideoFrame.h" - #import "WebRTC/RTCVideoFrameBuffer.h" - - #include "api/video/video_frame.h" - #include "rtc_base/timeutils.h" -+#include "libyuv.h" -+ -+ -+// static -+rtc::scoped_refptr I420BufferRotate(const id src, webrtc::VideoRotation rotation) { -+ RTC_CHECK(src.dataY); -+ RTC_CHECK(src.dataU); -+ RTC_CHECK(src.dataV); -+ -+ int rotated_width = src.width; -+ int rotated_height = src.height; -+ if (rotation == webrtc::kVideoRotation_90 || -+ rotation == webrtc::kVideoRotation_270) { -+ std::swap(rotated_width, rotated_height); -+ } -+ -+ rtc::scoped_refptr buffer = -+ webrtc::I420Buffer::Create(rotated_width, 
rotated_height); -+ -+ RTC_CHECK_EQ(0, libyuv::I420Rotate( -+ src.dataY, src.strideY, -+ src.dataU, src.strideU, -+ src.dataV, src.strideV, -+ buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), -+ buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), -+ src.width, src.height, -+ static_cast(rotation))); -+ -+ return buffer; -+} - - id nativeToRtcFrameBuffer( - const rtc::scoped_refptr &buffer) { -@@ -112,4 +143,67 @@ - (instancetype)initWithNativeVideoFrame:(const webrtc::VideoFrame &)frame { - return videoFrame; - } - -+ -+-(void)CopyI420BufferToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer; -+{ -+ id src = [self.buffer toI420]; -+ CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); -+ rtc::scoped_refptr buffer = I420BufferRotate(src, (webrtc::VideoRotation)self.rotation); -+ RTCI420Buffer *i420Buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:buffer]; -+ -+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); -+ if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || -+ pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { -+ // NV12 -+ uint8_t* dstY = static_cast(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0)); -+ const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); -+ uint8_t* dstUV = static_cast(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1)); -+ const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); -+ -+ libyuv::I420ToNV12(i420Buffer.dataY, -+ i420Buffer.strideY, -+ i420Buffer.dataU, -+ i420Buffer.strideU, -+ i420Buffer.dataV, -+ i420Buffer.strideV, -+ dstY, -+ dstYStride, -+ dstUV, -+ dstUVStride, -+ i420Buffer.width, -+ i420Buffer.height); -+ } else { -+ uint8_t* dst = static_cast(CVPixelBufferGetBaseAddress(outputPixelBuffer)); -+ const int bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); -+ -+ if (pixelFormat == kCVPixelFormatType_32BGRA) { -+ // Corresponds to libyuv::FOURCC_ARGB -+ 
libyuv::I420ToARGB(i420Buffer.dataY, -+ i420Buffer.strideY, -+ i420Buffer.dataU, -+ i420Buffer.strideU, -+ i420Buffer.dataV, -+ i420Buffer.strideV, -+ dst, -+ bytesPerRow, -+ i420Buffer.width, -+ i420Buffer.height); -+ } else if (pixelFormat == kCVPixelFormatType_32ARGB) { -+ // Corresponds to libyuv::FOURCC_BGRA -+ libyuv::I420ToBGRA(i420Buffer.dataY, -+ i420Buffer.strideY, -+ i420Buffer.dataU, -+ i420Buffer.strideU, -+ i420Buffer.dataV, -+ i420Buffer.strideV, -+ dst, -+ bytesPerRow, -+ i420Buffer.width, -+ i420Buffer.height); -+ } -+ } -+ -+ CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); -+} -+ - @end -diff --git a/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m b/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m -index 20a6082a7..de28bc2d1 100644 ---- a/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m -+++ b/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m -@@ -59,7 +59,8 @@ - (BOOL)loadTexture:(CVOpenGLESTextureRef *)textureOut - kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width, - height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut); - if (ret != kCVReturnSuccess) { -- CFRelease(*textureOut); -+ if(*textureOut) -+ CFRelease(*textureOut); - *textureOut = nil; - return NO; - } -diff --git a/sdk/objc/Framework/Classes/Video/objcvideotracksource.h b/sdk/objc/Framework/Classes/Video/objcvideotracksource.h -index 27c7295dd..7c46582fc 100644 ---- a/sdk/objc/Framework/Classes/Video/objcvideotracksource.h -+++ b/sdk/objc/Framework/Classes/Video/objcvideotracksource.h -@@ -25,7 +25,7 @@ class ObjcVideoTrackSource : public rtc::AdaptedVideoTrackSource { - - // This class can not be used for implementing screen casting. Hopefully, this - // function will be removed before we add that to iOS/Mac. -- bool is_screencast() const override { return false; } -+ bool is_screencast() const override { return true; } - - // Indicates that the encoder should denoise video before encoding it. 
- // If it is not set, the default configuration is used which is different -diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h b/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h -index 6d369b340..f2f0b154f 100644 ---- a/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h -+++ b/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h -@@ -31,6 +31,10 @@ RTC_EXPORT - - - (instancetype)init NS_UNAVAILABLE; - -+- (BOOL)IsRunning; -+ -+- (void)Stop; -+ - /** - * Calling this function will cause frames to be scaled down to the - * requested resolution. Also, frames will be cropped to match the -diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h b/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h -index 7b0c4492f..8c7782c54 100644 ---- a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h -+++ b/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h -@@ -22,6 +22,7 @@ - @class RTCPeerConnectionFactory; - @class RTCRtpReceiver; - @class RTCRtpSender; -+@class RTCVideoTrack; - @class RTCSessionDescription; - @class RTCLegacyStatsReport; - -@@ -83,6 +84,16 @@ RTC_EXPORT - - (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveStream:(RTCMediaStream *)stream; - -+/** Called when media is received on a new track from remote stream. */ -+- (void)peerConnection:(RTCPeerConnection *)peerConnection -+ mediaStream:(RTCMediaStream *)stream -+ didAddTrack:(RTCMediaStreamTrack*)track; -+ -+/** Called when a remote stream closes a track. */ -+- (void)peerConnection:(RTCPeerConnection *)peerConnection -+ mediaStream:(RTCMediaStream *)stream -+ didRemoveTrack:(RTCMediaStreamTrack*)track; -+ - /** Called when negotiation is needed, for example ICE has restarted. 
*/ - - (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection; - -diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h -index dcc4c6e7b..aaf62abc9 100644 ---- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h -+++ b/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h -@@ -80,6 +80,8 @@ RTC_EXPORT - */ - - (RTCVideoFrame *)newI420VideoFrame; - -+- (void)CopyI420BufferToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer; -+ - @end - - NS_ASSUME_NONNULL_END diff --git a/elinux/CMakeLists.txt b/elinux/CMakeLists.txt new file mode 100644 index 0000000000..3454f57da4 --- /dev/null +++ b/elinux/CMakeLists.txt @@ -0,0 +1,59 @@ +cmake_minimum_required(VERSION 3.15) +set(PROJECT_NAME "flutter_webrtc") +project(${PROJECT_NAME} LANGUAGES CXX) + +# This value is used when generating builds using this plugin, so it must +# not be changed +set(PLUGIN_NAME "flutter_webrtc_plugin") + +#add_definitions(-DLIB_WEBRTC_API_DLL) +add_definitions(-DRTC_DESKTOP_DEVICE) +add_definitions(-DFLUTTER_ELINUX) + +add_library(${PLUGIN_NAME} SHARED + "../common/cpp/src/flutter_data_channel.cc" + "../common/cpp/src/flutter_frame_cryptor.cc" + "../common/cpp/src/flutter_frame_capturer.cc" + "../common/cpp/src/flutter_media_stream.cc" + "../common/cpp/src/flutter_peerconnection.cc" + "../common/cpp/src/flutter_video_renderer.cc" + "../common/cpp/src/flutter_screen_capture.cc" + "../common/cpp/src/flutter_webrtc.cc" + "../common/cpp/src/flutter_webrtc_base.cc" + "../common/cpp/src/flutter_common.cc" + "flutter_webrtc_plugin.cc" +) + +include_directories( + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/svpng" +) + +apply_standard_settings(${PLUGIN_NAME}) +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} 
PRIVATE FLUTTER_PLUGIN_IMPL) +target_include_directories(${PLUGIN_NAME} INTERFACE + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" +) +target_link_libraries(${PLUGIN_NAME} PRIVATE + flutter + flutter_wrapper_plugin + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" +) + +# List of absolute paths to libraries that should be bundled with the plugin +set(flutter_webrtc_bundled_libraries + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" + PARENT_SCOPE +) + +# Add $ORIGIN to RPATH so that lib/libflutter_webrtc_plugin.so can find lib/libwebrtc.so at runtime +set_property( + TARGET ${PLUGIN_NAME} + PROPERTY BUILD_RPATH + "\$ORIGIN" +) \ No newline at end of file diff --git a/elinux/flutter_webrtc/flutter_web_r_t_c_plugin.h b/elinux/flutter_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..9889514be8 --- /dev/null +++ b/elinux/flutter_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,24 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default"))) +#else +#define FLUTTER_PLUGIN_EXPORT +#endif + + +#if defined(__cplusplus) +extern "C" { +#endif + +FLUTTER_PLUGIN_EXPORT void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar); + +#if defined(__cplusplus) +} // extern "C" +#endif + +#endif // PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ diff --git a/elinux/flutter_webrtc_plugin.cc b/elinux/flutter_webrtc_plugin.cc new file mode 100644 index 0000000000..4e8a36656c --- /dev/null +++ b/elinux/flutter_webrtc_plugin.cc @@ -0,0 +1,75 @@ +#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" + +#include "flutter_common.h" +#include "flutter_webrtc.h" + +const char* kChannelName = "FlutterWebRTC.Method"; + +//#if defined(_WINDOWS) + +namespace 
flutter_webrtc_plugin { + +// A webrtc plugin for windows/linux. +class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. + std::unique_ptr plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + TaskRunner* task_runner() { return nullptr; } + + private: + // Creates a plugin that communicates on the given channel. + FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()) { + webrtc_ = std::make_unique(this); + } + + // Called when a method is called on |channel_|; + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr result) { + // handle method call and forward to webrtc native sdk. 
+ auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr channel_; + std::unique_ptr webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; +}; + +} // namespace flutter_webrtc_plugin + +void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar) { + static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + plugin_registrar); +} diff --git a/example/.gitignore b/example/.gitignore new file mode 100644 index 0000000000..f70e6e8ce0 --- /dev/null +++ b/example/.gitignore @@ -0,0 +1,40 @@ +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# The .vscode folder contains launch configuration and tasks you configure in +# VS Code which you may wish to be included in version control, so this line +# is commented out by default. +#.vscode/ + +# Flutter/Dart/Pub related +**/doc/api/ +.dart_tool/ +.flutter-plugins +.flutter-plugins-dependencies +.packages +.pub-cache/ +.pub/ +/build/ +.metadata + +# Web related + +# Symbolication related +app.*.symbols + +# Exceptions to above rules. +!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages diff --git a/example/README.md b/example/README.md index 8db6bd0440..39eddc96a5 100644 --- a/example/README.md +++ b/example/README.md @@ -4,5 +4,37 @@ Demonstrates how to use the webrtc plugin. ## Getting Started -For help getting started with Flutter, view our online -[documentation](https://flutter.io/). +Make sure your flutter is using the `dev` channel. 
+ +```bash +flutter channel dev +./scripts/project_tools.sh create +``` + +Android/iOS + +```bash +flutter run +``` + +macOS + +```bash +flutter run -d macos +``` + +Web + +```bash +dart compile js ../web/e2ee.worker.dart -o web/e2ee.worker.dart.js +flutter run -d web +``` + +Windows + +```bash +flutter channel master +flutter create --platforms windows . +flutter run -d windows +``` + diff --git a/example/analysis_options.yaml b/example/analysis_options.yaml new file mode 100644 index 0000000000..fea5e03d69 --- /dev/null +++ b/example/analysis_options.yaml @@ -0,0 +1,42 @@ +include: package:pedantic/analysis_options.yaml + +linter: + rules: + - always_declare_return_types + - avoid_empty_else + - await_only_futures + - avoid_returning_null_for_void + - cancel_subscriptions + - directives_ordering + - flutter_style_todos + - sort_constructors_first + - sort_unnamed_constructors_first + - sort_pub_dependencies + - type_init_formals + - unnecessary_brace_in_string_interps + - unnecessary_const + - unnecessary_new + - unnecessary_getters_setters + - unnecessary_null_aware_assignments + - unnecessary_null_in_if_null_operators + - unnecessary_overrides + - unnecessary_parenthesis + - unnecessary_statements + - unnecessary_string_interpolations + - unnecessary_this + - unrelated_type_equality_checks + - use_rethrow_when_possible + - valid_regexps + - void_checks + +analyzer: + errors: + # treat missing required parameters as a warning (not a hint) + missing_required_param: warning + # treat missing returns as a warning (not a hint) + missing_return: warning + # allow having TODOs in the code + todo: ignore + # allow self-reference to deprecated members (we do this because otherwise we have + # to annotate every member in every test, assert, etc, when we deprecate something) + deprecated_member_use_from_same_package: ignore diff --git a/example/android/.gitignore b/example/android/.gitignore new file mode 100644 index 0000000000..6f568019d3 --- /dev/null +++ 
b/example/android/.gitignore @@ -0,0 +1,13 @@ +gradle-wrapper.jar +/.gradle +/captures/ +/gradlew +/gradlew.bat +/local.properties +GeneratedPluginRegistrant.java + +# Remember to never publicly share your keystore. +# See https://flutter.dev/docs/deployment/android#reference-the-keystore-from-the-app +key.properties +**/*.keystore +**/*.jks diff --git a/example/android/app/.project b/example/android/app/.project deleted file mode 100644 index ac485d7c3e..0000000000 --- a/example/android/app/.project +++ /dev/null @@ -1,23 +0,0 @@ - - - app - Project app created by Buildship. - - - - - org.eclipse.jdt.core.javabuilder - - - - - org.eclipse.buildship.core.gradleprojectbuilder - - - - - - org.eclipse.jdt.core.javanature - org.eclipse.buildship.core.gradleprojectnature - - diff --git a/example/android/app/.settings/org.eclipse.buildship.core.prefs b/example/android/app/.settings/org.eclipse.buildship.core.prefs deleted file mode 100644 index 0f0dc4dde1..0000000000 --- a/example/android/app/.settings/org.eclipse.buildship.core.prefs +++ /dev/null @@ -1,2 +0,0 @@ -#Sat May 12 22:12:33 CST 2018 -connection.project.dir=.. diff --git a/example/android/app/build.gradle b/example/android/app/build.gradle index 095daca365..678af1927a 100644 --- a/example/android/app/build.gradle +++ b/example/android/app/build.gradle @@ -1,51 +1,44 @@ -def localProperties = new Properties() -def localPropertiesFile = rootProject.file('local.properties') -if (localPropertiesFile.exists()) { - localPropertiesFile.withReader('UTF-8') { reader -> - localProperties.load(reader) - } -} - -def flutterRoot = localProperties.getProperty('flutter.sdk') -if (flutterRoot == null) { - throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.") +plugins { + id "com.android.application" + id "kotlin-android" + // The Flutter Gradle Plugin must be applied after the Android and Kotlin Gradle plugins. 
+ id "dev.flutter.flutter-gradle-plugin" } -apply plugin: 'com.android.application' -apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" - android { - compileSdkVersion 23 + namespace = "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example" + compileSdk = flutter.compileSdkVersion + ndkVersion = flutter.ndkVersion - lintOptions { - disable 'InvalidPackage' + compileOptions { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + } + + kotlinOptions { + jvmTarget = JavaVersion.VERSION_1_8 } defaultConfig { // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html). - applicationId "com.cloudwebrtc.webrtcexample" - minSdkVersion 21 - targetSdkVersion 23 - versionCode 1 - versionName "1.0" - testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + applicationId = "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example" + // You can update the following values to match your application needs. + // For more information, see: https://flutter.dev/to/review-gradle-config. + minSdk = flutter.minSdkVersion + targetSdk = flutter.targetSdkVersion + versionCode = flutter.versionCode + versionName = flutter.versionName } buildTypes { release { // TODO: Add your own signing config for the release build. // Signing with the debug keys for now, so `flutter run --release` works. - signingConfig signingConfigs.debug + signingConfig = signingConfigs.debug } } } flutter { - source '../..' -} - -dependencies { - testImplementation 'junit:junit:4.12' - androidTestImplementation 'com.android.support.test:runner:1.0.1' - androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1' + source = "../.." 
} diff --git a/example/android/app/src/debug/AndroidManifest.xml b/example/android/app/src/debug/AndroidManifest.xml new file mode 100644 index 0000000000..399f6981d5 --- /dev/null +++ b/example/android/app/src/debug/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/example/android/app/src/main/AndroidManifest.xml b/example/android/app/src/main/AndroidManifest.xml index 04a3101d4d..30096cd127 100644 --- a/example/android/app/src/main/AndroidManifest.xml +++ b/example/android/app/src/main/AndroidManifest.xml @@ -1,49 +1,51 @@ - - - - - - - + + - - - + + + + + + - + + android:name="io.flutter.embedding.android.NormalTheme" + android:resource="@style/NormalTheme" + /> + + + diff --git a/example/android/app/src/main/java/com/cloudwebrtc/flutterflutterexample/flutter_webrtc_example/MainActivity.java b/example/android/app/src/main/java/com/cloudwebrtc/flutterflutterexample/flutter_webrtc_example/MainActivity.java new file mode 100644 index 0000000000..9ff32ddb60 --- /dev/null +++ b/example/android/app/src/main/java/com/cloudwebrtc/flutterflutterexample/flutter_webrtc_example/MainActivity.java @@ -0,0 +1,6 @@ +package com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example; + +import io.flutter.embedding.android.FlutterActivity; + +public class MainActivity extends FlutterActivity { +} diff --git a/example/android/app/src/main/java/com/cloudwebrtc/webrtcexample/MainActivity.java b/example/android/app/src/main/java/com/cloudwebrtc/webrtcexample/MainActivity.java deleted file mode 100644 index 8446512694..0000000000 --- a/example/android/app/src/main/java/com/cloudwebrtc/webrtcexample/MainActivity.java +++ /dev/null @@ -1,14 +0,0 @@ -package com.cloudwebrtc.webrtcexample; - -import android.os.Bundle; - -import io.flutter.app.FlutterActivity; -import io.flutter.plugins.GeneratedPluginRegistrant; - -public class MainActivity extends FlutterActivity { - @Override - protected void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - 
GeneratedPluginRegistrant.registerWith(this); - } -} diff --git a/example/android/app/src/main/res/drawable-v21/launch_background.xml b/example/android/app/src/main/res/drawable-v21/launch_background.xml new file mode 100644 index 0000000000..f74085f3f6 --- /dev/null +++ b/example/android/app/src/main/res/drawable-v21/launch_background.xml @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/example/android/app/src/main/res/values-night/styles.xml b/example/android/app/src/main/res/values-night/styles.xml new file mode 100644 index 0000000000..06952be745 --- /dev/null +++ b/example/android/app/src/main/res/values-night/styles.xml @@ -0,0 +1,18 @@ + + + + + + + diff --git a/example/android/app/src/main/res/values/styles.xml b/example/android/app/src/main/res/values/styles.xml index 00fa4417cf..cb1ef88056 100644 --- a/example/android/app/src/main/res/values/styles.xml +++ b/example/android/app/src/main/res/values/styles.xml @@ -1,8 +1,18 @@ - + + diff --git a/example/android/app/src/profile/AndroidManifest.xml b/example/android/app/src/profile/AndroidManifest.xml new file mode 100644 index 0000000000..399f6981d5 --- /dev/null +++ b/example/android/app/src/profile/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/example/android/build.gradle b/example/android/build.gradle index d4225c7905..d2ffbffa4c 100644 --- a/example/android/build.gradle +++ b/example/android/build.gradle @@ -1,29 +1,18 @@ -buildscript { - repositories { - google() - jcenter() - } - - dependencies { - classpath 'com.android.tools.build:gradle:3.1.2' - } -} - allprojects { repositories { google() - jcenter() + mavenCentral() } } -rootProject.buildDir = '../build' +rootProject.buildDir = "../build" subprojects { project.buildDir = "${rootProject.buildDir}/${project.name}" } subprojects { - project.evaluationDependsOn(':app') + project.evaluationDependsOn(":app") } -task clean(type: Delete) { +tasks.register("clean", Delete) { delete rootProject.buildDir } diff --git 
a/example/android/gradle.properties b/example/android/gradle.properties index 8bd86f6805..94adc3a3f9 100644 --- a/example/android/gradle.properties +++ b/example/android/gradle.properties @@ -1 +1,3 @@ org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true diff --git a/example/android/gradle/wrapper/gradle-wrapper.properties b/example/android/gradle/wrapper/gradle-wrapper.properties index fc894ea3ed..bc5bce7aa1 100644 --- a/example/android/gradle/wrapper/gradle-wrapper.properties +++ b/example/android/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ -#Sat May 12 22:34:13 CST 2018 +#Sat Nov 09 20:10:39 CST 2024 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip diff --git a/example/android/settings.gradle b/example/android/settings.gradle index 5a2f14fb18..4034dd79cc 100644 --- a/example/android/settings.gradle +++ b/example/android/settings.gradle @@ -1,15 +1,25 @@ -include ':app' +pluginManagement { + def flutterSdkPath = { + def properties = new Properties() + file("local.properties").withInputStream { properties.load(it) } + def flutterSdkPath = properties.getProperty("flutter.sdk") + assert flutterSdkPath != null, "flutter.sdk not set in local.properties" + return flutterSdkPath + }() -def flutterProjectRoot = rootProject.projectDir.parentFile.toPath() + includeBuild("$flutterSdkPath/packages/flutter_tools/gradle") -def plugins = new Properties() -def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins') -if (pluginsFile.exists()) { - pluginsFile.withReader('UTF-8') { reader -> plugins.load(reader) } + repositories { + google() + mavenCentral() + gradlePluginPortal() + } } -plugins.each { name, path -> - def pluginDirectory = 
flutterProjectRoot.resolve(path).resolve('android').toFile() - include ":$name" - project(":$name").projectDir = pluginDirectory +plugins { + id "dev.flutter.flutter-plugin-loader" version "1.0.0" + id "com.android.application" version "8.3.0" apply false + id "org.jetbrains.kotlin.android" version "1.7.10" apply false } + +include ":app" \ No newline at end of file diff --git a/example/elinux/.gitignore b/example/elinux/.gitignore new file mode 100644 index 0000000000..229c109991 --- /dev/null +++ b/example/elinux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral/ diff --git a/example/elinux/CMakeLists.txt b/example/elinux/CMakeLists.txt new file mode 100644 index 0000000000..ed844fbcaf --- /dev/null +++ b/example/elinux/CMakeLists.txt @@ -0,0 +1,103 @@ +cmake_minimum_required(VERSION 3.15) +# stop cmake from taking make from CMAKE_SYSROOT +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +project(runner LANGUAGES CXX) + +set(BINARY_NAME "example") + +cmake_policy(SET CMP0063 NEW) + +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Basically we use this include when we got the following error: +# fatal error: 'bits/c++config.h' file not found +include_directories(SYSTEM ${FLUTTER_SYSTEM_INCLUDE_DIRECTORIES}) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) + +# Configure build options. +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") +endif() + +# Configure build option to target backend. +if (NOT FLUTTER_TARGET_BACKEND_TYPE) + set(FLUTTER_TARGET_BACKEND_TYPE "wayland" CACHE + STRING "Flutter target backend type" FORCE) + set_property(CACHE FLUTTER_TARGET_BACKEND_TYPE PROPERTY STRINGS + "wayland" "gbm" "eglstream" "x11") +endif() + +# Compilation settings that should be applied to most targets. 
+function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_17) + target_compile_options(${TARGET} PRIVATE -Wall -Werror) + target_compile_options(${TARGET} PRIVATE "$<$>:-O3>") + target_compile_definitions(${TARGET} PRIVATE "$<$>:NDEBUG>") +endfunction() + +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") + +# Flutter library and tool build rules. +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# Application build +add_subdirectory("runner") + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. +install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +install(FILES "${FLUTTER_EMBEDDER_LIBRARY}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +if(PLUGIN_BUNDLED_LIBRARIES) + install(FILES "${PLUGIN_BUNDLED_LIBRARIES}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. 
+set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() diff --git a/example/elinux/flutter/CMakeLists.txt b/example/elinux/flutter/CMakeLists.txt new file mode 100644 index 0000000000..f141a3c690 --- /dev/null +++ b/example/elinux/flutter/CMakeLists.txt @@ -0,0 +1,108 @@ +cmake_minimum_required(VERSION 3.15) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +set(WRAPPER_ROOT "${EPHEMERAL_DIR}/cpp_client_wrapper") + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. +function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_engine.so") +if(FLUTTER_TARGET_BACKEND_TYPE MATCHES "gbm") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_gbm.so") +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "eglstream") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_eglstream.so") +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "x11") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_x11.so") +else() + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_wayland.so") +endif() + +# Published to parent scope for install step. 
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_EMBEDDER_LIBRARY ${FLUTTER_EMBEDDER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/elinux/" PARENT_SCOPE) +set(AOT_LIBRARY "${EPHEMERAL_DIR}/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "flutter_export.h" + "flutter_plugin_registrar.h" + "flutter_messenger.h" + "flutter_texture_registrar.h" + "flutter_elinux.h" + "flutter_platform_views.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE "${FLUTTER_EMBEDDER_LIBRARY}") +add_dependencies(flutter flutter_assemble) + +# === Wrapper === +list(APPEND CPP_WRAPPER_SOURCES_CORE + "core_implementations.cc" + "standard_codec.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_CORE "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_PLUGIN + "plugin_registrar.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_PLUGIN "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_APP + "flutter_engine.cc" + "flutter_view_controller.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_APP "${WRAPPER_ROOT}/") + +# Wrapper sources needed for a plugin. +add_library(flutter_wrapper_plugin STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} +) +apply_standard_settings(flutter_wrapper_plugin) +set_target_properties(flutter_wrapper_plugin PROPERTIES + POSITION_INDEPENDENT_CODE ON) +set_target_properties(flutter_wrapper_plugin PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_link_libraries(flutter_wrapper_plugin PUBLIC flutter) +target_include_directories(flutter_wrapper_plugin PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_plugin flutter_assemble) + +# Wrapper sources needed for the runner. 
+add_library(flutter_wrapper_app STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_APP} +) +apply_standard_settings(flutter_wrapper_app) +target_link_libraries(flutter_wrapper_app PUBLIC flutter) +target_include_directories(flutter_wrapper_app PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_app flutter_assemble) + +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + "${FLUTTER_EMBEDDER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} + ${CPP_WRAPPER_SOURCES_APP} +) diff --git a/example/elinux/flutter/generated_plugin_registrant.cc b/example/elinux/flutter/generated_plugin_registrant.cc new file mode 100644 index 0000000000..8b2a034590 --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.cc @@ -0,0 +1,14 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#include "generated_plugin_registrant.h" + +#include + +void RegisterPlugins(flutter::PluginRegistry* registry) { + FlutterWebRTCPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("FlutterWebRTCPlugin")); +} diff --git a/example/elinux/flutter/generated_plugin_registrant.dart b/example/elinux/flutter/generated_plugin_registrant.dart new file mode 100644 index 0000000000..90bf21a80b --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.dart @@ -0,0 +1,10 @@ +// +// Generated file. Do not edit. +// + +// ignore_for_file: lines_longer_than_80_chars + + +// ignore: public_member_api_docs +void registerPlugins() { +} diff --git a/example/elinux/flutter/generated_plugin_registrant.h b/example/elinux/flutter/generated_plugin_registrant.h new file mode 100644 index 0000000000..a31c23cd89 --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.h @@ -0,0 +1,13 @@ +// +// Generated file. Do not edit. +// + +#ifndef GENERATED_PLUGIN_REGISTRANT_ +#define GENERATED_PLUGIN_REGISTRANT_ + +#include + +// Registers Flutter plugins. 
+void RegisterPlugins(flutter::PluginRegistry* registry); + +#endif // GENERATED_PLUGIN_REGISTRANT_ diff --git a/example/elinux/flutter/generated_plugins.cmake b/example/elinux/flutter/generated_plugins.cmake new file mode 100644 index 0000000000..00419e86ae --- /dev/null +++ b/example/elinux/flutter/generated_plugins.cmake @@ -0,0 +1,16 @@ +# +# Generated file, do not edit. +# + +list(APPEND FLUTTER_PLUGIN_LIST + flutter_webrtc +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/elinux plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) diff --git a/example/elinux/runner/CMakeLists.txt b/example/elinux/runner/CMakeLists.txt new file mode 100644 index 0000000000..d15d5ca317 --- /dev/null +++ b/example/elinux/runner/CMakeLists.txt @@ -0,0 +1,23 @@ +cmake_minimum_required(VERSION 3.15) +project(runner LANGUAGES CXX) + +if(FLUTTER_TARGET_BACKEND_TYPE MATCHES "gbm") + add_definitions(-DFLUTTER_TARGET_BACKEND_GBM) +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "eglstream") + add_definitions(-DFLUTTER_TARGET_BACKEND_EGLSTREAM) +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "x11") + add_definitions(-DFLUTTER_TARGET_BACKEND_X11) +else() + add_definitions(-DFLUTTER_TARGET_BACKEND_WAYLAND) +endif() + +add_executable(${BINARY_NAME} + "flutter_window.cc" + "main.cc" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" +) +apply_standard_settings(${BINARY_NAME}) +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE flutter flutter_wrapper_app) +target_include_directories(${BINARY_NAME} PRIVATE "${CMAKE_SOURCE_DIR}") +add_dependencies(${BINARY_NAME} flutter_assemble) diff --git a/example/elinux/runner/command_options.h b/example/elinux/runner/command_options.h new file mode 100644 
index 0000000000..b0de93165c --- /dev/null +++ b/example/elinux/runner/command_options.h @@ -0,0 +1,402 @@ +// Copyright 2022 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef COMMAND_OPTIONS_ +#define COMMAND_OPTIONS_ + +#include +#include +#include +#include +#include +#include +#include + +namespace commandline { + +namespace { +constexpr char kOptionStyleNormal[] = "--"; +constexpr char kOptionStyleShort[] = "-"; +constexpr char kOptionValueForHelpMessage[] = "="; +} // namespace + +class Exception : public std::exception { + public: + Exception(const std::string& msg) : msg_(msg) {} + ~Exception() throw() {} + + const char* what() const throw() { return msg_.c_str(); } + + private: + std::string msg_; +}; + +class CommandOptions { + public: + CommandOptions() = default; + ~CommandOptions() = default; + + void AddWithoutValue(const std::string& name, + const std::string& short_name, + const std::string& description, + bool required) { + Add(name, short_name, description, "", + ReaderString(), required, false); + } + + void AddInt(const std::string& name, + const std::string& short_name, + const std::string& description, + const int& default_value, + bool required) { + Add(name, short_name, description, default_value, + ReaderInt(), required, true); + } + + void AddDouble(const std::string& name, + const std::string& short_name, + const std::string& description, + const double& default_value, + bool required) { + Add(name, short_name, description, default_value, + ReaderDouble(), required, true); + } + + void AddString(const std::string& name, + const std::string& short_name, + const std::string& description, + const std::string& default_value, + bool required) { + Add(name, short_name, description, default_value, + ReaderString(), required, true); + } + + template + void Add(const std::string& name, + const std::string& short_name, + const std::string& 
description, + const T default_value, + F reader = F(), + bool required = true, + bool required_value = true) { + if (options_.find(name) != options_.end()) { + std::cerr << "Already registered option: " << name << std::endl; + return; + } + + if (lut_short_options_.find(short_name) != lut_short_options_.end()) { + std::cerr << short_name << "is already registered" << std::endl; + return; + } + lut_short_options_[short_name] = name; + + options_[name] = std::make_unique>( + name, short_name, description, default_value, reader, required, + required_value); + + // register to show help message. + registration_order_options_.push_back(options_[name].get()); + } + + bool Exist(const std::string& name) { + auto itr = options_.find(name); + return itr != options_.end() && itr->second->HasValue(); + } + + template + const T& GetValue(const std::string& name) { + auto itr = options_.find(name); + if (itr == options_.end()) { + throw Exception("Not found: " + name); + } + + auto* option_value = dynamic_cast*>(itr->second.get()); + if (!option_value) { + throw Exception("Type mismatch: " + name); + } + return option_value->GetValue(); + } + + bool Parse(int argc, const char* const* argv) { + if (argc < 1) { + errors_.push_back("No options"); + return false; + } + + command_name_ = argv[0]; + for (auto i = 1; i < argc; i++) { + const std::string arg(argv[i]); + + // normal options: e.g. --bundle=/data/sample/bundle --fullscreen + if (arg.length() > 2 && + arg.substr(0, 2).compare(kOptionStyleNormal) == 0) { + const size_t option_value_len = arg.find("=") != std::string::npos + ? 
(arg.length() - arg.find("=")) + : 0; + const bool has_value = option_value_len != 0; + std::string option_name = + arg.substr(2, arg.length() - 2 - option_value_len); + + if (options_.find(option_name) == options_.end()) { + errors_.push_back("Not found option: " + option_name); + continue; + } + + if (!has_value && options_[option_name]->IsRequiredValue()) { + errors_.push_back(option_name + " requres an option value"); + continue; + } + + if (has_value && !options_[option_name]->IsRequiredValue()) { + errors_.push_back(option_name + " doesn't requres an option value"); + continue; + } + + if (has_value) { + SetOptionValue(option_name, arg.substr(arg.find("=") + 1)); + } else { + SetOption(option_name); + } + } + // short options: e.g. -f /foo/file.txt -h 640 -abc + else if (arg.length() > 1 && + arg.substr(0, 1).compare(kOptionStyleShort) == 0) { + for (size_t j = 1; j < arg.length(); j++) { + const std::string option_name{argv[i][j]}; + + if (lut_short_options_.find(option_name) == + lut_short_options_.end()) { + errors_.push_back("Not found short option: " + option_name); + break; + } + + if (j == arg.length() - 1 && + options_[lut_short_options_[option_name]]->IsRequiredValue()) { + if (i == argc - 1) { + errors_.push_back("Invalid format option: " + option_name); + break; + } + SetOptionValue(lut_short_options_[option_name], argv[++i]); + } else { + SetOption(lut_short_options_[option_name]); + } + } + } else { + errors_.push_back("Invalid format option: " + arg); + } + } + + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (registration_order_options_[i]->IsRequired() && + !registration_order_options_[i]->HasValue()) { + errors_.push_back( + std::string(registration_order_options_[i]->GetName()) + + " option is mandatory."); + } + } + + return errors_.size() == 0; + } + + std::string GetError() { return errors_.size() > 0 ? 
errors_[0] : ""; } + + std::vector& GetErrors() { return errors_; } + + std::string ShowHelp() { + std::ostringstream ostream; + + ostream << "Usage: " << command_name_ << " "; + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (registration_order_options_[i]->IsRequired()) { + ostream << registration_order_options_[i]->GetHelpShortMessage() << " "; + } + } + ostream << std::endl; + + ostream << "Global options:" << std::endl; + size_t max_name_len = 0; + for (size_t i = 0; i < registration_order_options_.size(); i++) { + max_name_len = std::max( + max_name_len, registration_order_options_[i]->GetName().length()); + } + + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (!registration_order_options_[i]->GetShortName().empty()) { + ostream << kOptionStyleShort + << registration_order_options_[i]->GetShortName() << ", "; + } else { + ostream << std::string(4, ' '); + } + + size_t index_adjust = 0; + constexpr int kSpacerNum = 10; + auto need_value = registration_order_options_[i]->IsRequiredValue(); + ostream << kOptionStyleNormal + << registration_order_options_[i]->GetName(); + if (need_value) { + ostream << kOptionValueForHelpMessage; + index_adjust += std::string(kOptionValueForHelpMessage).length(); + } + ostream << std::string( + max_name_len + kSpacerNum - index_adjust - + registration_order_options_[i]->GetName().length(), + ' '); + ostream << registration_order_options_[i]->GetDescription() << std::endl; + } + + return ostream.str(); + } + + private: + struct ReaderInt { + int operator()(const std::string& value) { return std::stoi(value); } + }; + + struct ReaderString { + std::string operator()(const std::string& value) { return value; } + }; + + struct ReaderDouble { + double operator()(const std::string& value) { return std::stod(value); } + }; + + class Option { + public: + Option(const std::string& name, + const std::string& short_name, + const std::string& description, + bool required, + bool required_value) 
+ : name_(name), + short_name_(short_name), + description_(description), + is_required_(required), + is_required_value_(required_value), + value_set_(false){}; + virtual ~Option() = default; + + const std::string& GetName() const { return name_; }; + + const std::string& GetShortName() const { return short_name_; }; + + const std::string& GetDescription() const { return description_; }; + + const std::string GetHelpShortMessage() const { + std::string message = kOptionStyleNormal + name_; + if (is_required_value_) { + message += kOptionValueForHelpMessage; + } + return message; + } + + bool IsRequired() const { return is_required_; }; + + bool IsRequiredValue() const { return is_required_value_; }; + + void Set() { value_set_ = true; }; + + virtual bool SetValue(const std::string& value) = 0; + + virtual bool HasValue() const = 0; + + protected: + std::string name_; + std::string short_name_; + std::string description_; + bool is_required_; + bool is_required_value_; + bool value_set_; + }; + + template + class OptionValue : public Option { + public: + OptionValue(const std::string& name, + const std::string& short_name, + const std::string& description, + const T& default_value, + bool required, + bool required_value) + : Option(name, short_name, description, required, required_value), + default_value_(default_value), + value_(default_value){}; + virtual ~OptionValue() = default; + + bool SetValue(const std::string& value) { + value_ = Read(value); + value_set_ = true; + return true; + } + + bool HasValue() const { return value_set_; } + + const T& GetValue() const { return value_; } + + protected: + virtual T Read(const std::string& s) = 0; + + T default_value_; + T value_; + }; + + template + class OptionValueReader : public OptionValue { + public: + OptionValueReader(const std::string& name, + const std::string& short_name, + const std::string& description, + const T default_value, + F reader, + bool required, + bool required_value) + : OptionValue(name, + 
short_name, + description, + default_value, + required, + required_value), + reader_(reader) {} + ~OptionValueReader() = default; + + private: + T Read(const std::string& value) { return reader_(value); } + + F reader_; + }; + + bool SetOption(const std::string& name) { + auto itr = options_.find(name); + if (itr == options_.end()) { + errors_.push_back("Unknown option: " + name); + return false; + } + + itr->second->Set(); + return true; + } + + bool SetOptionValue(const std::string& name, const std::string& value) { + auto itr = options_.find(name); + if (itr == options_.end()) { + errors_.push_back("Unknown option: " + name); + return false; + } + + if (!itr->second->SetValue(value)) { + errors_.push_back("Invalid option value: " + name + " = " + value); + return false; + } + return true; + } + + std::string command_name_; + std::unordered_map> options_; + std::unordered_map lut_short_options_; + std::vector registration_order_options_; + std::vector errors_; +}; + +} // namespace commandline + +#endif // COMMAND_OPTIONS_ diff --git a/example/elinux/runner/flutter_embedder_options.h b/example/elinux/runner/flutter_embedder_options.h new file mode 100644 index 0000000000..41d0bd10a3 --- /dev/null +++ b/example/elinux/runner/flutter_embedder_options.h @@ -0,0 +1,203 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_EMBEDDER_OPTIONS_ +#define FLUTTER_EMBEDDER_OPTIONS_ + +#include + +#include + +#include "command_options.h" + +class FlutterEmbedderOptions { + public: + FlutterEmbedderOptions() { + options_.AddString("bundle", "b", "Path to Flutter project bundle", + "./bundle", true); + options_.AddWithoutValue("no-cursor", "n", "No mouse cursor/pointer", + false); + options_.AddInt("rotation", "r", + "Window rotation(degree) [0(default)|90|180|270]", 0, + false); + options_.AddDouble("text-scaling-factor", "x", "Text scaling factor", 1.0, + false); + options_.AddWithoutValue("enable-high-contrast", "i", + "Request that UI be rendered with darker colors.", + false); + options_.AddDouble("force-scale-factor", "s", + "Force a scale factor instead using default value", 1.0, + false); + options_.AddWithoutValue( + "async-vblank", "v", + "Don't sync to compositor redraw/vblank (eglSwapInterval 0)", false); + +#if defined(FLUTTER_TARGET_BACKEND_GBM) || \ + defined(FLUTTER_TARGET_BACKEND_EGLSTREAM) + // no more options. 
+#elif defined(FLUTTER_TARGET_BACKEND_X11) + options_.AddString("title", "t", "Window title", "Flutter", false); + options_.AddWithoutValue("fullscreen", "f", "Always full-screen display", + false); + options_.AddInt("width", "w", "Window width", 1280, false); + options_.AddInt("height", "h", "Window height", 720, false); +#else // FLUTTER_TARGET_BACKEND_WAYLAND + options_.AddString("title", "t", "Window title", "Flutter", false); + options_.AddString("app-id", "a", "XDG App ID", "dev.flutter.elinux", + false); + options_.AddWithoutValue("onscreen-keyboard", "k", + "Enable on-screen keyboard", false); + options_.AddWithoutValue("window-decoration", "d", + "Enable window decorations", false); + options_.AddWithoutValue("fullscreen", "f", "Always full-screen display", + false); + options_.AddInt("width", "w", "Window width", 1280, false); + options_.AddInt("height", "h", "Window height", 720, false); +#endif + } + ~FlutterEmbedderOptions() = default; + + bool Parse(int argc, char** argv) { + if (!options_.Parse(argc, argv)) { + std::cerr << options_.GetError() << std::endl; + std::cout << options_.ShowHelp(); + return false; + } + + bundle_path_ = options_.GetValue("bundle"); + use_mouse_cursor_ = !options_.Exist("no-cursor"); + if (options_.Exist("rotation")) { + switch (options_.GetValue("rotation")) { + case 90: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_90; + break; + case 180: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_180; + break; + case 270: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_270; + break; + default: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_0; + break; + } + } + + text_scale_factor_ = options_.GetValue("text-scaling-factor"); + enable_high_contrast_ = options_.Exist("enable-high-contrast"); + + if (options_.Exist("force-scale-factor")) { + is_force_scale_factor_ = true; + scale_factor_ = 
options_.GetValue("force-scale-factor"); + } else { + is_force_scale_factor_ = false; + scale_factor_ = 1.0; + } + + enable_vsync_ = !options_.Exist("async-vblank"); + +#if defined(FLUTTER_TARGET_BACKEND_GBM) || \ + defined(FLUTTER_TARGET_BACKEND_EGLSTREAM) + use_onscreen_keyboard_ = false; + use_window_decoration_ = false; + window_view_mode_ = flutter::FlutterViewController::ViewMode::kFullscreen; +#elif defined(FLUTTER_TARGET_BACKEND_X11) + use_onscreen_keyboard_ = false; + use_window_decoration_ = false; + window_title_ = options_.GetValue("title"); + window_view_mode_ = + options_.Exist("fullscreen") + ? flutter::FlutterViewController::ViewMode::kFullscreen + : flutter::FlutterViewController::ViewMode::kNormal; + window_width_ = options_.GetValue("width"); + window_height_ = options_.GetValue("height"); +#else // FLUTTER_TARGET_BACKEND_WAYLAND + window_title_ = options_.GetValue("title"); + window_app_id_ = options_.GetValue("app-id"); + use_onscreen_keyboard_ = options_.Exist("onscreen-keyboard"); + use_window_decoration_ = options_.Exist("window-decoration"); + window_view_mode_ = + options_.Exist("fullscreen") + ? 
flutter::FlutterViewController::ViewMode::kFullscreen + : flutter::FlutterViewController::ViewMode::kNormal; + window_width_ = options_.GetValue("width"); + window_height_ = options_.GetValue("height"); +#endif + + return true; + } + + std::string BundlePath() const { + return bundle_path_; + } + std::string WindowTitle() const { + return window_title_; + } + std::string WindowAppID() const { + return window_app_id_; + } + bool IsUseMouseCursor() const { + return use_mouse_cursor_; + } + bool IsUseOnscreenKeyboard() const { + return use_onscreen_keyboard_; + } + bool IsUseWindowDecoraation() const { + return use_window_decoration_; + } + flutter::FlutterViewController::ViewMode WindowViewMode() const { + return window_view_mode_; + } + int WindowWidth() const { + return window_width_; + } + int WindowHeight() const { + return window_height_; + } + flutter::FlutterViewController::ViewRotation WindowRotation() const { + return window_view_rotation_; + } + double TextScaleFactor() const { + return text_scale_factor_; + } + bool EnableHighContrast() const { + return enable_high_contrast_; + } + bool IsForceScaleFactor() const { + return is_force_scale_factor_; + } + double ScaleFactor() const { + return scale_factor_; + } + bool EnableVsync() const { + return enable_vsync_; + } + + private: + commandline::CommandOptions options_; + + std::string bundle_path_; + std::string window_title_; + std::string window_app_id_; + bool use_mouse_cursor_ = true; + bool use_onscreen_keyboard_ = false; + bool use_window_decoration_ = false; + flutter::FlutterViewController::ViewMode window_view_mode_ = + flutter::FlutterViewController::ViewMode::kNormal; + int window_width_ = 1280; + int window_height_ = 720; + flutter::FlutterViewController::ViewRotation window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_0; + bool is_force_scale_factor_; + double scale_factor_; + double text_scale_factor_; + bool enable_high_contrast_; + bool enable_vsync_; +}; + 
+#endif // FLUTTER_EMBEDDER_OPTIONS_ diff --git a/example/elinux/runner/flutter_window.cc b/example/elinux/runner/flutter_window.cc new file mode 100644 index 0000000000..0c5b6397d1 --- /dev/null +++ b/example/elinux/runner/flutter_window.cc @@ -0,0 +1,79 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "flutter_window.h" + +#include +#include +#include +#include + +#include "flutter/generated_plugin_registrant.h" + +FlutterWindow::FlutterWindow( + const flutter::FlutterViewController::ViewProperties view_properties, + const flutter::DartProject project) + : view_properties_(view_properties), project_(project) {} + +bool FlutterWindow::OnCreate() { + flutter_view_controller_ = std::make_unique( + view_properties_, project_); + + // Ensure that basic setup of the controller was successful. + if (!flutter_view_controller_->engine() || + !flutter_view_controller_->view()) { + return false; + } + + // Register Flutter plugins. + RegisterPlugins(flutter_view_controller_->engine()); + + return true; +} + +void FlutterWindow::OnDestroy() { + if (flutter_view_controller_) { + flutter_view_controller_ = nullptr; + } +} + +void FlutterWindow::Run() { + // Main loop. + auto next_flutter_event_time = + std::chrono::steady_clock::time_point::clock::now(); + while (flutter_view_controller_->view()->DispatchEvent()) { + // Wait until the next event. + { + auto wait_duration = + std::max(std::chrono::nanoseconds(0), + next_flutter_event_time - + std::chrono::steady_clock::time_point::clock::now()); + std::this_thread::sleep_for( + std::chrono::duration_cast(wait_duration)); + } + + // Processes any pending events in the Flutter engine, and returns the + // number of nanoseconds until the next scheduled event (or max, if none). 
+ auto wait_duration = flutter_view_controller_->engine()->ProcessMessages(); + { + auto next_event_time = std::chrono::steady_clock::time_point::max(); + if (wait_duration != std::chrono::nanoseconds::max()) { + next_event_time = + std::min(next_event_time, + std::chrono::steady_clock::time_point::clock::now() + + wait_duration); + } else { + // Wait for the next frame if no events. + auto frame_rate = flutter_view_controller_->view()->GetFrameRate(); + next_event_time = std::min( + next_event_time, + std::chrono::steady_clock::time_point::clock::now() + + std::chrono::milliseconds( + static_cast(std::trunc(1000000.0 / frame_rate)))); + } + next_flutter_event_time = + std::max(next_flutter_event_time, next_event_time); + } + } +} diff --git a/example/elinux/runner/flutter_window.h b/example/elinux/runner/flutter_window.h new file mode 100644 index 0000000000..20b9cb8882 --- /dev/null +++ b/example/elinux/runner/flutter_window.h @@ -0,0 +1,34 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_WINDOW_ +#define FLUTTER_WINDOW_ + +#include +#include + +#include + +class FlutterWindow { + public: + explicit FlutterWindow( + const flutter::FlutterViewController::ViewProperties view_properties, + const flutter::DartProject project); + ~FlutterWindow() = default; + + // Prevent copying. 
+ FlutterWindow(FlutterWindow const&) = delete; + FlutterWindow& operator=(FlutterWindow const&) = delete; + + bool OnCreate(); + void OnDestroy(); + void Run(); + + private: + flutter::FlutterViewController::ViewProperties view_properties_; + flutter::DartProject project_; + std::unique_ptr flutter_view_controller_; +}; + +#endif // FLUTTER_WINDOW_ diff --git a/example/elinux/runner/main.cc b/example/elinux/runner/main.cc new file mode 100644 index 0000000000..579daee650 --- /dev/null +++ b/example/elinux/runner/main.cc @@ -0,0 +1,53 @@ +// Copyright 2021 Sony Corporation. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include +#include + +#include +#include +#include + +#include "flutter_embedder_options.h" +#include "flutter_window.h" + +int main(int argc, char** argv) { + FlutterEmbedderOptions options; + if (!options.Parse(argc, argv)) { + return 0; + } + + // Creates the Flutter project. + const auto bundle_path = options.BundlePath(); + const std::wstring fl_path(bundle_path.begin(), bundle_path.end()); + flutter::DartProject project(fl_path); + auto command_line_arguments = std::vector(); + project.set_dart_entrypoint_arguments(std::move(command_line_arguments)); + + flutter::FlutterViewController::ViewProperties view_properties = {}; + view_properties.width = options.WindowWidth(); + view_properties.height = options.WindowHeight(); + view_properties.view_mode = options.WindowViewMode(); + view_properties.view_rotation = options.WindowRotation(); + view_properties.title = options.WindowTitle(); + view_properties.app_id = options.WindowAppID(); + view_properties.use_mouse_cursor = options.IsUseMouseCursor(); + view_properties.use_onscreen_keyboard = options.IsUseOnscreenKeyboard(); + view_properties.use_window_decoration = options.IsUseWindowDecoraation(); + view_properties.text_scale_factor = options.TextScaleFactor(); + view_properties.enable_high_contrast = 
options.EnableHighContrast(); + view_properties.force_scale_factor = options.IsForceScaleFactor(); + view_properties.scale_factor = options.ScaleFactor(); + view_properties.enable_vsync = options.EnableVsync(); + + // The Flutter instance hosted by this window. + FlutterWindow window(view_properties, project); + if (!window.OnCreate()) { + return 0; + } + window.Run(); + window.OnDestroy(); + + return 0; +} diff --git a/example/ios/.gitignore b/example/ios/.gitignore index 1e1aafd633..7a7f9873ad 100644 --- a/example/ios/.gitignore +++ b/example/ios/.gitignore @@ -1,42 +1,34 @@ -.idea/ -.vagrant/ -.sconsign.dblite -.svn/ - -.DS_Store -*.swp -profile - -DerivedData/ -build/ -GeneratedPluginRegistrant.h -GeneratedPluginRegistrant.m - -*.pbxuser +**/dgph *.mode1v3 *.mode2v3 +*.moved-aside +*.pbxuser *.perspectivev3 - -!default.pbxuser +**/*sync/ +.sconsign.dblite +.tags* +**/.vagrant/ +**/DerivedData/ +Icon? +**/Pods/ +**/.symlinks/ +profile +xcuserdata +**/.generated/ +Flutter/App.framework +Flutter/Flutter.framework +Flutter/Flutter.podspec +Flutter/Generated.xcconfig +Flutter/ephemeral/ +Flutter/app.flx +Flutter/app.zip +Flutter/flutter_assets/ +Flutter/flutter_export_environment.sh +ServiceDefinitions.json +Runner/GeneratedPluginRegistrant.* + +# Exceptions to above rules. !default.mode1v3 !default.mode2v3 +!default.pbxuser !default.perspectivev3 - -xcuserdata - -*.moved-aside - -*.pyc -*sync/ -Icon? -.tags* - -/Flutter/app.flx -/Flutter/app.zip -/Flutter/flutter_assets/ -/Flutter/App.framework -/Flutter/Flutter.framework -/Flutter/Generated.xcconfig -/ServiceDefinitions.json - -Pods/ diff --git a/example/ios/Flutter/AppFrameworkInfo.plist b/example/ios/Flutter/AppFrameworkInfo.plist index 6c2de8086b..1dc6cf7652 100644 --- a/example/ios/Flutter/AppFrameworkInfo.plist +++ b/example/ios/Flutter/AppFrameworkInfo.plist @@ -20,11 +20,7 @@ ???? 
CFBundleVersion 1.0 - UIRequiredDeviceCapabilities - - arm64 - MinimumOSVersion - 8.0 + 13.0 diff --git a/example/ios/Flutter/Debug.xcconfig b/example/ios/Flutter/Debug.xcconfig index e8efba1146..ec97fc6f30 100644 --- a/example/ios/Flutter/Debug.xcconfig +++ b/example/ios/Flutter/Debug.xcconfig @@ -1,2 +1,2 @@ -#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" #include "Generated.xcconfig" diff --git a/example/ios/Flutter/Release.xcconfig b/example/ios/Flutter/Release.xcconfig index 399e9340e6..c4855bfe20 100644 --- a/example/ios/Flutter/Release.xcconfig +++ b/example/ios/Flutter/Release.xcconfig @@ -1,2 +1,2 @@ -#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" #include "Generated.xcconfig" diff --git a/example/ios/Podfile b/example/ios/Podfile index 4e21184091..ed16470330 100644 --- a/example/ios/Podfile +++ b/example/ios/Podfile @@ -1,63 +1,41 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '9.0' -# source 'https://git.coding.net/hging/Specs.git' +platform :ios, '13.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' -def parse_KV_file(file, separator='=') - file_abs_path = File.expand_path(file) - if !File.exists? file_abs_path - return []; +project 'Runner', { + 'Debug' => :debug, + 'Profile' => :release, + 'Release' => :release, +} + +def flutter_root + generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__) + unless File.exist?(generated_xcode_build_settings_path) + raise "#{generated_xcode_build_settings_path} must exist. 
If you're running pod install manually, make sure flutter pub get is executed first" + end + + File.foreach(generated_xcode_build_settings_path) do |line| + matches = line.match(/FLUTTER_ROOT\=(.*)/) + return matches[1].strip if matches end - pods_ary = [] - skip_line_start_symbols = ["#", "/"] - File.foreach(file_abs_path) { |line| - next if skip_line_start_symbols.any? { |symbol| line =~ /^\s*#{symbol}/ } - plugin = line.split(pattern=separator) - if plugin.length == 2 - podname = plugin[0].strip() - path = plugin[1].strip() - podpath = File.expand_path("#{path}", file_abs_path) - pods_ary.push({:name => podname, :path => podpath}); - else - puts "Invalid plugin specification: #{line}" - end - } - return pods_ary + raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get" end +require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root) + +flutter_ios_podfile_setup + target 'Runner' do - # Prepare symlinks folder. We use symlinks to avoid having Podfile.lock - # referring to absolute paths on developers' machines. - system('rm -rf Pods/.symlinks') - system('mkdir -p Pods/.symlinks/plugins') - - # Flutter Pods - generated_xcode_build_settings = parse_KV_file('./Flutter/Generated.xcconfig') - if generated_xcode_build_settings.empty? - puts "Generated.xcconfig must exist. If you're running pod install manually, make sure flutter packages get is executed first." + flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__)) + target 'RunnerTests' do + inherit! 
:search_paths end - generated_xcode_build_settings.map { |p| - if p[:name] == 'FLUTTER_FRAMEWORK_DIR' - symlink = File.join('Pods', '.symlinks', 'flutter') - File.symlink(File.dirname(p[:path]), symlink) - pod 'Flutter', :path => File.join(symlink, File.basename(p[:path])) - end - } - - # Plugin Pods - plugin_pods = parse_KV_file('../.flutter-plugins') - plugin_pods.map { |p| - symlink = File.join('Pods', '.symlinks', 'plugins', p[:name]) - File.symlink(p[:path], symlink) - pod p[:name], :path => File.join(symlink, 'ios') - } end + post_install do |installer| installer.pods_project.targets.each do |target| - target.build_configurations.each do |config| - config.build_settings['ENABLE_BITCODE'] = 'NO' - end + flutter_additional_ios_build_settings(target) end end diff --git a/example/ios/Runner.xcodeproj/project.pbxproj b/example/ios/Runner.xcodeproj/project.pbxproj index 9c3beedd7c..d974dc02cf 100644 --- a/example/ios/Runner.xcodeproj/project.pbxproj +++ b/example/ios/Runner.xcodeproj/project.pbxproj @@ -3,36 +3,37 @@ archiveVersion = 1; classes = { }; - objectVersion = 46; + objectVersion = 54; objects = { /* Begin PBXBuildFile section */ - 0B2E52A1193C475CC824D4E23320D30B /* flutter_assets in Resources */ = {isa = PBXBuildFile; fileRef = 127FE0431EC071958D67B4152ABE0141 /* flutter_assets */; }; - 229CB39F2D80040AEE1ED23F285ACA94 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A79A511C028ADC462437FA86E6F91AFF /* LaunchScreen.storyboard */; }; - 5AFE2FDDDEBDE94ED82B40DC8B153987 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 9428FFA3DA242B0B23CE49085F8A8C98 /* GeneratedPluginRegistrant.m */; }; - 6354C7FECC33A60C0696301773D78092 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 49096491DEF677A67E9C9D1D2F642FB2 /* AppFrameworkInfo.plist */; }; - 724F1D62234235348CFA527B /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = C63925257EB631DA64C78EFD /* libPods-Runner.a 
*/; }; - 970259CB2284F35CC747F170CC0DF853 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FF4B47BE30BEAD46325791691A899103 /* Main.storyboard */; }; - 9D577FED5514C03E7AB7E66F98D7A053 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 33D01B15D123D484F5CECECF4955185C /* main.m */; }; - 9D7F825691A20481792F11A568AD8FAD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A333CF75606D18C461DFF9ABF7427251 /* Assets.xcassets */; }; - AD0EC61B92FA8B6B7CF43562D02EBD49 /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */; }; - C43A44975A03EE6E1AB6C84DE6994F08 /* Generated.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 2271EF72D6523FE6EB695CB1AFED02B6 /* Generated.xcconfig */; }; - CAD7D9F3E9E40787135E737FEDF280DF /* App.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; - CE5D87634A0A12EE98C9DC1D9D5BFFDE /* App.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */; }; - DF12DBCE3921EA73822D14A03AD6F79C /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 68DD12D69C99694B0B6CBEAFEC0CBD89 /* AppDelegate.m */; }; - DFD15F91EC1333E9C1F771E4E93987B1 /* Flutter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */; }; - F16B5D11B6B398BD29B823EAA9ACFC69 /* Flutter.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; + 331C80F4294D02FB00263BE5 /* RunnerTests.m in Sources */ = {isa = 
PBXBuildFile; fileRef = 331C80F3294D02FB00263BE5 /* RunnerTests.m */; }; + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; + 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; + 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; /* End PBXBuildFile section */ +/* Begin PBXContainerItemProxy section */ + 331C80F5294D02FB00263BE5 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 97C146E61CF9000F007C117D /* Project object */; + proxyType = 1; + remoteGlobalIDString = 97C146ED1CF9000F007C117D; + remoteInfo = Runner; + }; +/* End PBXContainerItemProxy section */ + /* Begin PBXCopyFilesBuildPhase section */ - E55F38183CFB6C120953C92EF0C091F4 /* Embed Frameworks */ = { + 9705A1C41CF9048500538489 /* Embed Frameworks */ = { isa = PBXCopyFilesBuildPhase; buildActionMask = 2147483647; dstPath = ""; dstSubfolderSpec = 10; files = ( - CAD7D9F3E9E40787135E737FEDF280DF /* App.framework in Embed Frameworks */, - F16B5D11B6B398BD29B823EAA9ACFC69 /* Flutter.framework in Embed Frameworks */, ); name = "Embed Frameworks"; runOnlyForDeploymentPostprocessing = 0; @@ -40,127 +41,135 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ - 127FE0431EC071958D67B4152ABE0141 /* flutter_assets */ = {isa = PBXFileReference; 
lastKnownFileType = folder; name = flutter_assets; path = Flutter/flutter_assets; sourceTree = SOURCE_ROOT; }; - 21CE142FB216A639B6BE578625A89011 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; - 2271EF72D6523FE6EB695CB1AFED02B6 /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; - 2959FB0650C06639B955C55500B01293 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; - 33D01B15D123D484F5CECECF4955185C /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; - 49096491DEF677A67E9C9D1D2F642FB2 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; - 5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; - 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; - 68DD12D69C99694B0B6CBEAFEC0CBD89 /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; - 6E0F8D876B170C77FD896E647713843C /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; - 843A0C17DB3B7B95DB4CE8FC36407EDD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = 
Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; - 8DCF6C17523B59ABB095575CE6B532F1 /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; - 9428FFA3DA242B0B23CE49085F8A8C98 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; - A333CF75606D18C461DFF9ABF7427251 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Flutter.framework; path = Flutter/Flutter.framework; sourceTree = ""; }; - C63925257EB631DA64C78EFD /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; - F4B25E6138001208114FB8F975774109 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; - F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = App.framework; path = Flutter/App.framework; sourceTree = ""; }; + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; + 331C80F1294D02FB00263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + 331C80F3294D02FB00263BE5 /* RunnerTests.m */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RunnerTests.m; sourceTree = ""; }; + 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; + 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; + 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; + 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; + 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; + 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 97C146F21CF9000F007C117D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; + 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; 
lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ - 00C27919A6B50DC1DE2CD3F64D33BCCA /* Frameworks */ = { + 331C80EE294D02FB00263BE5 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 97C146EB1CF9000F007C117D /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - CE5D87634A0A12EE98C9DC1D9D5BFFDE /* App.framework in Frameworks */, - DFD15F91EC1333E9C1F771E4E93987B1 /* Flutter.framework in Frameworks */, - 724F1D62234235348CFA527B /* libPods-Runner.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ - 1F6DF5EA9BFBBD5EB76865BC73D9796E /* Supporting Files */ = { + 331C80F2294D02FB00263BE5 /* RunnerTests */ = { isa = PBXGroup; children = ( - 33D01B15D123D484F5CECECF4955185C /* main.m */, + 331C80F3294D02FB00263BE5 /* RunnerTests.m */, ); - name = "Supporting Files"; + path = RunnerTests; sourceTree = ""; }; - 43EB847F420AC177D9B217DF4AF4DD93 /* Products */ = { + 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( - 6E0F8D876B170C77FD896E647713843C /* Runner.app */, + 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */, + 9740EEB21CF90195004384FC /* Debug.xcconfig */, + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, + 9740EEB31CF90195004384FC /* Generated.xcconfig */, ); - name = Products; - sourceTree = ""; - }; - 4AAEB042555170F933166EF6 /* Pods */ = { - isa = PBXGroup; - children = ( - ); - name = Pods; + name = Flutter; sourceTree = ""; }; - 7533C2DA49EB7DB513E5B7EE /* Frameworks */ = { + 97C146E51CF9000F007C117D = { isa = PBXGroup; 
children = ( - C63925257EB631DA64C78EFD /* libPods-Runner.a */, + 9740EEB11CF90186004384FC /* Flutter */, + 97C146F01CF9000F007C117D /* Runner */, + 331C80F2294D02FB00263BE5 /* RunnerTests */, + 97C146EF1CF9000F007C117D /* Products */, ); - name = Frameworks; sourceTree = ""; }; - BB99F626A23DAB811A61522FC9119888 = { + 97C146EF1CF9000F007C117D /* Products */ = { isa = PBXGroup; children = ( - D200E37E3AAFC3BCF315E1D381844A67 /* Flutter */, - 43EB847F420AC177D9B217DF4AF4DD93 /* Products */, - E63CF9E523D767D2740E40098133A381 /* Runner */, - 4AAEB042555170F933166EF6 /* Pods */, - 7533C2DA49EB7DB513E5B7EE /* Frameworks */, + 97C146EE1CF9000F007C117D /* Runner.app */, + 331C80F1294D02FB00263BE5 /* RunnerTests.xctest */, ); + name = Products; sourceTree = ""; }; - D200E37E3AAFC3BCF315E1D381844A67 /* Flutter */ = { + 97C146F01CF9000F007C117D /* Runner */ = { isa = PBXGroup; children = ( - 127FE0431EC071958D67B4152ABE0141 /* flutter_assets */, - F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */, - 49096491DEF677A67E9C9D1D2F642FB2 /* AppFrameworkInfo.plist */, - 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */, - B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */, - 2271EF72D6523FE6EB695CB1AFED02B6 /* Generated.xcconfig */, - 5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */, + 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */, + 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */, + 97C146FA1CF9000F007C117D /* Main.storyboard */, + 97C146FD1CF9000F007C117D /* Assets.xcassets */, + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */, + 97C147021CF9000F007C117D /* Info.plist */, + 97C146F11CF9000F007C117D /* Supporting Files */, + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */, + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */, ); - name = Flutter; + path = Runner; sourceTree = ""; }; - E63CF9E523D767D2740E40098133A381 /* Runner */ = { + 97C146F11CF9000F007C117D /* Supporting Files */ = { isa = PBXGroup; children = ( - 
1F6DF5EA9BFBBD5EB76865BC73D9796E /* Supporting Files */, - 8DCF6C17523B59ABB095575CE6B532F1 /* AppDelegate.h */, - 68DD12D69C99694B0B6CBEAFEC0CBD89 /* AppDelegate.m */, - A333CF75606D18C461DFF9ABF7427251 /* Assets.xcassets */, - 2959FB0650C06639B955C55500B01293 /* GeneratedPluginRegistrant.h */, - 9428FFA3DA242B0B23CE49085F8A8C98 /* GeneratedPluginRegistrant.m */, - F4B25E6138001208114FB8F975774109 /* Info.plist */, - A79A511C028ADC462437FA86E6F91AFF /* LaunchScreen.storyboard */, - FF4B47BE30BEAD46325791691A899103 /* Main.storyboard */, + 97C146F21CF9000F007C117D /* main.m */, ); - path = Runner; + name = "Supporting Files"; sourceTree = ""; }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ - E30F7D64842177B99FDDF63DCA10BCDE /* Runner */ = { + 331C80F0294D02FB00263BE5 /* RunnerTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = 331C80F7294D02FB00263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */; + buildPhases = ( + 331C80ED294D02FB00263BE5 /* Sources */, + 331C80EE294D02FB00263BE5 /* Frameworks */, + 331C80EF294D02FB00263BE5 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + 331C80F6294D02FB00263BE5 /* PBXTargetDependency */, + ); + name = RunnerTests; + productName = RunnerTests; + productReference = 331C80F1294D02FB00263BE5 /* RunnerTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; + 97C146ED1CF9000F007C117D /* Runner */ = { isa = PBXNativeTarget; - buildConfigurationList = 92578F7B9E3F6AC8F17192C290223DCF /* Build configuration list for PBXNativeTarget "Runner" */; + buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( - E550E15CC3C61725E6821A4E /* [CP] Check Pods Manifest.lock */, - 7E9D2C9174A80CAA4BFDFDCB4F72C7A8 /* Run Script */, - FBC56BB6E87C12DAFB6BB5C3C53DF533 /* Sources */, - 00C27919A6B50DC1DE2CD3F64D33BCCA /* Frameworks */, - 8CF819BBBFB9AE05A1821B127DF85DF8 /* Resources */, - 
E55F38183CFB6C120953C92EF0C091F4 /* Embed Frameworks */, - 3F42640EB4246C17D4CDE846C95B84D3 /* Thin Binary */, - 440B41726F72B87580837854 /* [CP] Embed Pods Frameworks */, + 9740EEB61CF901F6004384FC /* Run Script */, + 97C146EA1CF9000F007C117D /* Sources */, + 97C146EB1CF9000F007C117D /* Frameworks */, + 97C146EC1CF9000F007C117D /* Resources */, + 9705A1C41CF9048500538489 /* Embed Frameworks */, + 3B06AD1E1E4923F5004D2608 /* Thin Binary */, ); buildRules = ( ); @@ -168,101 +177,87 @@ ); name = Runner; productName = Runner; - productReference = 6E0F8D876B170C77FD896E647713843C /* Runner.app */; + productReference = 97C146EE1CF9000F007C117D /* Runner.app */; productType = "com.apple.product-type.application"; }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ - 18C1723432283E0CC55F10A6DCFD9E02 /* Project object */ = { + 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 0910; - ORGANIZATIONNAME = "The Chromium Authors"; + LastUpgradeCheck = 1300; + ORGANIZATIONNAME = ""; TargetAttributes = { - E30F7D64842177B99FDDF63DCA10BCDE = { + 331C80F0294D02FB00263BE5 = { + CreatedOnToolsVersion = 14.0; + TestTargetID = 97C146ED1CF9000F007C117D; + }; + 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; - DevelopmentTeam = 5J859T6AE8; - SystemCapabilities = { - com.apple.BackgroundModes = { - enabled = 1; - }; - }; }; }; }; - buildConfigurationList = 9FDCD9B0638A340D0E561864FC06FD7A /* Build configuration list for PBXProject "Runner" */; - compatibilityVersion = "Xcode 3.2"; - developmentRegion = English; + buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; hasScannedForEncodings = 0; knownRegions = ( en, Base, ); - mainGroup = BB99F626A23DAB811A61522FC9119888; - productRefGroup = 43EB847F420AC177D9B217DF4AF4DD93 /* Products */; + mainGroup = 97C146E51CF9000F007C117D; + productRefGroup 
= 97C146EF1CF9000F007C117D /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( - E30F7D64842177B99FDDF63DCA10BCDE /* Runner */, + 97C146ED1CF9000F007C117D /* Runner */, + 331C80F0294D02FB00263BE5 /* RunnerTests */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ - 8CF819BBBFB9AE05A1821B127DF85DF8 /* Resources */ = { + 331C80EF294D02FB00263BE5 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - 6354C7FECC33A60C0696301773D78092 /* AppFrameworkInfo.plist in Resources */, - 9D7F825691A20481792F11A568AD8FAD /* Assets.xcassets in Resources */, - AD0EC61B92FA8B6B7CF43562D02EBD49 /* Debug.xcconfig in Resources */, - C43A44975A03EE6E1AB6C84DE6994F08 /* Generated.xcconfig in Resources */, - 229CB39F2D80040AEE1ED23F285ACA94 /* LaunchScreen.storyboard in Resources */, - 970259CB2284F35CC747F170CC0DF853 /* Main.storyboard in Resources */, - 0B2E52A1193C475CC824D4E23320D30B /* flutter_assets in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; -/* End PBXResourcesBuildPhase section */ - -/* Begin PBXShellScriptBuildPhase section */ - 3F42640EB4246C17D4CDE846C95B84D3 /* Thin Binary */ = { - isa = PBXShellScriptBuildPhase; + 97C146EC1CF9000F007C117D /* Resources */ = { + isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - ); - inputPaths = ( - ); - name = "Thin Binary"; - outputPaths = ( + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */, + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */, + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */, + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */, ); runOnlyForDeploymentPostprocessing = 0; - shellPath = /bin/sh; - shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" thin"; }; - 440B41726F72B87580837854 /* [CP] Embed Pods Frameworks */ = { +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 
3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); inputPaths = ( - "${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh", - "${PODS_ROOT}/.symlinks/flutter/ios/Flutter.framework", - "${PODS_ROOT}/.symlinks/plugins/webrtc/ios/WebRTC.framework", + "${TARGET_BUILD_DIR}/${INFOPLIST_PATH}", ); - name = "[CP] Embed Pods Frameworks"; + name = "Thin Binary"; outputPaths = ( - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Flutter.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/WebRTC.framework", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; - showEnvVarsInLog = 0; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; - 7E9D2C9174A80CAA4BFDFDCB4F72C7A8 /* Run Script */ = { + 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); @@ -275,62 +270,59 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; - E550E15CC3C61725E6821A4E /* [CP] Check Pods Manifest.lock */ = { - isa = PBXShellScriptBuildPhase; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 331C80ED294D02FB00263BE5 /* Sources */ = { + isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - ); - inputPaths = ( - "${PODS_PODFILE_DIR_PATH}/Podfile.lock", - "${PODS_ROOT}/Manifest.lock", - ); - name = "[CP] Check Pods Manifest.lock"; - outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + 331C80F4294D02FB00263BE5 /* RunnerTests.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; - shellPath = /bin/sh; - shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" 
\"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; - showEnvVarsInLog = 0; }; -/* End PBXShellScriptBuildPhase section */ - -/* Begin PBXSourcesBuildPhase section */ - FBC56BB6E87C12DAFB6BB5C3C53DF533 /* Sources */ = { + 97C146EA1CF9000F007C117D /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - DF12DBCE3921EA73822D14A03AD6F79C /* AppDelegate.m in Sources */, - 5AFE2FDDDEBDE94ED82B40DC8B153987 /* GeneratedPluginRegistrant.m in Sources */, - 9D577FED5514C03E7AB7E66F98D7A053 /* main.m in Sources */, + 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */, + 97C146F31CF9000F007C117D /* main.m in Sources */, + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ +/* Begin PBXTargetDependency section */ + 331C80F6294D02FB00263BE5 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 97C146ED1CF9000F007C117D /* Runner */; + targetProxy = 331C80F5294D02FB00263BE5 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + /* Begin PBXVariantGroup section */ - A79A511C028ADC462437FA86E6F91AFF /* LaunchScreen.storyboard */ = { + 97C146FA1CF9000F007C117D /* Main.storyboard */ = { isa = PBXVariantGroup; children = ( - 843A0C17DB3B7B95DB4CE8FC36407EDD /* Base */, + 97C146FB1CF9000F007C117D /* Base */, ); - name = LaunchScreen.storyboard; + name = Main.storyboard; sourceTree = ""; }; - FF4B47BE30BEAD46325791691A899103 /* Main.storyboard */ = { + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = { isa = PBXVariantGroup; children = ( - 21CE142FB216A639B6BE578625A89011 /* Base */, + 
97C147001CF9000F007C117D /* Base */, ); - name = Main.storyboard; + name = LaunchScreen.storyboard; sourceTree = ""; }; /* End PBXVariantGroup section */ /* Begin XCBuildConfiguration section */ - 0D181B1A2BE749D68B72CB6CAB984878 /* Release */ = { + 249021D3217E4FDB00AE95B9 /* Profile */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; @@ -342,12 +334,14 @@ CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; @@ -368,43 +362,74 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; + SUPPORTED_PLATFORMS = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; - name = Release; + name = Profile; }; - 4C45D427E97AD8F895ECED02C40BF5E0 /* Release */ = { + 249021D4217E4FDB00AE95B9 /* Profile */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; buildSettings = { - ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO; - ALWAYS_SEARCH_USER_PATHS = NO; - ARCHS = arm64; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = 5J859T6AE8; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; ENABLE_BITCODE = NO; - 
FRAMEWORK_SEARCH_PATHS = ( - "$(inherited)", - "$(PROJECT_DIR)/Flutter", - "$(PROJECT_DIR)", - ); INFOPLIST_FILE = Runner/Info.plist; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; - LIBRARY_SEARCH_PATHS = ( + LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", - "$(PROJECT_DIR)/Flutter", + "@executable_path/Frameworks", ); - PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-example"; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Profile; + }; + 331C80F8294D02FB00263BE5 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; PRODUCT_NAME = "$(TARGET_NAME)"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; + }; + name = Debug; + }; + 331C80F9294D02FB00263BE5 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; }; name = Release; }; - 879B60EB63E8E59028AA9FB0AAE288D8 /* Debug */ = { + 331C80FA294D02FB00263BE5 /* Profile */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + TEST_HOST = 
"$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; + }; + name = Profile; + }; + 97C147031CF9000F007C117D /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; @@ -416,12 +441,14 @@ CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; @@ -448,7 +475,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -456,54 +483,126 @@ }; name = Debug; }; - AF60501F7ADFF8F784D4241A34A3EA7C /* Debug */ = { + 97C147041CF9000F007C117D /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */; buildSettings = { - ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO; ALWAYS_SEARCH_USER_PATHS = NO; - ARCHS = arm64; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + 
CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SUPPORTED_PLATFORMS = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 97C147061CF9000F007C117D /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - DEVELOPMENT_TEAM = 5J859T6AE8; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; ENABLE_BITCODE = NO; - FRAMEWORK_SEARCH_PATHS = ( + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", - "$(PROJECT_DIR)/Flutter", - "$(PROJECT_DIR)", + "@executable_path/Frameworks", ); + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Debug; + }; + 97C147071CF9000F007C117D /* Release */ = { + isa = XCBuildConfiguration; + 
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + ENABLE_BITCODE = NO; INFOPLIST_FILE = Runner/Info.plist; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; - LIBRARY_SEARCH_PATHS = ( + LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", - "$(PROJECT_DIR)/Flutter", + "@executable_path/Frameworks", ); - PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-example"; + PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; PRODUCT_NAME = "$(TARGET_NAME)"; + VERSIONING_SYSTEM = "apple-generic"; }; - name = Debug; + name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ - 92578F7B9E3F6AC8F17192C290223DCF /* Build configuration list for PBXNativeTarget "Runner" */ = { + 331C80F7294D02FB00263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 331C80F8294D02FB00263BE5 /* Debug */, + 331C80F9294D02FB00263BE5 /* Release */, + 331C80FA294D02FB00263BE5 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = { isa = XCConfigurationList; buildConfigurations = ( - AF60501F7ADFF8F784D4241A34A3EA7C /* Debug */, - 4C45D427E97AD8F895ECED02C40BF5E0 /* Release */, + 97C147031CF9000F007C117D /* Debug */, + 97C147041CF9000F007C117D /* Release */, + 249021D3217E4FDB00AE95B9 /* Profile */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; - 9FDCD9B0638A340D0E561864FC06FD7A /* Build configuration list for PBXProject "Runner" */ = { + 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = { isa = XCConfigurationList; buildConfigurations = ( - 879B60EB63E8E59028AA9FB0AAE288D8 /* Debug */, - 
0D181B1A2BE749D68B72CB6CAB984878 /* Release */, + 97C147061CF9000F007C117D /* Debug */, + 97C147071CF9000F007C117D /* Release */, + 249021D4217E4FDB00AE95B9 /* Profile */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; /* End XCConfigurationList section */ }; - rootObject = 18C1723432283E0CC55F10A6DCFD9E02 /* Project object */; + rootObject = 97C146E61CF9000F007C117D /* Project object */; } diff --git a/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata index 1d526a16ed..919434a625 100644 --- a/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata +++ b/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -2,6 +2,6 @@ + location = "self:"> diff --git a/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000000..18d981003d --- /dev/null +++ b/example/ios/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index 1263ac84b1..f7213505ac 100644 --- a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ - - - - + + + + + + - - - - diff --git a/example/ios/Runner/AppDelegate.h b/example/ios/Runner/AppDelegate.h index cf210d213f..36e21bbf9c 100644 --- a/example/ios/Runner/AppDelegate.h +++ b/example/ios/Runner/AppDelegate.h @@ -1,5 +1,5 @@ -#import #import +#import @interface AppDelegate : FlutterAppDelegate diff --git a/example/ios/Runner/AppDelegate.m b/example/ios/Runner/AppDelegate.m index 112becd13b..70e83933db 
100644 --- a/example/ios/Runner/AppDelegate.m +++ b/example/ios/Runner/AppDelegate.m @@ -1,9 +1,10 @@ -#include "AppDelegate.h" -#include "GeneratedPluginRegistrant.h" +#import "AppDelegate.h" +#import "GeneratedPluginRegistrant.h" @implementation AppDelegate -- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { +- (BOOL)application:(UIApplication *)application + didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { [GeneratedPluginRegistrant registerWithRegistry:self]; // Override point for customization after application launch. return [super application:application didFinishLaunchingWithOptions:launchOptions]; diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png index 3d43d11e66..dc9ada4725 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png index 28c6bf0301..7353c41ecf 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png index 2ccbfd967d..797d452e45 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png 
b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png index f091b6b0bc..6ed2d933e1 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png index 4cde12118d..4cd7b0099c 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png index d0ef06e7ed..fe730945a0 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png index dcdc2306c2..321773cd85 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png index 2ccbfd967d..797d452e45 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png index 
c8f9ed8f5c..502f463a9b 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png index a6d6b8609d..0ec3034392 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png index a6d6b8609d..0ec3034392 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png index 75b2d164a5..e9f5fea27c 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png index c4df70d39d..84ac32ae7d 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png index 6a84f41e14..8953cba090 100644 Binary files 
a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png differ diff --git a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png index d0e1f58536..0467bf12aa 100644 Binary files a/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png and b/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png differ diff --git a/example/ios/Runner/Info.plist b/example/ios/Runner/Info.plist index e4e5b5eaf4..14b908826f 100644 --- a/example/ios/Runner/Info.plist +++ b/example/ios/Runner/Info.plist @@ -3,7 +3,9 @@ CFBundleDevelopmentRegion - en + $(DEVELOPMENT_LANGUAGE) + CFBundleDisplayName + Flutter Webrtc Example CFBundleExecutable $(EXECUTABLE_NAME) CFBundleIdentifier @@ -11,36 +13,25 @@ CFBundleInfoDictionaryVersion 6.0 CFBundleName - webrtc_example + flutter_webrtc_example CFBundlePackageType APPL CFBundleShortVersionString - 1.0 + $(FLUTTER_BUILD_NAME) CFBundleSignature ???? CFBundleVersion - 1 + $(FLUTTER_BUILD_NUMBER) LSRequiresIPhoneOS - NSCameraUsageDescription + NSCameraUsageDescription $(PRODUCT_NAME) Camera Usage! NSMicrophoneUsageDescription $(PRODUCT_NAME) Microphone Usage! - NSPhotoLibraryUsageDescription - $(PRODUCT_NAME) PhotoLibrary Usage! 
- UIBackgroundModes - - fetch - remote-notification - UILaunchStoryboardName LaunchScreen UIMainStoryboardFile Main - UIRequiredDeviceCapabilities - - arm64 - UISupportedInterfaceOrientations UIInterfaceOrientationPortrait @@ -56,5 +47,9 @@ UIViewControllerBasedStatusBarAppearance + CADisableMinimumFrameDurationOnPhone + + UIApplicationSupportsIndirectInputEvents + diff --git a/example/ios/Runner/main.m b/example/ios/Runner/main.m index 0ccc450011..dff6597e45 100644 --- a/example/ios/Runner/main.m +++ b/example/ios/Runner/main.m @@ -1,8 +1,8 @@ -#import #import +#import #import "AppDelegate.h" -int main(int argc, char * argv[]) { +int main(int argc, char* argv[]) { @autoreleasepool { return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); } diff --git a/example/ios/RunnerTests/RunnerTests.m b/example/ios/RunnerTests/RunnerTests.m new file mode 100644 index 0000000000..6d8b0bdeec --- /dev/null +++ b/example/ios/RunnerTests/RunnerTests.m @@ -0,0 +1,16 @@ +#import +#import +#import + +@interface RunnerTests : XCTestCase + +@end + +@implementation RunnerTests + +- (void)testExample { + // If you add code to the Runner application, consider adding tests here. + // See https://developer.apple.com/documentation/xctest for more information about using XCTest. 
+} + +@end diff --git a/example/lib/main.dart b/example/lib/main.dart index 9ea5f9d2a9..64bfc07d0c 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -1,35 +1,58 @@ -import 'package:flutter/material.dart'; import 'dart:core'; -import 'src/basic_sample/basic_sample.dart'; -import 'src/call_sample/call_sample.dart'; + +import 'package:flutter/foundation.dart' + show debugDefaultTargetPlatformOverride; +import 'package:flutter/material.dart'; +import 'package:flutter_background/flutter_background.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:flutter_webrtc_example/src/capture_frame_sample.dart'; + +import 'src/device_enumeration_sample.dart'; +import 'src/get_display_media_sample.dart'; +import 'src/get_user_media_sample.dart' + if (dart.library.js_interop) 'src/get_user_media_sample_web.dart'; +import 'src/loopback_data_channel_sample.dart'; +import 'src/loopback_sample_unified_tracks.dart'; import 'src/route_item.dart'; -import 'package:shared_preferences/shared_preferences.dart'; -void main() => runApp(new MyApp()); +void main() { + WidgetsFlutterBinding.ensureInitialized(); + if (WebRTC.platformIsDesktop) { + debugDefaultTargetPlatformOverride = TargetPlatform.fuchsia; + } else if (WebRTC.platformIsAndroid) { + //startForegroundService(); + } + runApp(MyApp()); +} + +Future startForegroundService() async { + final androidConfig = FlutterBackgroundAndroidConfig( + notificationTitle: 'Title of the notification', + notificationText: 'Text of the notification', + notificationImportance: AndroidNotificationImportance.normal, + notificationIcon: AndroidResource( + name: 'background_icon', + defType: 'drawable'), // Default is ic_launcher from folder mipmap + ); + await FlutterBackground.initialize(androidConfig: androidConfig); + return FlutterBackground.enableBackgroundExecution(); +} class MyApp extends StatefulWidget { @override - _MyAppState createState() => new _MyAppState(); -} - -enum DialogDemoAction { - cancel, - 
connect, + _MyAppState createState() => _MyAppState(); } class _MyAppState extends State { - List items; - String _serverAddress = '192.168.31.152'; - SharedPreferences prefs; + late List items; @override - initState() { + void initState() { super.initState(); - _initData(); _initItems(); } - _buildRow(context, item) { + ListBody _buildRow(context, item) { return ListBody(children: [ ListTile( title: Text(item.title), @@ -42,12 +65,13 @@ class _MyAppState extends State { @override Widget build(BuildContext context) { - return new MaterialApp( - home: new Scaffold( - appBar: new AppBar( - title: new Text('Flutter-WebRTC example'), + return MaterialApp( + debugShowCheckedModeBanner: false, + home: Scaffold( + appBar: AppBar( + title: Text('Flutter-WebRTC example'), ), - body: new ListView.builder( + body: ListView.builder( shrinkWrap: true, padding: const EdgeInsets.all(0.0), itemCount: items.length, @@ -57,78 +81,59 @@ class _MyAppState extends State { ); } - _initData() async { - prefs = await SharedPreferences.getInstance(); - setState(() { - _serverAddress = prefs.getString('server') ?? ''; - }); - } - - void showDemoDialog({BuildContext context, Widget child}) { - showDialog( - context: context, - builder: (BuildContext context) => child, - ).then((T value) { - // The value passed to Navigator.pop() or null. 
- if (value != null) { - if (value == DialogDemoAction.connect) { - prefs.setString('server', _serverAddress); - Navigator.push( - context, - MaterialPageRoute( - builder: (BuildContext context) => - CallSample(ip: _serverAddress))); - } - } - }); - } - - _showAddressDialog(context) { - showDemoDialog( - context: context, - child: new AlertDialog( - title: const Text('Enter server address:'), - content: TextField( - onChanged: (String text) { - setState(() { - _serverAddress = text; - }); - }, - decoration: InputDecoration( - hintText: _serverAddress, - ), - textAlign: TextAlign.center, - ), - actions: [ - new FlatButton( - child: const Text('CANCEL'), - onPressed: () { - Navigator.pop(context, DialogDemoAction.cancel); - }), - new FlatButton( - child: const Text('CONNECT'), - onPressed: () { - Navigator.pop(context, DialogDemoAction.connect); - }) - ])); - } - - _initItems() { + void _initItems() { items = [ RouteItem( - title: 'Basic API Tests', - subtitle: 'Basic API Tests.', + title: 'GetUserMedia', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => GetUserMediaSample())); + }), + RouteItem( + title: 'Device Enumeration', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + DeviceEnumerationSample())); + }), + RouteItem( + title: 'GetDisplayMedia', push: (BuildContext context) { Navigator.push( context, - new MaterialPageRoute( - builder: (BuildContext context) => new BasicSample())); + MaterialPageRoute( + builder: (BuildContext context) => + GetDisplayMediaSample())); }), RouteItem( - title: 'P2P Call Sample', - subtitle: 'P2P Call Sample.', + title: 'LoopBack Sample (Unified Tracks)', push: (BuildContext context) { - _showAddressDialog(context); + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + LoopBackSampleUnifiedTracks())); + }), + RouteItem( + title: 'DataChannelLoopBackSample', + 
push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + DataChannelLoopBackSample())); + }), + RouteItem( + title: 'Capture Frame', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => CaptureFrameSample())); }), ]; } diff --git a/example/lib/src/basic_sample/basic_sample.dart b/example/lib/src/basic_sample/basic_sample.dart deleted file mode 100644 index 4a3fe1d252..0000000000 --- a/example/lib/src/basic_sample/basic_sample.dart +++ /dev/null @@ -1,80 +0,0 @@ -import 'package:flutter/material.dart'; -import 'dart:core'; -import 'loopback_sample.dart'; -import 'get_user_media_sample.dart'; -import 'data_channel_sample.dart'; -import '../route_item.dart'; - -typedef void RouteCallback(BuildContext context); - -final List items = [ - RouteItem( - title: 'GetUserMedia Test', - push: (BuildContext context) { - Navigator.push( - context, - new MaterialPageRoute( - builder: (BuildContext context) => new GetUserMediaSample())); - }), - RouteItem( - title: 'LoopBack Sample', - push: (BuildContext context) { - Navigator.push( - context, - new MaterialPageRoute( - builder: (BuildContext context) => new LoopBackSample())); - }), - RouteItem( - title: 'DataChannel Test', - push: (BuildContext context) { - Navigator.push( - context, - new MaterialPageRoute( - builder: (BuildContext context) => new DataChannelSample())); - }), -]; - -class BasicSample extends StatefulWidget { - static String tag = 'basic_sample'; - @override - _BasicSampleState createState() => new _BasicSampleState(); -} - -class _BasicSampleState extends State { - GlobalKey _formKey = new GlobalKey(); - @override - initState() { - super.initState(); - } - - @override - deactivate() { - super.deactivate(); - } - - _buildRow(context, item) { - return ListBody(children: [ - ListTile( - title: Text(item.title), - onTap: () => item.push(context), - trailing: Icon(Icons.arrow_right), - 
), - Divider() - ]); - } - - @override - Widget build(BuildContext context) { - return new Scaffold( - appBar: new AppBar( - title: new Text('Basic API Tests'), - ), - body: new ListView.builder( - shrinkWrap: true, - padding: const EdgeInsets.all(0.0), - itemCount: items.length, - itemBuilder: (context, i) { - return _buildRow(context, items[i]); - })); - } -} diff --git a/example/lib/src/basic_sample/data_channel_sample.dart b/example/lib/src/basic_sample/data_channel_sample.dart deleted file mode 100644 index f4a7ec4950..0000000000 --- a/example/lib/src/basic_sample/data_channel_sample.dart +++ /dev/null @@ -1,159 +0,0 @@ -import 'package:flutter/material.dart'; -import 'package:webrtc/webrtc.dart'; -import 'dart:core'; - -class DataChannelSample extends StatefulWidget { - - static String tag = 'data_channel_sample'; - - @override - _DataChannelSampleState createState() => new _DataChannelSampleState(); -} - -class _DataChannelSampleState extends State { - RTCPeerConnection _peerConnection; - bool _inCalling = false; - - RTCDataChannelInit _dataChannelDict = null; - RTCDataChannel _dataChannel; - - String _sdp; - - @override - initState() { - super.initState(); - } - - _onSignalingState(RTCSignalingState state) { - print(state); - } - - _onIceGatheringState(RTCIceGatheringState state) { - print(state); - } - - _onIceConnectionState(RTCIceConnectionState state) { - print(state); - } - - _onCandidate(RTCIceCandidate candidate) { - print('onCandidate: ' + candidate.candidate); - _peerConnection.addCandidate(candidate); - setState(() { - _sdp += '\n'; - _sdp += candidate.candidate; - }); - } - - _onRenegotiationNeeded() { - print('RenegotiationNeeded'); - } - - _onDataChannel(RTCDataChannel dataChannel) { - } - - // Platform messages are asynchronous, so we initialize in an async method. 
- _makeCall() async { - - Map configuration = { - "iceServers": [ - {"url": "stun:stun.l.google.com:19302"}, - ] - }; - - final Map offer_sdp_constraints = { - "mandatory": { - "OfferToReceiveAudio": false, - "OfferToReceiveVideo": false, - }, - "optional": [], - }; - - final Map loopback_constraints = { - "mandatory": {}, - "optional": [ - {"DtlsSrtpKeyAgreement": true }, - ], - }; - - if (_peerConnection != null) return; - - try { - - _peerConnection = - await createPeerConnection(configuration, loopback_constraints); - - _peerConnection.onSignalingState = _onSignalingState; - _peerConnection.onIceGatheringState = _onIceGatheringState; - _peerConnection.onIceConnectionState = _onIceConnectionState; - _peerConnection.onIceCandidate = _onCandidate; - _peerConnection.onRenegotiationNeeded = _onRenegotiationNeeded; - - _dataChannelDict = new RTCDataChannelInit(); - _dataChannelDict.id = 1; - _dataChannelDict.ordered = true; - _dataChannelDict.maxRetransmitTime = -1; - _dataChannelDict.maxRetransmits = -1; - _dataChannelDict.protocol = "sctp"; - _dataChannelDict.negotiated = false; - - _dataChannel = await _peerConnection.createDataChannel('dataChannel', _dataChannelDict); - _peerConnection.onDataChannel = _onDataChannel; - - RTCSessionDescription description = - await _peerConnection.createOffer(offer_sdp_constraints); - print(description.sdp); - _peerConnection.setLocalDescription(description); - - _sdp = description.sdp; - //change for loopback. 
- //description.type = 'answer'; - //_peerConnection.setRemoteDescription(description); - } catch (e) { - print(e.toString()); - } - if (!mounted) return; - - setState(() { - _inCalling = true; - }); - } - - _hangUp() async { - try { - await _dataChannel.close(); - await _peerConnection.close(); - _peerConnection = null; - } catch (e) { - print(e.toString()); - } - setState(() { - _inCalling = false; - }); - } - - @override - Widget build(BuildContext context) { - return - new Scaffold( - appBar: new AppBar( - title: new Text('Data Channel Test'), - ), - body: new OrientationBuilder( - builder: (context, orientation) { - return new Center( - child: new Container( - child: _inCalling? Text(_sdp) : Text('data channel test'), - ), - ); - }, - ), - floatingActionButton: new FloatingActionButton( - onPressed: _inCalling ? _hangUp : _makeCall, - tooltip: _inCalling ? 'Hangup' : 'Call', - child: new Icon(_inCalling ? Icons.call_end : Icons.phone), - ), - ); - - } -} diff --git a/example/lib/src/basic_sample/get_user_media_sample.dart b/example/lib/src/basic_sample/get_user_media_sample.dart deleted file mode 100644 index 2c48fff907..0000000000 --- a/example/lib/src/basic_sample/get_user_media_sample.dart +++ /dev/null @@ -1,106 +0,0 @@ -import 'package:flutter/material.dart'; -import 'package:webrtc/webrtc.dart'; -import 'dart:core'; - -/** - * getUserMedia sample - */ -class GetUserMediaSample extends StatefulWidget { - static String tag = 'get_usermedia_sample'; - - @override - _GetUserMediaSampleState createState() => new _GetUserMediaSampleState(); -} - -class _GetUserMediaSampleState extends State { - MediaStream _localStream; - final _localRenderer = new RTCVideoRenderer(); - bool _inCalling = false; - - @override - initState() { - super.initState(); - initRenderers(); - } - - @override - deactivate() { - super.deactivate(); - if (_inCalling) { - _hangUp(); - } - } - - initRenderers() async { - await _localRenderer.initialize(); - } - - // Platform messages are 
asynchronous, so we initialize in an async method. - _makeCall() async { - final Map mediaConstraints = { - "audio": true, - "video": { - "mandatory": { - "minWidth":'640', // Provide your own width, height and frame rate here - "minHeight": '480', - "minFrameRate": '30', - }, - "facingMode": "user", - "optional": [], - } - }; - - try { - navigator.getUserMedia(mediaConstraints).then((stream){ - _localStream = stream; - _localRenderer.srcObject = _localStream; - }); - } catch (e) { - print(e.toString()); - } - if (!mounted) return; - - setState(() { - _inCalling = true; - }); - } - - _hangUp() async { - try { - await _localStream.dispose(); - _localRenderer.srcObject = null; - } catch (e) { - print(e.toString()); - } - setState(() { - _inCalling = false; - }); - } - - @override - Widget build(BuildContext context) { - return new Scaffold( - appBar: new AppBar( - title: new Text('GetUserMedia API Test'), - ), - body: new OrientationBuilder( - builder: (context, orientation) { - return new Center( - child: new Container( - margin: new EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), - width: MediaQuery.of(context).size.width, - height: MediaQuery.of(context).size.height, - child: RTCVideoView(_localRenderer), - decoration: new BoxDecoration(color: Colors.black54), - ), - ); - }, - ), - floatingActionButton: new FloatingActionButton( - onPressed: _inCalling ? _hangUp : _makeCall, - tooltip: _inCalling ? 'Hangup' : 'Call', - child: new Icon(_inCalling ? 
Icons.call_end : Icons.phone), - ), - ); - } -} diff --git a/example/lib/src/basic_sample/loopback_sample.dart b/example/lib/src/basic_sample/loopback_sample.dart deleted file mode 100644 index 41d91d26dc..0000000000 --- a/example/lib/src/basic_sample/loopback_sample.dart +++ /dev/null @@ -1,231 +0,0 @@ -import 'package:flutter/material.dart'; -import 'package:webrtc/webrtc.dart'; -import 'dart:core'; -import 'dart:async'; - - -class LoopBackSample extends StatefulWidget { - - static String tag = 'loopback_sample'; - - @override - _MyAppState createState() => new _MyAppState(); -} - -class _MyAppState extends State { - MediaStream _localStream; - RTCPeerConnection _peerConnection; - final _localRenderer = new RTCVideoRenderer(); - final _remoteRenderer = new RTCVideoRenderer(); - bool _inCalling = false; - Timer _timer; - - @override - initState() { - super.initState(); - initRenderers(); - } - - @override - deactivate() { - super.deactivate(); - if (_inCalling) { - _hangUp(); - } - } - - initRenderers() async { - await _localRenderer.initialize(); - await _remoteRenderer.initialize(); - } - - void handleStatsReport(Timer timer) async { - if (_peerConnection != null) { - List reports = await _peerConnection.getStats(null); - reports.forEach((report) { - print("report => { "); - print(" id: " + report.id + ","); - print(" type: " + report.type + ","); - print(" timestamp: ${report.timestamp},"); - print(" values => {"); - report.values.forEach((key, value) { - print(" " + key + " : " + value + ", "); - }); - print(" }"); - print("}"); - }); - } - } - - _onSignalingState(RTCSignalingState state) { - print(state); - } - - _onIceGatheringState(RTCIceGatheringState state) { - print(state); - } - - _onIceConnectionState(RTCIceConnectionState state) { - print(state); - } - - _onAddStream(MediaStream stream) { - print('addStream: ' + stream.id); - _remoteRenderer.srcObject = stream; - } - - _onRemoveStream(MediaStream stream) { - _remoteRenderer.srcObject = null; - } - - 
_onCandidate(RTCIceCandidate candidate) { - print('onCandidate: ' + candidate.candidate); - _peerConnection.addCandidate(candidate); - } - - _onRenegotiationNeeded() { - print('RenegotiationNeeded'); - } - - // Platform messages are asynchronous, so we initialize in an async method. - _makeCall() async { - final Map mediaConstraints = { - "audio": true, - "video": { - "mandatory": { - "minWidth": - '640', // Provide your own width, height and frame rate here - "minHeight": '480', - "minFrameRate": '30', - }, - "facingMode": "user", - "optional": [], - } - }; - - Map configuration = { - "iceServers": [ - {"url": "stun:stun.l.google.com:19302"}, - ] - }; - - final Map offer_sdp_constraints = { - "mandatory": { - "OfferToReceiveAudio": true, - "OfferToReceiveVideo": true, - }, - "optional": [], - }; - - final Map loopback_constraints = { - "mandatory": {}, - "optional": [ - {"DtlsSrtpKeyAgreement": false}, - ], - }; - - if (_peerConnection != null) return; - - try { - _localStream = await navigator.getUserMedia(mediaConstraints); - _localRenderer.srcObject = _localStream; - - _peerConnection = - await createPeerConnection(configuration, loopback_constraints); - - _peerConnection.onSignalingState = _onSignalingState; - _peerConnection.onIceGatheringState = _onIceGatheringState; - _peerConnection.onIceConnectionState = _onIceConnectionState; - _peerConnection.onAddStream = _onAddStream; - _peerConnection.onRemoveStream = _onRemoveStream; - _peerConnection.onIceCandidate = _onCandidate; - _peerConnection.onRenegotiationNeeded = _onRenegotiationNeeded; - - _peerConnection.addStream(_localStream); - RTCSessionDescription description = - await _peerConnection.createOffer(offer_sdp_constraints); - print(description.sdp); - _peerConnection.setLocalDescription(description); - //change for loopback. 
- description.type = 'answer'; - _peerConnection.setRemoteDescription(description); - } catch (e) { - print(e.toString()); - } - if (!mounted) return; - - _timer = new Timer.periodic(Duration(seconds: 1), handleStatsReport); - - setState(() { - _inCalling = true; - }); - } - - _hangUp() async { - try { - await _localStream.dispose(); - await _peerConnection.close(); - _peerConnection = null; - _localRenderer.srcObject = null; - _remoteRenderer.srcObject = null; - } catch (e) { - print(e.toString()); - } - setState(() { - _inCalling = false; - }); - } - - @override - Widget build(BuildContext context) { - return - new Scaffold( - appBar: new AppBar( - title: new Text('LoopBack example'), - ), - body: new OrientationBuilder( - builder: (context, orientation) { - return new Center( - child: new Container( - decoration: new BoxDecoration(color: Colors.white), - child: new Stack( - children: [ - new Align( - alignment: orientation == Orientation.portrait - ? const FractionalOffset(0.5, 0.1) - : const FractionalOffset(0.0, 0.5), - child: new Container( - margin: new EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), - width: 320.0, - height: 240.0, - child: new RTCVideoView(_localRenderer), - decoration: new BoxDecoration(color: Colors.black54), - ), - ), - new Align( - alignment: orientation == Orientation.portrait - ? const FractionalOffset(0.5, 0.9) - : const FractionalOffset(1.0, 0.5), - child: new Container( - margin: new EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), - width: 320.0, - height: 240.0, - child: new RTCVideoView(_remoteRenderer), - decoration: new BoxDecoration(color: Colors.black54), - ), - ), - ], - ), - ), - ); - }, - ), - floatingActionButton: new FloatingActionButton( - onPressed: _inCalling ? _hangUp : _makeCall, - tooltip: _inCalling ? 'Hangup' : 'Call', - child: new Icon(_inCalling ? 
Icons.call_end : Icons.phone), - ), - ); - - } -} diff --git a/example/lib/src/call_sample/call_sample.dart b/example/lib/src/call_sample/call_sample.dart deleted file mode 100644 index 9796539acc..0000000000 --- a/example/lib/src/call_sample/call_sample.dart +++ /dev/null @@ -1,178 +0,0 @@ -import 'package:flutter/material.dart'; -import 'dart:io'; -import 'dart:core'; -import 'signaling.dart'; -import 'package:webrtc/webrtc.dart'; - -class CallSample extends StatefulWidget { - static String tag = 'call_sample'; - - final String ip; - - CallSample({Key key, @required this.ip}) : super(key: key); - - @override - _CallSampleState createState() => new _CallSampleState(serverIP: ip); -} - -class _CallSampleState extends State { - Signaling _signaling; - String _displayName = - Platform.localHostname + '(' + Platform.operatingSystem + ")"; - List _peers; - var _self_id; - RTCVideoRenderer _localRenderer = new RTCVideoRenderer(); - RTCVideoRenderer _remoteRenderer = new RTCVideoRenderer(); - bool _inCalling = false; - final String serverIP; - - _CallSampleState({Key key, @required this.serverIP}); - - @override - initState() { - super.initState(); - initRenderers(); - _connect(); - } - - initRenderers() async { - await _localRenderer.initialize(); - await _remoteRenderer.initialize(); - } - - @override - deactivate() { - super.deactivate(); - if (_signaling != null) _signaling.close(); - } - - void _connect() async { - if (_signaling == null) { - _signaling = new Signaling('ws://' + serverIP + ':4442', _displayName) - ..connect(); - - _signaling.onStateChange = (SignalingState state) { - switch (state) { - case SignalingState.CallStateNew: - this.setState(() { - _inCalling = true; - }); - break; - case SignalingState.CallStateBye: - this.setState(() { - _localRenderer.srcObject = null; - _remoteRenderer.srcObject = null; - _inCalling = false; - }); - break; - } - }; - - _signaling.onPeersUpdate = ((event) { - this.setState(() { - _self_id = event['self']; - _peers = 
event['peers']; - }); - }); - - _signaling.onLocalStream = ((stream) { - _localRenderer.srcObject = stream; - }); - - _signaling.onAddRemoteStream = ((stream) { - _remoteRenderer.srcObject = stream; - }); - - _signaling.onRemoveRemoteStream = ((stream) { - _remoteRenderer.srcObject = null; - }); - } - } - - _invitePeer(context, peerId) async { - if (_signaling != null && peerId != _self_id) { - _signaling.invite(peerId, 'video'); - } - } - - _hangUp() { - if (_signaling != null) { - _signaling.bye(); - } - } - - _buildRow(context, peer) { - var self = (peer['id'] == _self_id); - return ListBody(children: [ - ListTile( - title: Text(self - ? peer['name'] + '[Your self]' - : peer['name'] + '[' + peer['user_agent'] + ']'), - onTap: () => _invitePeer(context, peer['id']), - trailing: Icon(Icons.videocam), - subtitle: Text('id: ' + peer['id']), - ), - Divider() - ]); - } - - @override - Widget build(BuildContext context) { - return new Scaffold( - appBar: new AppBar( - title: new Text('P2P Call Sample'), - actions: [ - IconButton( - icon: const Icon(Icons.settings), - onPressed: null, - tooltip: 'setup', - ), - ], - ), - floatingActionButton: _inCalling - ? FloatingActionButton( - onPressed: _hangUp, - tooltip: 'Hangup', - child: new Icon(Icons.call_end), - ) - : null, - body: _inCalling - ? OrientationBuilder(builder: (context, orientation) { - return new Container( - child: new Stack(children: [ - new Positioned( - left: 0.0, - right: 0.0, - top: 0.0, - bottom: 0.0, - child: new Container( - margin: new EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), - width: MediaQuery.of(context).size.width, - height: MediaQuery.of(context).size.height, - child: new RTCVideoView(_remoteRenderer), - decoration: new BoxDecoration(color: Colors.black54), - )), - new Positioned( - left: 20.0, - top: 20.0, - child: new Container( - width: orientation == Orientation.portrait ? 90.0 : 120.0, - height: - orientation == Orientation.portrait ? 
120.0 : 90.0, - child: new RTCVideoView(_localRenderer), - decoration: new BoxDecoration(color: Colors.black54), - ), - ), - ]), - ); - }) - : new ListView.builder( - shrinkWrap: true, - padding: const EdgeInsets.all(0.0), - itemCount: (_peers != null ? _peers.length : 0), - itemBuilder: (context, i) { - return _buildRow(context, _peers[i]); - }), - ); - } -} diff --git a/example/lib/src/call_sample/random_string.dart b/example/lib/src/call_sample/random_string.dart deleted file mode 100644 index 2e45184c47..0000000000 --- a/example/lib/src/call_sample/random_string.dart +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) 2016, Damon Douglas. All rights reserved. Use of this source code -// is governed by a BSD-style license that can be found in the LICENSE file. - -/// Simple library for generating random ascii strings. -/// -/// More dartdocs go here. -/// -/// -/// A simple usage example: -/// -/// import 'package:random_string/random_string.dart' as random; -/// main() { -/// print(randomBetween(10,20)); // some integer between 10 and 20 -/// print(randomNumeric(4)); // sequence of 4 random numbers i.e. 3259 -/// print(randomString(10)); // random sequence of 10 characters i.e. e~f93(4l- -/// print(randomAlpha(5)); // random sequence of 5 alpha characters i.e. aRztC -/// print(randomAlphaNumeric(10)); // random sequence of 10 alpha numeric i.e. aRztC1y32B -/// } - -library random_string; - -import 'dart:math'; - -const ASCII_START = 33; -const ASCII_END = 126; -const NUMERIC_START = 48; -const NUMERIC_END = 57; -const LOWER_ALPHA_START = 97; -const LOWER_ALPHA_END = 122; -const UPPER_ALPHA_START = 65; -const UPPER_ALPHA_END = 90; - -/// Generates a random integer where [from] <= [to]. 
-int randomBetween(int from, int to) { - if (from > to) throw new Exception('$from cannot be > $to'); - var rand = new Random(); - return ((to - from) * rand.nextDouble()).toInt() + from; -} - -/// Generates a random string of [length] with characters -/// between ascii [from] to [to]. -/// Defaults to characters of ascii '!' to '~'. -String randomString(int length, {int from: ASCII_START, int to: ASCII_END}) { - return new String.fromCharCodes( - new List.generate(length, (index) => randomBetween(from, to))); -} - -/// Generates a random string of [length] with only numeric characters. -String randomNumeric(int length) => - randomString(length, from: NUMERIC_START, to: NUMERIC_END); -/* -/// Generates a random string of [length] with only alpha characters. -String randomAlpha(int length) { - var lowerAlphaLength = randomBetween(0, length); - var upperAlphaLength = length - lowerAlphaLength; - var lowerAlpha = randomString(lowerAlphaLength, - from: LOWER_ALPHA_START, to: LOWER_ALPHA_END); - var upperAlpha = randomString(upperAlphaLength, - from: UPPER_ALPHA_START, to: UPPER_ALPHA_END); - return randomMerge(lowerAlpha, upperAlpha); -} - -/// Generates a random string of [length] with alpha-numeric characters. -String randomAlphaNumeric(int length) { - var alphaLength = randomBetween(0, length); - var numericLength = length - alphaLength; - var alpha = randomAlpha(alphaLength); - var numeric = randomNumeric(numericLength); - return randomMerge(alpha, numeric); -} - -/// Merge [a] with [b] and scramble characters. 
-String randomMerge(String a, String b) { - var mergedCodeUnits = new List.from("$a$b".codeUnits); - mergedCodeUnits.shuffle(); - return new String.fromCharCodes(mergedCodeUnits); -}*/ diff --git a/example/lib/src/call_sample/settings.dart b/example/lib/src/call_sample/settings.dart deleted file mode 100644 index a96483649b..0000000000 --- a/example/lib/src/call_sample/settings.dart +++ /dev/null @@ -1,38 +0,0 @@ -import 'package:flutter/material.dart'; -import 'dart:core'; - -class CallSettings extends StatefulWidget { - static String tag = 'call_settings'; - - @override - _CallSettingsState createState() => new _CallSettingsState(); -} - -class _CallSettingsState extends State { - @override - initState() { - super.initState(); - } - - @override - deactivate() { - super.deactivate(); - - } - - @override - Widget build(BuildContext context) { - return new Scaffold( - appBar: new AppBar( - title: new Text('Settings'), - ), - body: new OrientationBuilder( - builder: (context, orientation) { - return new Center( - child: Text("settings") - ); - }, - ), - ); - } -} diff --git a/example/lib/src/call_sample/signaling.dart b/example/lib/src/call_sample/signaling.dart deleted file mode 100644 index 6c5d7409eb..0000000000 --- a/example/lib/src/call_sample/signaling.dart +++ /dev/null @@ -1,376 +0,0 @@ -import 'dart:convert'; -import 'dart:async'; -import 'dart:io'; -import 'package:webrtc/webrtc.dart'; -import 'random_string.dart'; - -enum SignalingState { - CallStateNew, - CallStateRinging, - CallStateInvite, - CallStateConnected, - CallStateBye, - ConnectionOpen, - ConnectionClosed, - ConnectionError, -} - -/* - * 回调类型定义. 
- */ -typedef void SignalingStateCallback(SignalingState state); -typedef void StreamStateCallback(MediaStream stream); -typedef void OtherEventCallback(dynamic event); -typedef void DataChannelMessageCallback(RTCDataChannel dc, data); - -class Signaling { - String _self_id = randomNumeric(6); - var _socket; - var _session_id; - var _url; - var _name; - var _peerConnections = new Map(); - var _daChannels = new Map(); - Timer _timer; - MediaStream _localStream; - List _remoteStreams; - SignalingStateCallback onStateChange; - StreamStateCallback onLocalStream; - StreamStateCallback onAddRemoteStream; - StreamStateCallback onRemoveRemoteStream; - OtherEventCallback onPeersUpdate; - DataChannelMessageCallback onDataChannel; - - Map _iceServers = { - 'iceServers': [ - {'url': 'stun:stun.l.google.com:19302'}, - ] - }; - - final Map _config = { - 'mandatory': {}, - 'optional': [ - {'DtlsSrtpKeyAgreement': true}, - ], - }; - - final Map _constraints = { - 'mandatory': { - 'OfferToReceiveAudio': true, - 'OfferToReceiveVideo': true, - }, - 'optional': [], - }; - - Signaling(this._url, this._name); - - close() { - - if (_localStream != null) { - _localStream.dispose(); - _localStream = null; - } - - _peerConnections.forEach((key, pc){ - pc.close(); - }); - if (_socket != null) - _socket.close(); - } - - void invite(String peer_id, String media) { - this._session_id = this._self_id + '-' + peer_id; - - if (this.onStateChange != null) { - this.onStateChange(SignalingState.CallStateNew); - } - - _createPeerConnection(peer_id, media).then((pc) { - _peerConnections[peer_id] = pc; - _createOffer(peer_id, pc, media); - }); - } - - void bye() { - _send('bye', { - 'session_id': this._session_id, - 'from': this._self_id, - }); - } - - void onMessage(message) async { - Map mapData = message; - var data = mapData['data']; - - switch(mapData['type']){ - - case 'peers': - { - List peers = data; - if(this.onPeersUpdate != null) { - Map event = new Map(); - event['self'] = _self_id; - 
event['peers'] = peers; - this.onPeersUpdate(event); - } - } - break; - case 'offer': - { - var id = data['from']; - var description = data['description']; - var media = data['media']; - var session_id = data['session_id']; - this._session_id = session_id; - - if (this.onStateChange != null) { - this.onStateChange(SignalingState.CallStateNew); - } - - _createPeerConnection(id, media).then((pc) { - _peerConnections[id] = pc; - pc.setRemoteDescription( - new RTCSessionDescription(description['sdp'], description['type'])); - _createAnswer(id, pc); - }); - } - break; - case 'answer': - { - var id = data['from']; - var description = data['description']; - - var pc = _peerConnections[id]; - if (pc != null) { - pc.setRemoteDescription( - new RTCSessionDescription(description['sdp'], description['type'])); - } - } - break; - case 'candidate': - { - var id = data['from']; - var candidateMap = data['candidate']; - var pc = _peerConnections[id]; - - if (pc != null) { - RTCIceCandidate candidate = new RTCIceCandidate( - candidateMap['candidate'], - candidateMap['sdpMid'], - candidateMap['sdpMLineIndex']); - pc.addCandidate(candidate); - } - } - break; - case 'leave': - { - var id = data; - _peerConnections.remove(id); - _daChannels.remove(id); - - if (_localStream != null) { - _localStream.dispose(); - _localStream = null; - } - - var pc = _peerConnections[id]; - if (pc != null) { - pc.close(); - _peerConnections.remove(id); - } - this._session_id = null; - if (this.onStateChange != null) { - this.onStateChange(SignalingState.CallStateBye); - } - } - break; - case 'bye': - { var from = data['from']; - var to = data['to']; - var session_id = data['session_id']; - print('bye: ' + session_id); - - if (_localStream != null) { - _localStream.dispose(); - _localStream = null; - } - - - var pc = _peerConnections[to]; - if (pc != null) { - pc.close(); - _peerConnections.remove(to); - } - this._session_id = null; - if (this.onStateChange != null) { - 
this.onStateChange(SignalingState.CallStateBye); - } - } - break; - case 'keepalive': - { - print('keepalive response!'); - } - break; - default: - break; - } - } - - void connect() async { - try { - _socket = await WebSocket.connect(_url); - - if (this.onStateChange != null) { - this.onStateChange(SignalingState.ConnectionOpen); - } - - _socket.listen((data) { - print('Recivied data: ' + data); - this.onMessage(JSON.decode(data)); - }, onDone: () { - print('Closed by server!'); - if (this.onStateChange != null) { - this.onStateChange(SignalingState.ConnectionClosed); - } - }); - - _send('new', { - 'name': _name, - 'id': _self_id, - 'user_agent': 'flutter-webrtc/'+ Platform.operatingSystem +'-plugin 0.0.1' - }); - }catch(e){ - if(this.onStateChange != null){ - this.onStateChange(SignalingState.ConnectionError); - } - } - } - - Future createStream() async { - final Map mediaConstraints = { - 'audio': true, - 'video': { - 'mandatory': { - 'minWidth': - '640', // Provide your own width, height and frame rate here - 'minHeight': '480', - 'minFrameRate': '30', - }, - 'facingMode': 'user', - 'optional': [], - } - }; - - MediaStream stream = await navigator.getUserMedia(mediaConstraints); - if(this.onLocalStream != null){ - this.onLocalStream(stream); - } - return stream; - } - - _createPeerConnection(id, media) async { - _localStream = await createStream(); - RTCPeerConnection pc = await createPeerConnection(_iceServers, _config); - pc.addStream(_localStream); - pc.onIceCandidate = (candidate) { - _send('candidate', { - 'to': id, - 'candidate': { - 'sdpMLineIndex': candidate.sdpMlineIndex, - 'sdpMid': candidate.sdpMid, - 'candidate': candidate.candidate, - }, - 'session_id': this._session_id, - }); - }; - - pc.onAddStream = ((stream) { - if(this.onAddRemoteStream != null) - this.onAddRemoteStream(stream); - //_remoteStreams.add(stream); - }); - - pc.onRemoveStream = (stream) { - if(this.onRemoveRemoteStream != null) - this.onRemoveRemoteStream(stream); - 
_remoteStreams.removeWhere((it) { - return (it.id == stream.id); - }); - }; - - pc.onDataChannel = (channel) { - _addDataChannel(id, channel); - }; - - return pc; - } - - _addDataChannel(id, RTCDataChannel channel) { - channel.onDataChannelState = (e) {}; - channel.onMessage = (data) { - if(this.onDataChannel != null) - this.onDataChannel(channel, data); - }; - _daChannels[id] = channel; - } - - _createDataChannel(id, RTCPeerConnection pc, {label: 'fileTransfer'}) async { - RTCDataChannelInit dataChannelDict = new RTCDataChannelInit(); - RTCDataChannel channel = await pc.createDataChannel(label, dataChannelDict); - _addDataChannel(id, channel); - } - - _createOffer(String id, RTCPeerConnection pc, String media) async { - try { - RTCSessionDescription s = await pc.createOffer(_constraints); - pc.setLocalDescription(s); - _send('offer', { - 'to': id, - 'description': {'sdp': s.sdp, 'type': s.type}, - 'session_id': this._session_id, - 'media': media, - }); - } catch (e) { - print(e.toString()); - } - } - - _createAnswer(String id, RTCPeerConnection pc) async { - try { - RTCSessionDescription s = await pc.createAnswer(_constraints); - pc.setLocalDescription(s); - _send('answer', { - 'to': id, - 'description': {'sdp': s.sdp, 'type': s.type}, - 'session_id': this._session_id, - }); - } catch (e) { - print(e.toString()); - } - } - - _send(event, data) { - data['type'] = event; - if (_socket != null) _socket.add(JSON.encode(data)); - print('send: ' + JSON.encode(data)); - } - - _handleStatsReport(Timer timer, pc) async { - if (pc != null) { - List reports = await pc.getStats(null); - reports.forEach((report) { - print("report => { "); - print(" id: " + report.id + ","); - print(" type: " + report.type + ","); - print(" timestamp: ${report.timestamp},"); - print(" values => {"); - report.values.forEach((key, value) { - print(" " + key + " : " + value + ", "); - }); - print(" }"); - print("}"); - }); - } - } -} diff --git a/example/lib/src/capture_frame_sample.dart 
b/example/lib/src/capture_frame_sample.dart new file mode 100644 index 0000000000..56a3f77464 --- /dev/null +++ b/example/lib/src/capture_frame_sample.dart @@ -0,0 +1,57 @@ +import 'dart:typed_data'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class CaptureFrameSample extends StatefulWidget { + @override + State createState() => _CaptureFrameSample(); +} + +class _CaptureFrameSample extends State { + Uint8List? _data; + + void _captureFrame() async { + final stream = await navigator.mediaDevices.getUserMedia({ + 'audio': false, + 'video': true, + }); + + final track = stream.getVideoTracks().first; + final buffer = await track.captureFrame(); + + stream.getTracks().forEach((track) => track.stop()); + + setState(() { + _data = buffer.asUint8List(); + }); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Capture Frame'), + ), + floatingActionButton: FloatingActionButton( + onPressed: _captureFrame, + child: Icon(Icons.camera_alt_outlined), + ), + body: Builder(builder: (context) { + final data = _data; + + if (data == null) { + return Container(); + } + return Center( + child: Image.memory( + data, + fit: BoxFit.contain, + width: double.infinity, + height: double.infinity, + ), + ); + }), + ); + } +} diff --git a/example/lib/src/device_enumeration_sample.dart b/example/lib/src/device_enumeration_sample.dart new file mode 100644 index 0000000000..4630001572 --- /dev/null +++ b/example/lib/src/device_enumeration_sample.dart @@ -0,0 +1,447 @@ +import 'dart:core'; +import 'package:collection/collection.dart'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:permission_handler/permission_handler.dart'; + +class VideoSize { + VideoSize(this.width, this.height); + + factory VideoSize.fromString(String size) { + final parts = size.split('x'); + 
return VideoSize(int.parse(parts[0]), int.parse(parts[1])); + } + final int width; + final int height; + + @override + String toString() { + return '$width x $height'; + } +} + +/* + * DeviceEnumerationSample + */ +class DeviceEnumerationSample extends StatefulWidget { + static String tag = 'DeviceEnumerationSample'; + + @override + _DeviceEnumerationSampleState createState() => + _DeviceEnumerationSampleState(); +} + +class _DeviceEnumerationSampleState extends State { + MediaStream? _localStream; + final RTCVideoRenderer _localRenderer = RTCVideoRenderer(); + final RTCVideoRenderer _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + + List _devices = []; + + List get audioInputs => + _devices.where((device) => device.kind == 'audioinput').toList(); + + List get audioOutputs => + _devices.where((device) => device.kind == 'audiooutput').toList(); + + List get videoInputs => + _devices.where((device) => device.kind == 'videoinput').toList(); + + String? _selectedVideoInputId; + String? _selectedAudioInputId; + + MediaDeviceInfo get selectedAudioInput => audioInputs.firstWhere( + (device) => device.deviceId == _selectedVideoInputId, + orElse: () => audioInputs.first); + + String? _selectedVideoFPS = '30'; + + VideoSize _selectedVideoSize = VideoSize(1280, 720); + + @override + void initState() { + super.initState(); + + initRenderers(); + loadDevices(); + navigator.mediaDevices.ondevicechange = (event) { + loadDevices(); + }; + } + + @override + void deactivate() { + super.deactivate(); + _stop(); + _localRenderer.dispose(); + _remoteRenderer.dispose(); + navigator.mediaDevices.ondevicechange = null; + } + + RTCPeerConnection? pc1; + RTCPeerConnection? 
pc2; + var senders = []; + + Future initPCs() async { + pc2 ??= await createPeerConnection({}); + pc1 ??= await createPeerConnection({}); + + pc2?.onTrack = (event) { + if (event.track.kind == 'video') { + _remoteRenderer.srcObject = event.streams[0]; + setState(() {}); + } + }; + + pc2?.onConnectionState = (state) { + print('connectionState $state'); + }; + + pc2?.onIceConnectionState = (state) { + print('iceConnectionState $state'); + }; + + await pc2?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, + init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + await pc2?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + + pc1!.onIceCandidate = (candidate) => pc2!.addCandidate(candidate); + pc2!.onIceCandidate = (candidate) => pc1!.addCandidate(candidate); + } + + Future _negotiate() async { + var offer = await pc1?.createOffer(); + await pc1?.setLocalDescription(offer!); + await pc2?.setRemoteDescription(offer!); + var answer = await pc2?.createAnswer(); + await pc2?.setLocalDescription(answer!); + await pc1?.setRemoteDescription(answer!); + } + + Future stopPCs() async { + await pc1?.close(); + await pc2?.close(); + pc1 = null; + pc2 = null; + } + + Future loadDevices() async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + //Ask for runtime permissions if necessary. 
+ var status = await Permission.bluetooth.request(); + if (status.isPermanentlyDenied) { + print('BLEpermdisabled'); + } + + status = await Permission.bluetoothConnect.request(); + if (status.isPermanentlyDenied) { + print('ConnectPermdisabled'); + } + } + final devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _devices = devices; + }); + } + + Future _selectVideoFps(String fps) async { + _selectedVideoFPS = fps; + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectVideoSize(String size) async { + _selectedVideoSize = VideoSize.fromString(size); + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectAudioInput(String? deviceId) async { + _selectedAudioInputId = deviceId; + if (!_inCalling) { + return; + } + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': { + if (_selectedAudioInputId != null && kIsWeb) + 'deviceId': _selectedAudioInputId, + if (_selectedAudioInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedAudioInputId} + ], + }, + 'video': false, + }); + + // replace track. + var newTrack = newLocalStream.getAudioTracks().first; + print('track.settings ' + newTrack.getSettings().toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'audio'); + await sender?.replaceTrack(newTrack); + } + + Future _selectAudioOutput(String? deviceId) async { + if (!_inCalling) { + return; + } + await _localRenderer.audioOutput(deviceId!); + } + + var _speakerphoneOn = false; + + Future _setSpeakerphoneOn() async { + _speakerphoneOn = !_speakerphoneOn; + await Helper.setSpeakerphoneOn(_speakerphoneOn); + setState(() {}); + } + + Future _selectVideoInput(String? deviceId) async { + _selectedVideoInputId = deviceId; + if (!_inCalling) { + return; + } + // 2) replace track. + // stop old track. 
+ _localRenderer.srcObject = null; + + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + await _localStream?.dispose(); + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': false, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + }, + }); + _localStream = newLocalStream; + _localRenderer.srcObject = _localStream; + // replace track. + var newTrack = _localStream?.getVideoTracks().first; + print('track.settings ' + newTrack!.getSettings().toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'video'); + var params = sender!.parameters; + print('params degradationPreference' + + params.degradationPreference.toString()); + params.degradationPreference = RTCDegradationPreference.MAINTAIN_RESOLUTION; + await sender.setParameters(params); + await sender.replaceTrack(newTrack); + } + + Future initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + Future _start() async { + try { + _localStream = await navigator.mediaDevices.getUserMedia({ + 'audio': true, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + }, + }); + _localRenderer.srcObject = _localStream; + _inCalling = true; + + await initPCs(); + + _localStream?.getTracks().forEach((track) async { + var rtpSender = await pc1?.addTrack(track, _localStream!); + print('track.settings ' + track.getSettings().toString()); + senders.add(rtpSender!); + }); + + 
await _negotiate(); + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + Future _stop() async { + try { + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + await _localStream?.dispose(); + _localStream = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + senders.clear(); + _inCalling = false; + await stopPCs(); + _speakerphoneOn = false; + await Helper.setSpeakerphoneOn(_speakerphoneOn); + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('DeviceEnumerationSample'), + actions: [ + PopupMenuButton( + onSelected: _selectAudioInput, + icon: Icon(Icons.settings_voice), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audioinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + if (!WebRTC.platformIsMobile) + PopupMenuButton( + onSelected: _selectAudioOutput, + icon: Icon(Icons.volume_down_alt), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + if (!kIsWeb && WebRTC.platformIsMobile) + IconButton( + disabledColor: Colors.grey, + onPressed: _setSpeakerphoneOn, + icon: Icon( + _speakerphoneOn ? 
Icons.speaker_phone : Icons.phone_android), + tooltip: 'Switch SpeakerPhone', + ), + PopupMenuButton( + onSelected: _selectVideoInput, + icon: Icon(Icons.switch_camera), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'videoinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + PopupMenuButton( + onSelected: _selectVideoFps, + icon: Icon(Icons.menu), + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + value: _selectedVideoFPS, + child: Text('Select FPS ($_selectedVideoFPS)'), + ), + PopupMenuDivider(), + ...['8', '15', '30', '60'] + .map((fps) => PopupMenuItem( + value: fps, + child: Text(fps), + )) + .toList() + ]; + }, + ), + PopupMenuButton( + onSelected: _selectVideoSize, + icon: Icon(Icons.screenshot_monitor), + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + value: _selectedVideoSize.toString(), + child: Text('Select Video Size ($_selectedVideoSize)'), + ), + PopupMenuDivider(), + ...['320x180', '640x360', '1280x720', '1920x1080'] + .map((fps) => PopupMenuItem( + value: fps, + child: Text(fps), + )) + .toList() + ]; + }, + ), + ], + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + width: MediaQuery.of(context).size.width, + color: Colors.white10, + child: Row( + children: [ + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer), + ), + ), + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_remoteRenderer), + ), + ), + ], + )), + ); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: () { + _inCalling ? _stop() : _start(); + }, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? 
Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/example/lib/src/get_display_media_sample.dart b/example/lib/src/get_display_media_sample.dart new file mode 100644 index 0000000000..cb74238fb6 --- /dev/null +++ b/example/lib/src/get_display_media_sample.dart @@ -0,0 +1,179 @@ +import 'dart:core'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_background/flutter_background.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:flutter_webrtc_example/src/widgets/screen_select_dialog.dart'; + +/* + * getDisplayMedia sample + */ +class GetDisplayMediaSample extends StatefulWidget { + static String tag = 'get_display_media_sample'; + + @override + _GetDisplayMediaSampleState createState() => _GetDisplayMediaSampleState(); +} + +class _GetDisplayMediaSampleState extends State { + MediaStream? _localStream; + final RTCVideoRenderer _localRenderer = RTCVideoRenderer(); + bool _inCalling = false; + DesktopCapturerSource? selected_source_; + + @override + void initState() { + super.initState(); + initRenderers(); + } + + @override + void deactivate() { + super.deactivate(); + if (_inCalling) { + _stop(); + } + _localRenderer.dispose(); + } + + Future initRenderers() async { + await _localRenderer.initialize(); + } + + Future selectScreenSourceDialog(BuildContext context) async { + if (WebRTC.platformIsDesktop) { + final source = await showDialog( + context: context, + builder: (context) => ScreenSelectDialog(), + ); + if (source != null) { + await _makeCall(source); + } + } else { + if (WebRTC.platformIsAndroid) { + // Android specific + Future requestBackgroundPermission([bool isRetry = false]) async { + // Required for android screenshare. 
+ try { + var hasPermissions = await FlutterBackground.hasPermissions; + if (!isRetry) { + const androidConfig = FlutterBackgroundAndroidConfig( + notificationTitle: 'Screen Sharing', + notificationText: 'LiveKit Example is sharing the screen.', + notificationImportance: AndroidNotificationImportance.normal, + notificationIcon: AndroidResource( + name: 'livekit_ic_launcher', defType: 'mipmap'), + ); + hasPermissions = await FlutterBackground.initialize( + androidConfig: androidConfig); + } + if (hasPermissions && + !FlutterBackground.isBackgroundExecutionEnabled) { + await FlutterBackground.enableBackgroundExecution(); + } + } catch (e) { + if (!isRetry) { + return await Future.delayed(const Duration(seconds: 1), + () => requestBackgroundPermission(true)); + } + print('could not publish video: $e'); + } + } + + await requestBackgroundPermission(); + } + await _makeCall(null); + } + } + + // Platform messages are asynchronous, so we initialize in an async method. + Future _makeCall(DesktopCapturerSource? source) async { + setState(() { + selected_source_ = source; + }); + + try { + var stream = + await navigator.mediaDevices.getDisplayMedia({ + 'video': selected_source_ == null + ? 
true + : { + 'deviceId': {'exact': selected_source_!.id}, + 'mandatory': {'frameRate': 30.0} + } + }); + stream.getVideoTracks()[0].onEnded = () { + print( + 'By adding a listener on onEnded you can: 1) catch stop video sharing on Web'); + }; + + _localStream = stream; + _localRenderer.srcObject = _localStream; + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + Future _stop() async { + try { + if (kIsWeb) { + _localStream?.getTracks().forEach((track) => track.stop()); + } + await _localStream?.dispose(); + _localStream = null; + _localRenderer.srcObject = null; + } catch (e) { + print(e.toString()); + } + } + + Future _hangUp() async { + await _stop(); + setState(() { + _inCalling = false; + }); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('GetDisplayMedia source: ' + + (selected_source_ != null ? selected_source_!.name : '')), + actions: [], + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + width: MediaQuery.of(context).size.width, + color: Colors.white10, + child: Stack(children: [ + if (_inCalling) + Container( + margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), + width: MediaQuery.of(context).size.width, + height: MediaQuery.of(context).size.height, + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer), + ) + ]), + )); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: () { + _inCalling ? _hangUp() : selectScreenSourceDialog(context); + }, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? 
Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart new file mode 100644 index 0000000000..9c2dea81d0 --- /dev/null +++ b/example/lib/src/get_user_media_sample.dart @@ -0,0 +1,310 @@ +import 'dart:core'; +import 'dart:io'; +import 'dart:math'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:gallery_saver_plus/gallery_saver.dart'; +import 'package:path_provider/path_provider.dart'; + +/* + * getUserMedia sample + */ +class GetUserMediaSample extends StatefulWidget { + static String tag = 'get_usermedia_sample'; + + @override + _GetUserMediaSampleState createState() => _GetUserMediaSampleState(); +} + +class _GetUserMediaSampleState extends State { + MediaStream? _localStream; + final _localRenderer = RTCVideoRenderer(); + bool _inCalling = false; + bool _isTorchOn = false; + bool _isFrontCamera = true; + MediaRecorder? _mediaRecorder; + String? _mediaRecorderFilePath; + + bool get _isRec => _mediaRecorder != null; + + List? _mediaDevicesList; + + @override + void initState() { + super.initState(); + initRenderers(); + navigator.mediaDevices.ondevicechange = (event) async { + print('++++++ ondevicechange ++++++'); + _mediaDevicesList = await navigator.mediaDevices.enumerateDevices(); + }; + } + + @override + void deactivate() { + super.deactivate(); + if (_inCalling) { + _hangUp(); + } + _localRenderer.dispose(); + navigator.mediaDevices.ondevicechange = null; + } + + void initRenderers() async { + await _localRenderer.initialize(); + } + + // Platform messages are asynchronous, so we initialize in an async method. 
+ void _makeCall() async { + final mediaConstraints = { + 'audio': false, + 'video': { + 'mandatory': { + 'minWidth': + '640', // Provide your own width, height and frame rate here + 'minHeight': '480', + 'minFrameRate': '30', + }, + 'facingMode': 'user', + 'optional': [], + } + }; + + try { + var stream = await navigator.mediaDevices.getUserMedia(mediaConstraints); + _mediaDevicesList = await navigator.mediaDevices.enumerateDevices(); + _localStream = stream; + _localRenderer.srcObject = _localStream; + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + void _hangUp() async { + try { + if (kIsWeb) { + _localStream?.getTracks().forEach((track) => track.stop()); + } + await _localStream?.dispose(); + _localRenderer.srcObject = null; + setState(() { + _inCalling = false; + }); + } catch (e) { + print(e.toString()); + } + } + + void _startRecording() async { + if (_localStream == null) throw Exception('Stream is not initialized'); + // TODO(rostopira): request write storage permission + final timestamp = DateTime.now().millisecondsSinceEpoch; + + if (!(Platform.isAndroid || Platform.isIOS || Platform.isMacOS)) { + throw 'Unsupported platform'; + } + + final tempDir = await getTemporaryDirectory(); + if (!(await tempDir.exists())) { + await tempDir.create(recursive: true); + } + + _mediaRecorderFilePath = '${tempDir.path}/$timestamp.mp4'; + + if (_mediaRecorderFilePath == null) { + throw Exception('Can\'t find storagePath'); + } + + final file = File(_mediaRecorderFilePath!); + if (await file.exists()) { + await file.delete(); + } + _mediaRecorder = MediaRecorder(albumName: 'FlutterWebRTC'); + setState(() {}); + + final videoTrack = _localStream! 
+ .getVideoTracks() + .firstWhere((track) => track.kind == 'video'); + + await _mediaRecorder!.start( + _mediaRecorderFilePath!, + videoTrack: videoTrack, + audioChannel: RecorderAudioChannel.OUTPUT, + ); + } + + void _stopRecording() async { + if (_mediaRecorderFilePath == null) { + return; + } + + // album name works only for android, for ios use gallerySaver + await _mediaRecorder?.stop(); + setState(() { + _mediaRecorder = null; + }); + + // this is only for ios, android already saves to albumName + await GallerySaver.saveVideo( + _mediaRecorderFilePath!, + albumName: 'FlutterWebRTC', + ); + + _mediaRecorderFilePath = null; + } + + void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) { + final point = Point( + details.localPosition.dx / constraints.maxWidth, + details.localPosition.dy / constraints.maxHeight, + ); + Helper.setFocusPoint(_localStream!.getVideoTracks().first, point); + Helper.setExposurePoint(_localStream!.getVideoTracks().first, point); + } + + void _toggleTorch() async { + if (_localStream == null) throw Exception('Stream is not initialized'); + + final videoTrack = _localStream! + .getVideoTracks() + .firstWhere((track) => track.kind == 'video'); + final has = await videoTrack.hasTorch(); + if (has) { + print('[TORCH] Current camera supports torch mode'); + setState(() => _isTorchOn = !_isTorchOn); + await videoTrack.setTorch(_isTorchOn); + print('[TORCH] Torch state is now ${_isTorchOn ? 'on' : 'off'}'); + } else { + print('[TORCH] Current camera does not support torch mode'); + } + } + + void setZoom(double zoomLevel) async { + if (_localStream == null) throw Exception('Stream is not initialized'); + // await videoTrack.setZoom(zoomLevel); //Use it after published webrtc_interface 1.1.1 + + // before the release, use can just call native method directly. + final videoTrack = _localStream! 
+ .getVideoTracks() + .firstWhere((track) => track.kind == 'video'); + await Helper.setZoom(videoTrack, zoomLevel); + } + + void _switchCamera() async { + if (_localStream == null) throw Exception('Stream is not initialized'); + + final videoTrack = _localStream! + .getVideoTracks() + .firstWhere((track) => track.kind == 'video'); + await Helper.switchCamera(videoTrack); + setState(() { + _isFrontCamera = _isFrontCamera; + }); + } + + void _captureFrame() async { + if (_localStream == null) throw Exception('Stream is not initialized'); + + final videoTrack = _localStream! + .getVideoTracks() + .firstWhere((track) => track.kind == 'video'); + final frame = await videoTrack.captureFrame(); + await showDialog( + context: context, + builder: (context) => AlertDialog( + content: + Image.memory(frame.asUint8List(), height: 720, width: 1280), + actions: [ + TextButton( + onPressed: Navigator.of(context, rootNavigator: true).pop, + child: Text('OK'), + ) + ], + )); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('GetUserMedia API Test'), + actions: _inCalling + ? [ + IconButton( + icon: Icon(_isTorchOn ? Icons.flash_off : Icons.flash_on), + onPressed: _toggleTorch, + ), + IconButton( + icon: Icon(Icons.switch_video), + onPressed: _switchCamera, + ), + IconButton( + icon: Icon(Icons.camera), + onPressed: _captureFrame, + ), + IconButton( + icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record), + onPressed: _isRec ? _stopRecording : _startRecording, + ), + PopupMenuButton( + onSelected: _selectAudioOutput, + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! 
+ .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), + ] + : null, + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), + width: MediaQuery.of(context).size.width, + height: MediaQuery.of(context).size.height, + decoration: BoxDecoration(color: Colors.black54), + child: LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) { + return GestureDetector( + onScaleStart: (details) {}, + onScaleUpdate: (details) { + if (details.scale != 1.0) { + setZoom(details.scale); + } + }, + onTapDown: (TapDownDetails details) => + onViewFinderTap(details, constraints), + child: RTCVideoView(_localRenderer, mirror: false), + ); + }), + )); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: _inCalling ? _hangUp : _makeCall, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ), + ); + } + + void _selectAudioOutput(String deviceId) { + _localRenderer.audioOutput(deviceId); + } +} diff --git a/example/lib/src/get_user_media_sample_web.dart b/example/lib/src/get_user_media_sample_web.dart new file mode 100644 index 0000000000..7df81cafad --- /dev/null +++ b/example/lib/src/get_user_media_sample_web.dart @@ -0,0 +1,207 @@ +// ignore: uri_does_not_exist +import 'dart:core'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +import 'package:web/web.dart' as web; + +/* + * getUserMedia sample + */ +class GetUserMediaSample extends StatefulWidget { + static String tag = 'get_usermedia_sample'; + + @override + _GetUserMediaSampleState createState() => _GetUserMediaSampleState(); +} + +class _GetUserMediaSampleState extends State { + MediaStream? 
_localStream; + final _localRenderer = RTCVideoRenderer(); + bool _inCalling = false; + MediaRecorder? _mediaRecorder; + + List? _cameras; + + bool get _isRec => _mediaRecorder != null; + List? cameras; + + @override + void initState() { + super.initState(); + initRenderers(); + + navigator.mediaDevices.enumerateDevices().then((md) { + setState(() { + cameras = md.where((d) => d.kind == 'videoinput').toList(); + }); + }); + } + + @override + void deactivate() { + super.deactivate(); + if (_inCalling) { + _stop(); + } + _localRenderer.dispose(); + } + + void initRenderers() async { + await _localRenderer.initialize(); + } + + // Platform messages are asynchronous, so we initialize in an async method. + void _makeCall() async { + final mediaConstraints = { + 'audio': true, + 'video': { + 'mandatory': { + 'minWidth': + '1280', // Provide your own width, height and frame rate here + 'minHeight': '720', + 'minFrameRate': '30', + }, + } + }; + + try { + var stream = await navigator.mediaDevices.getUserMedia(mediaConstraints); + _cameras = await Helper.cameras; + _localStream = stream; + _localRenderer.srcObject = _localStream; + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + Future _stop() async { + try { + if (kIsWeb) { + _localStream?.getTracks().forEach((track) => track.stop()); + } + await _localStream?.dispose(); + _localStream = null; + _localRenderer.srcObject = null; + } catch (e) { + print(e.toString()); + } + } + + void _hangUp() async { + await _stop(); + setState(() { + _inCalling = false; + }); + } + + void _startRecording() async { + if (_localStream == null) throw Exception('Can\'t record without a stream'); + _mediaRecorder = MediaRecorder(); + setState(() {}); + _mediaRecorder?.startWeb(_localStream!); + } + + void _stopRecording() async { + final objectUrl = await _mediaRecorder?.stop(); + setState(() { + _mediaRecorder = null; + }); + print(objectUrl); + // ignore: unsafe_html + 
web.window.open(objectUrl, '_blank'); + } + + void _captureFrame() async { + if (_localStream == null) throw Exception('Can\'t record without a stream'); + final videoTrack = _localStream! + .getVideoTracks() + .firstWhere((track) => track.kind == 'video'); + final frame = await videoTrack.captureFrame(); + await showDialog( + context: context, + builder: (context) => AlertDialog( + content: + Image.memory(frame.asUint8List(), height: 720, width: 1280), + actions: [ + TextButton( + onPressed: Navigator.of(context, rootNavigator: true).pop, + child: Text('OK'), + ) + ], + )); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('GetUserMedia API Test Web'), + actions: _inCalling + ? [ + IconButton( + icon: Icon(Icons.camera), + onPressed: _captureFrame, + ), + IconButton( + icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record), + onPressed: _isRec ? _stopRecording : _startRecording, + ), + PopupMenuButton( + onSelected: _switchCamera, + itemBuilder: (BuildContext context) { + if (_cameras != null) { + return _cameras!.map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } else { + return []; + } + }, + ), + // IconButton( + // icon: Icon(Icons.settings), + // onPressed: _switchCamera, + // ) + ] + : null, + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + margin: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0), + width: MediaQuery.of(context).size.width, + height: MediaQuery.of(context).size.height, + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer, mirror: true), + ), + ); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: _inCalling ? _hangUp : _makeCall, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? 
Icons.call_end : Icons.phone), + ), + ); + } + + void _switchCamera(String deviceId) async { + if (_localStream == null) return; + + await Helper.switchCamera( + _localStream!.getVideoTracks()[0], deviceId, _localStream); + setState(() {}); + } +} diff --git a/example/lib/src/loopback_data_channel_sample.dart b/example/lib/src/loopback_data_channel_sample.dart new file mode 100644 index 0000000000..ba02873324 --- /dev/null +++ b/example/lib/src/loopback_data_channel_sample.dart @@ -0,0 +1,158 @@ +import 'dart:async'; +import 'dart:core'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class DataChannelLoopBackSample extends StatefulWidget { + static String tag = 'data_channel_sample'; + + @override + _DataChannelLoopBackSampleState createState() => + _DataChannelLoopBackSampleState(); +} + +class _DataChannelLoopBackSampleState extends State { + RTCPeerConnection? _peerConnection1; + RTCPeerConnection? _peerConnection2; + RTCDataChannel? _dataChannel1; + RTCDataChannel? 
_dataChannel2; + String _dataChannel1Status = ''; + String _dataChannel2Status = ''; + + bool _inCalling = false; + + void _makeCall() async { + if (_peerConnection1 != null || _peerConnection2 != null) return; + + try { + _peerConnection1 = await createPeerConnection({'iceServers': []}); + _peerConnection2 = await createPeerConnection({'iceServers': []}); + + _peerConnection1!.onIceCandidate = (candidate) { + print('peerConnection1: onIceCandidate: ${candidate.candidate}'); + _peerConnection2!.addCandidate(candidate); + }; + + _peerConnection2!.onIceCandidate = (candidate) { + print('peerConnection2: onIceCandidate: ${candidate.candidate}'); + _peerConnection1!.addCandidate(candidate); + }; + + _dataChannel1 = await _peerConnection1!.createDataChannel( + 'peerConnection1-dc', RTCDataChannelInit()..id = 1); + + _peerConnection2!.onDataChannel = (channel) { + _dataChannel2 = channel; + _dataChannel2!.onDataChannelState = (state) { + setState(() { + _dataChannel2Status += '\ndataChannel2: state: ${state.toString()}'; + }); + }; + _dataChannel2!.onMessage = (data) async { + var bufferedAmount = await _dataChannel2!.getBufferedAmount(); + setState(() { + _dataChannel2Status += + '\ndataChannel2: Received message: ${data.text}, bufferedAmount: $bufferedAmount'; + }); + + await _dataChannel2!.send(RTCDataChannelMessage( + '(dataChannel2 ==> dataChannel1) Hello from dataChannel2 echo !!!')); + }; + }; + + _dataChannel1!.onDataChannelState = (state) { + setState(() { + _dataChannel1Status += '\ndataChannel1: state: ${state.toString()}'; + }); + if (state == RTCDataChannelState.RTCDataChannelOpen) { + _dataChannel1!.send(RTCDataChannelMessage( + '(dataChannel1 ==> dataChannel2) Hello from dataChannel1 !!!')); + } + }; + + _dataChannel1!.onMessage = (data) async { + var bufferedAmount = await _dataChannel2!.getBufferedAmount(); + _dataChannel1Status += + '\ndataChannel1: Received message: ${data.text}, bufferedAmount: $bufferedAmount'; + setState(() {}); + }; + + var offer = 
await _peerConnection1!.createOffer({}); + print('peerConnection1 offer: ${offer.sdp}'); + + await _peerConnection2!.setRemoteDescription(offer); + var answer = await _peerConnection2!.createAnswer(); + print('peerConnection2 answer: ${answer.sdp}'); + + await _peerConnection1!.setLocalDescription(offer); + await _peerConnection2!.setLocalDescription(answer); + + await _peerConnection1!.setRemoteDescription(answer); + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + void _hangUp() async { + try { + await _dataChannel1?.close(); + setState(() { + _dataChannel1Status += '\n _dataChannel1.close()'; + }); + await _dataChannel2?.close(); + await _peerConnection1?.close(); + await _peerConnection2?.close(); + _peerConnection1 = null; + _peerConnection2 = null; + } catch (e) { + print(e.toString()); + } + setState(() { + _inCalling = false; + }); + + Timer(const Duration(seconds: 1), () { + if (mounted) { + setState(() { + _dataChannel1Status = ''; + _dataChannel2Status = ''; + }); + } + }); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('Data Channel Test'), + ), + body: Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Text('(caller)data channel 1:\n'), + Container( + child: Text(_dataChannel1Status), + ), + Text('\n\n(callee)data channel 2:\n'), + Container( + child: Text(_dataChannel2Status), + ), + ], + ), + ), + floatingActionButton: FloatingActionButton( + onPressed: _inCalling ? _hangUp : _makeCall, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? 
Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/example/lib/src/loopback_sample_unified_tracks.dart b/example/lib/src/loopback_sample_unified_tracks.dart new file mode 100644 index 0000000000..ba84cabbf2 --- /dev/null +++ b/example/lib/src/loopback_sample_unified_tracks.dart @@ -0,0 +1,903 @@ +import 'dart:async'; +import 'dart:core'; + +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class LoopBackSampleUnifiedTracks extends StatefulWidget { + static String tag = 'loopback_sample_unified_tracks'; + + @override + _MyAppState createState() => _MyAppState(); +} + +const List audioCodecList = [ + 'OPUS', + 'ISAC', + 'PCMA', + 'PCMU', + 'G729' +]; +const List videoCodecList = ['VP8', 'VP9', 'H264', 'AV1']; + +class _MyAppState extends State { + String audioDropdownValue = audioCodecList.first; + String videoDropdownValue = videoCodecList.first; + RTCRtpCapabilities? acaps; + RTCRtpCapabilities? vcaps; + MediaStream? _localStream; + RTCPeerConnection? _localPeerConnection; + RTCPeerConnection? _remotePeerConnection; + RTCRtpSender? _videoSender; + RTCRtpSender? _audioSender; + final _localRenderer = RTCVideoRenderer(); + final _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + bool _micOn = false; + bool _cameraOn = false; + bool _speakerOn = false; + bool _audioEncrypt = false; + bool _videoEncrypt = false; + bool _audioDecrypt = false; + bool _videoDecrypt = false; + List? _mediaDevicesList; + final FrameCryptorFactory _frameCyrptorFactory = frameCryptorFactory; + KeyProvider? _keySharedProvider; + final Map _frameCyrptors = {}; + Timer? 
_timer; + final _configuration = { + 'iceServers': [ + {'urls': 'stun:stun.l.google.com:19302'}, + ], + 'sdpSemantics': 'unified-plan', + 'encodedInsertableStreams': true, + }; + + final _constraints = { + 'mandatory': {}, + 'optional': [ + {'DtlsSrtpKeyAgreement': false}, + ], + }; + + final demoRatchetSalt = 'flutter-webrtc-ratchet-salt'; + + final aesKey = Uint8List.fromList([ + 200, + 244, + 58, + 72, + 214, + 245, + 86, + 82, + 192, + 127, + 23, + 153, + 167, + 172, + 122, + 234, + 140, + 70, + 175, + 74, + 61, + 11, + 134, + 58, + 185, + 102, + 172, + 17, + 11, + 6, + 119, + 253 + ]); + + @override + void initState() { + print('Init State'); + super.initState(); + + _refreshMediaDevices(); + navigator.mediaDevices.ondevicechange = (event) async { + print('++++++ ondevicechange ++++++'); + var devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _mediaDevicesList = devices; + }); + }; + } + + @override + void deactivate() { + super.deactivate(); + navigator.mediaDevices.ondevicechange = null; + _cleanUp(); + } + + Future _refreshMediaDevices() async { + var devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _mediaDevicesList = devices; + }); + } + + void _selectAudioOutput(String deviceId) async { + await _localRenderer.audioOutput(deviceId); + } + + void _selectAudioInput(String deviceId) async { + if (!WebRTC.platformIsWeb) { + await Helper.selectAudioInput(deviceId); + } + } + + void _cleanUp() async { + try { + await _localStream?.dispose(); + await _remotePeerConnection?.close(); + await _remotePeerConnection?.dispose(); + _remotePeerConnection = null; + await _localPeerConnection?.close(); + await _localPeerConnection?.dispose(); + _localPeerConnection = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + await _localRenderer.dispose(); + await _remoteRenderer.dispose(); + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + setState(() { + _inCalling = false; + 
_cameraOn = false; + _micOn = false; + }); + } + + void initRenderers() async { + print('Init Renderers'); + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + void initLocalConnection() async { + if (_localPeerConnection != null) return; + try { + var pc = await createPeerConnection(_configuration, _constraints); + + pc.onSignalingState = (state) async { + var state2 = await pc.getSignalingState(); + print('local pc: onSignalingState($state), state2($state2)'); + }; + + pc.onIceGatheringState = (state) async { + var state2 = await pc.getIceGatheringState(); + print('local pc: onIceGatheringState($state), state2($state2)'); + }; + pc.onIceConnectionState = (state) async { + var state2 = await pc.getIceConnectionState(); + print('local pc: onIceConnectionState($state), state2($state2)'); + }; + pc.onConnectionState = (state) async { + var state2 = await pc.getConnectionState(); + print('local pc: onConnectionState($state), state2($state2)'); + }; + + pc.onIceCandidate = _onLocalCandidate; + pc.onRenegotiationNeeded = _onLocalRenegotiationNeeded; + + _localPeerConnection = pc; + } catch (e) { + print(e.toString()); + } + } + + void _onLocalCandidate(RTCIceCandidate localCandidate) async { + print('onLocalCandidate: ${localCandidate.candidate}'); + try { + var candidate = RTCIceCandidate( + localCandidate.candidate!, + localCandidate.sdpMid!, + localCandidate.sdpMLineIndex!, + ); + await _remotePeerConnection!.addCandidate(candidate); + } catch (e) { + print( + 'Unable to add candidate ${localCandidate.candidate} to remote connection'); + } + } + + void _onRemoteCandidate(RTCIceCandidate remoteCandidate) async { + print('onRemoteCandidate: ${remoteCandidate.candidate}'); + try { + var candidate = RTCIceCandidate( + remoteCandidate.candidate!, + remoteCandidate.sdpMid!, + remoteCandidate.sdpMLineIndex!, + ); + await _localPeerConnection!.addCandidate(candidate); + } catch (e) { + print( + 'Unable to add candidate 
${remoteCandidate.candidate} to local connection'); + } + } + + void _onTrack(RTCTrackEvent event) async { + print('onTrack ${event.track.id}'); + + if (event.track.kind == 'video') { + setState(() { + _remoteRenderer.srcObject = event.streams[0]; + }); + } + } + + void _onLocalRenegotiationNeeded() { + print('LocalRenegotiationNeeded'); + } + + void _onRemoteRenegotiationNeeded() { + print('RemoteRenegotiationNeeded'); + } + + // Platform messages are asynchronous, so we initialize in an async method. + void _makeCall() async { + initRenderers(); + initLocalConnection(); + + var keyProviderOptions = KeyProviderOptions( + sharedKey: true, + ratchetSalt: Uint8List.fromList(demoRatchetSalt.codeUnits), + ratchetWindowSize: 16, + failureTolerance: -1, + ); + + _keySharedProvider ??= + await _frameCyrptorFactory.createDefaultKeyProvider(keyProviderOptions); + await _keySharedProvider?.setSharedKey(key: aesKey); + acaps = await getRtpSenderCapabilities('audio'); + print('sender audio capabilities: ${acaps!.toMap()}'); + + vcaps = await getRtpSenderCapabilities('video'); + print('sender video capabilities: ${vcaps!.toMap()}'); + + if (_remotePeerConnection != null) return; + + try { + var pc = await createPeerConnection(_configuration, _constraints); + + pc.onTrack = _onTrack; + + pc.onSignalingState = (state) async { + var state2 = await pc.getSignalingState(); + print('remote pc: onSignalingState($state), state2($state2)'); + }; + + pc.onIceGatheringState = (state) async { + var state2 = await pc.getIceGatheringState(); + print('remote pc: onIceGatheringState($state), state2($state2)'); + }; + pc.onIceConnectionState = (state) async { + var state2 = await pc.getIceConnectionState(); + print('remote pc: onIceConnectionState($state), state2($state2)'); + }; + pc.onConnectionState = (state) async { + var state2 = await pc.getConnectionState(); + print('remote pc: onConnectionState($state), state2($state2)'); + }; + + pc.onIceCandidate = _onRemoteCandidate; + 
pc.onRenegotiationNeeded = _onRemoteRenegotiationNeeded; + _remotePeerConnection = pc; + await _negotiate(); + } catch (e) { + print(e.toString()); + } + + if (!mounted) return; + setState(() { + _inCalling = true; + }); + } + + Future _negotiate() async { + final oaConstraints = { + 'mandatory': { + 'OfferToReceiveAudio': true, + 'OfferToReceiveVideo': true, + }, + 'optional': [], + }; + + if (_remotePeerConnection == null) return; + + var offer = await _localPeerConnection!.createOffer({}); + await _localPeerConnection!.setLocalDescription(offer); + var localDescription = await _localPeerConnection!.getLocalDescription(); + + await _remotePeerConnection!.setRemoteDescription(localDescription!); + var answer = await _remotePeerConnection!.createAnswer(oaConstraints); + await _remotePeerConnection!.setLocalDescription(answer); + var remoteDescription = await _remotePeerConnection!.getLocalDescription(); + + await _localPeerConnection!.setRemoteDescription(remoteDescription!); + } + + void _enableEncryption({bool video = false, bool enabled = true}) async { + var senders = await _localPeerConnection?.senders; + + var kind = video ? 'video' : 'audio'; + + senders?.forEach((element) async { + if (kind != element.track?.kind) return; + + var trackId = element.track?.id; + var id = kind + '_' + trackId! + '_sender'; + if (!_frameCyrptors.containsKey(id)) { + var frameCyrptor = + await _frameCyrptorFactory.createFrameCryptorForRtpSender( + participantId: id, + sender: element, + algorithm: Algorithm.kAesGcm, + keyProvider: _keySharedProvider!); + frameCyrptor.onFrameCryptorStateChanged = (participantId, state) => + print('EN onFrameCryptorStateChanged $participantId $state'); + _frameCyrptors[id] = frameCyrptor; + await frameCyrptor.setKeyIndex(0); + } + + var _frameCyrptor = _frameCyrptors[id]; + await _frameCyrptor?.setEnabled(enabled); + await _frameCyrptor?.updateCodec( + kind == 'video' ? 
videoDropdownValue : audioDropdownValue); + }); + } + + void _enableDecryption({bool video = false, bool enabled = true}) async { + var receivers = await _remotePeerConnection?.receivers; + var kind = video ? 'video' : 'audio'; + receivers?.forEach((element) async { + if (kind != element.track?.kind) return; + var trackId = element.track?.id; + var id = kind + '_' + trackId! + '_receiver'; + if (!_frameCyrptors.containsKey(id)) { + var frameCyrptor = + await _frameCyrptorFactory.createFrameCryptorForRtpReceiver( + participantId: id, + receiver: element, + algorithm: Algorithm.kAesGcm, + keyProvider: _keySharedProvider!); + frameCyrptor.onFrameCryptorStateChanged = (participantId, state) => + print('DE onFrameCryptorStateChanged $participantId $state'); + _frameCyrptors[id] = frameCyrptor; + await frameCyrptor.setKeyIndex(0); + } + + var _frameCyrptor = _frameCyrptors[id]; + await _frameCyrptor?.setEnabled(enabled); + await _frameCyrptor?.updateCodec( + kind == 'video' ? videoDropdownValue : audioDropdownValue); + }); + } + + void _hangUp() async { + try { + await _remotePeerConnection?.close(); + await _remotePeerConnection?.dispose(); + _remotePeerConnection = null; + _remoteRenderer.srcObject = null; + } catch (e) { + print(e.toString()); + } + setState(() { + _inCalling = false; + }); + } + + void _ratchetKey() async { + var newKey = await _keySharedProvider?.ratchetSharedKey(index: 0); + print('newKey $newKey'); + } + + Map _getMediaConstraints({audio = true, video = true}) { + return { + 'audio': audio ? true : false, + 'video': video + ? 
{ + 'mandatory': { + 'minWidth': '640', + 'minHeight': '480', + 'minFrameRate': '30', + }, + 'facingMode': 'user', + 'optional': [], + } + : false, + }; + } + + void _sendDtmf() async { + var dtmfSender = _audioSender?.dtmfSender; + await dtmfSender?.insertDTMF('123#'); + } + + void _startVideo() async { + var newStream = await navigator.mediaDevices + .getUserMedia(_getMediaConstraints(audio: false, video: true)); + if (_localStream != null) { + await _removeExistingVideoTrack(); + var tracks = newStream.getVideoTracks(); + for (var newTrack in tracks) { + await _localStream!.addTrack(newTrack); + } + } else { + _localStream = newStream; + } + + await _addOrReplaceVideoTracks(); + + var transceivers = await _localPeerConnection?.getTransceivers(); + transceivers?.forEach((transceiver) { + if (transceiver.sender.senderId != _videoSender?.senderId) return; + var codecs = vcaps?.codecs + ?.where((element) => element.mimeType + .toLowerCase() + .contains(videoDropdownValue.toLowerCase())) + .toList() ?? 
+ []; + transceiver.setCodecPreferences(codecs); + }); + await _negotiate(); + + setState(() { + _localRenderer.srcObject = _localStream; + _cameraOn = true; + }); + + _timer?.cancel(); + _timer = Timer.periodic(Duration(seconds: 1), (timer) async { + //handleStatsReport(timer); + }); + } + + void _stopVideo() async { + _frameCyrptors.removeWhere((key, value) { + if (key.startsWith('video')) { + value.dispose(); + return true; + } + return false; + }); + + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + + await _removeExistingVideoTrack(fromConnection: true); + await _negotiate(); + setState(() { + _localRenderer.srcObject = null; + // onMute/onEnded/onUnmute are not wired up so having to force this here + _remoteRenderer.srcObject = null; + _cameraOn = false; + }); + _timer?.cancel(); + _timer = null; + } + + void _startAudio() async { + var newStream = await navigator.mediaDevices + .getUserMedia(_getMediaConstraints(audio: true, video: false)); + + if (_localStream != null) { + await _removeExistingAudioTrack(); + for (var newTrack in newStream.getAudioTracks()) { + await _localStream!.addTrack(newTrack); + } + } else { + _localStream = newStream; + } + + await _addOrReplaceAudioTracks(); + var transceivers = await _localPeerConnection?.getTransceivers(); + transceivers?.forEach((transceiver) { + if (transceiver.sender.senderId != _audioSender?.senderId) return; + var codecs = acaps?.codecs + ?.where((element) => element.mimeType + .toLowerCase() + .contains(audioDropdownValue.toLowerCase())) + .toList() ?? 
+ []; + transceiver.setCodecPreferences(codecs); + }); + await _negotiate(); + setState(() { + _micOn = true; + }); + } + + void _stopAudio() async { + _frameCyrptors.removeWhere((key, value) { + if (key.startsWith('audio')) { + value.dispose(); + return true; + } + return false; + }); + await _removeExistingAudioTrack(fromConnection: true); + await _negotiate(); + setState(() { + _micOn = false; + }); + } + + void _switchSpeaker() async { + setState(() { + _speakerOn = !_speakerOn; + if (!WebRTC.platformIsWeb) { + Helper.setSpeakerphoneOn(_speakerOn); + } + }); + } + + void handleStatsReport(Timer timer) async { + if (_remotePeerConnection != null && _remoteRenderer.srcObject != null) { + var reports = await _remotePeerConnection + ?.getStats(_remoteRenderer.srcObject!.getVideoTracks().first); + reports?.forEach((report) { + print('report => { '); + print(' id: ' + report.id + ','); + print(' type: ' + report.type + ','); + print(' timestamp: ${report.timestamp},'); + print(' values => {'); + report.values.forEach((key, value) { + print(' ' + key + ' : ' + value.toString() + ', '); + }); + print(' }'); + print('}'); + }); + + /* + var senders = await _peerConnection.getSenders(); + var canInsertDTMF = await senders[0].dtmfSender.canInsertDtmf(); + print(canInsertDTMF); + await senders[0].dtmfSender.insertDTMF('1'); + var receivers = await _peerConnection.getReceivers(); + print(receivers[0].track.id); + var transceivers = await _peerConnection.getTransceivers(); + print(transceivers[0].sender.parameters); + print(transceivers[0].receiver.parameters); + */ + } + } + + Future _removeExistingVideoTrack({bool fromConnection = false}) async { + var tracks = _localStream!.getVideoTracks(); + for (var i = tracks.length - 1; i >= 0; i--) { + var track = tracks[i]; + if (fromConnection) { + await _connectionRemoveTrack(track); + } + try { + await _localStream!.removeTrack(track); + } catch (e) { + print(e.toString()); + } + await track.stop(); + } + } + + Future 
_removeExistingAudioTrack({bool fromConnection = false}) async { + var tracks = _localStream!.getAudioTracks(); + for (var i = tracks.length - 1; i >= 0; i--) { + var track = tracks[i]; + if (fromConnection) { + await _connectionRemoveTrack(track); + } + try { + await _localStream!.removeTrack(track); + } catch (e) { + print(e.toString()); + } + await track.stop(); + } + } + + Future _addOrReplaceVideoTracks() async { + for (var track in _localStream!.getVideoTracks()) { + await _connectionAddTrack(track, _localStream!); + } + } + + Future _addOrReplaceAudioTracks() async { + for (var track in _localStream!.getAudioTracks()) { + await _connectionAddTrack(track, _localStream!); + } + } + + Future _connectionAddTrack( + MediaStreamTrack track, MediaStream stream) async { + var sender = track.kind == 'video' ? _videoSender : _audioSender; + if (sender != null) { + print('Have a Sender of kind:${track.kind}'); + var trans = await _getSendersTransceiver(sender.senderId); + if (trans != null) { + print('Setting direction and replacing track with new track'); + await trans.setDirection(TransceiverDirection.SendOnly); + await trans.sender.replaceTrack(track); + } + } else { + if (track.kind == 'video') { + _videoSender = await _localPeerConnection!.addTrack(track, stream); + } else { + _audioSender = await _localPeerConnection!.addTrack(track, stream); + } + } + } + + Future _connectionRemoveTrack(MediaStreamTrack track) async { + var sender = track.kind == 'video' ? _videoSender : _audioSender; + if (sender != null) { + print('Have a Sender of kind:${track.kind}'); + var trans = await _getSendersTransceiver(sender.senderId); + if (trans != null) { + print('Setting direction and replacing track with null'); + await trans.setDirection(TransceiverDirection.Inactive); + await trans.sender.replaceTrack(null); + } + } + } + + Future _getSendersTransceiver(String senderId) async { + RTCRtpTransceiver? 
foundTrans; + var trans = await _localPeerConnection!.getTransceivers(); + for (var tran in trans) { + if (tran.sender.senderId == senderId) { + foundTrans = tran; + break; + } + } + return foundTrans; + } + + @override + Widget build(BuildContext context) { + var widgets = [ + Expanded( + child: Container( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + Row( + children: [ + Text('audio codec:'), + DropdownButton( + value: audioDropdownValue, + icon: const Icon( + Icons.arrow_drop_down, + color: Colors.blue, + ), + elevation: 16, + style: const TextStyle(color: Colors.blue), + underline: Container( + height: 2, + color: Colors.blueAccent, + ), + onChanged: (String? value) { + // This is called when the user selects an item. + setState(() { + audioDropdownValue = value!; + }); + }, + items: audioCodecList + .map>((String value) { + return DropdownMenuItem( + value: value, + child: Text(value), + ); + }).toList(), + ), + Text('video codec:'), + DropdownButton( + value: videoDropdownValue, + icon: const Icon( + Icons.arrow_drop_down, + color: Colors.blue, + ), + elevation: 16, + style: const TextStyle(color: Colors.blue), + underline: Container( + height: 2, + color: Colors.blueAccent, + ), + onChanged: (String? value) { + // This is called when the user selects an item. 
+ setState(() { + videoDropdownValue = value!; + }); + }, + items: videoCodecList + .map>((String value) { + return DropdownMenuItem( + value: value, + child: Text(value), + ); + }).toList(), + ), + TextButton(onPressed: _ratchetKey, child: Text('Ratchet Key')) + ], + ), + Row( + children: [ + Text('audio encrypt:'), + Switch( + value: _audioEncrypt, + onChanged: (value) { + setState(() { + _audioEncrypt = value; + _enableEncryption(video: false, enabled: _audioEncrypt); + }); + }), + Text('video encrypt:'), + Switch( + value: _videoEncrypt, + onChanged: (value) { + setState(() { + _videoEncrypt = value; + _enableEncryption(video: true, enabled: _videoEncrypt); + }); + }) + ], + ), + Expanded( + child: RTCVideoView(_localRenderer, mirror: true), + ), + ], + )), + ), + Expanded( + child: Container( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + Row( + children: [ + Text('audio decrypt:'), + Switch( + value: _audioDecrypt, + onChanged: (value) { + setState(() { + _audioDecrypt = value; + _enableDecryption(video: false, enabled: _audioDecrypt); + }); + }), + Text('video decrypt:'), + Switch( + value: _videoDecrypt, + onChanged: (value) { + setState(() { + _videoDecrypt = value; + _enableDecryption(video: true, enabled: _videoDecrypt); + }); + }) + ], + ), + Expanded( + child: RTCVideoView(_remoteRenderer), + ), + ], + )), + ) + ]; + return Scaffold( + appBar: AppBar( + title: Text('LoopBack Unified Tracks example'), + actions: [ + IconButton( + icon: Icon(Icons.keyboard), + onPressed: _sendDtmf, + ), + PopupMenuButton( + onSelected: _selectAudioInput, + icon: Icon(Icons.settings_voice), + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! 
+ .where((device) => device.kind == 'audioinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), + PopupMenuButton( + onSelected: _selectAudioOutput, + icon: Icon(Icons.volume_down_alt), + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), + ], + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Stack( + children: [ + Container( + decoration: BoxDecoration(color: Colors.black54), + child: orientation == Orientation.portrait + ? Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets) + : Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets), + ), + Align( + alignment: Alignment.bottomCenter, + child: OverflowBar( + children: [ + FloatingActionButton( + heroTag: null, + backgroundColor: + _micOn ? null : Theme.of(context).disabledColor, + tooltip: _micOn ? 'Stop mic' : 'Start mic', + onPressed: _micOn ? _stopAudio : _startAudio, + child: Icon(_micOn ? Icons.mic : Icons.mic_off)), + FloatingActionButton( + heroTag: null, + backgroundColor: + _speakerOn ? null : Theme.of(context).disabledColor, + tooltip: _speakerOn ? 'Stop speaker' : 'Start speaker', + onPressed: _switchSpeaker, + child: Icon(_speakerOn + ? Icons.speaker_phone + : Icons.phone_in_talk)), + FloatingActionButton( + heroTag: null, + backgroundColor: + _cameraOn ? null : Theme.of(context).disabledColor, + tooltip: _cameraOn ? 'Stop camera' : 'Start camera', + onPressed: _cameraOn ? _stopVideo : _startVideo, + child: + Icon(_cameraOn ? Icons.videocam : Icons.videocam_off), + ), + FloatingActionButton( + heroTag: null, + backgroundColor: + _inCalling ? 
null : Theme.of(context).disabledColor, + onPressed: _inCalling ? _hangUp : _makeCall, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ) + ], + ), + ), + ], + ); + }, + ), + ); + } +} diff --git a/example/lib/src/loopback_sample_with_get_stats.dart b/example/lib/src/loopback_sample_with_get_stats.dart new file mode 100644 index 0000000000..2e457a69d0 --- /dev/null +++ b/example/lib/src/loopback_sample_with_get_stats.dart @@ -0,0 +1,158 @@ +import 'dart:core'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class LoopBackSampleWithGetStats extends StatefulWidget { + static String tag = 'loopback_sample_with_get_stats'; + + @override + _MyAppState createState() => _MyAppState(); +} + +class _MyAppState extends State { + MediaStream? _localStream; + RTCPeerConnection? _senderPc, _receiverPc; + + final _localRenderer = RTCVideoRenderer(); + final _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + + @override + void initState() { + super.initState(); + initRenderers(); + } + + @override + void deactivate() { + super.deactivate(); + _disconnect(); + _localRenderer.dispose(); + _remoteRenderer.dispose(); + } + + void initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + // Platform messages are asynchronous, so we initialize in an async method. 
+ void _connect() async { + if (_inCalling) { + return; + } + + try { + _senderPc ??= + await createPeerConnection({'sdpSemantics': 'unified-plan'}); + + _receiverPc ??= + await createPeerConnection({'sdpSemantics': 'unified-plan'}); + + _senderPc!.onIceCandidate = (candidate) { + _receiverPc!.addCandidate(candidate); + }; + + _receiverPc!.onIceCandidate = (candidate) { + _senderPc!.addCandidate(candidate); + }; + + _receiverPc?.onAddTrack = (stream, track) { + _remoteRenderer.srcObject = stream; + }; + + // get user media stream + _localStream = await navigator.mediaDevices + .getUserMedia({'audio': true, 'video': true}); + _localRenderer.srcObject = _localStream; + + _localStream!.getTracks().forEach((track) { + _senderPc!.addTrack(track, _localStream!); + }); + + var offer = await _senderPc?.createOffer(); + + await _receiverPc?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + await _receiverPc?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + + await _senderPc?.setLocalDescription(offer!); + await _receiverPc?.setRemoteDescription(offer!); + var answer = await _receiverPc?.createAnswer({}); + await _receiverPc?.setLocalDescription(answer!); + await _senderPc?.setRemoteDescription(answer!); + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + void _disconnect() async { + if (!_inCalling) { + return; + } + try { + await _localStream?.dispose(); + await _senderPc?.close(); + _senderPc = null; + await _receiverPc?.close(); + _receiverPc = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + setState(() { + _inCalling = false; + }); + } + + @override + Widget build(BuildContext context) { + var widgets = [ + Expanded( + child: 
RTCVideoView(_localRenderer, mirror: true), + ), + Expanded( + child: RTCVideoView(_remoteRenderer), + ) + ]; + return Scaffold( + appBar: AppBar( + title: Text('LoopBack with getStats'), + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + decoration: BoxDecoration(color: Colors.black54), + child: orientation == Orientation.portrait + ? Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets) + : Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets), + ), + ); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: _inCalling ? _disconnect : _connect, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/example/lib/src/route_item.dart b/example/lib/src/route_item.dart index 78427b13a3..fd35cb9a85 100644 --- a/example/lib/src/route_item.dart +++ b/example/lib/src/route_item.dart @@ -1,16 +1,15 @@ import 'package:flutter/material.dart'; -import 'dart:core'; -typedef void RouteCallback(BuildContext context); +typedef RouteCallback = void Function(BuildContext context); class RouteItem { RouteItem({ - @required this.title, - @required this.subtitle, - @required this.push, + required this.title, + this.subtitle, + this.push, }); final String title; - final String subtitle; - final RouteCallback push; -} \ No newline at end of file + final String? subtitle; + final RouteCallback? 
push; +} diff --git a/example/lib/src/step-by-step-tutorial.txt b/example/lib/src/step-by-step-tutorial.txt new file mode 100644 index 0000000000..da7349c527 --- /dev/null +++ b/example/lib/src/step-by-step-tutorial.txt @@ -0,0 +1,21 @@ +* get user media +* get display media audio/video, audio only, video only +* get sources/change audio input/output +* audio/video loopback simple +* getStats +* replace track in calling, turn on/off video or audio +* set set codec preferences +* simulcast sender +* send dtmf +* ice restart +* muiltiple tracks on one peerconnection + +data channel +* data channel loopback simple +* transfer a file/data through data channel + +Insertable Streams: +* frame crypto (e2ee) +* frame processing (e.g. face detection, object detection, etc) +* custom audio/video source from image, or file +* capture audioFrame/videoFrame to file or image \ No newline at end of file diff --git a/example/lib/src/utils.dart b/example/lib/src/utils.dart new file mode 100644 index 0000000000..5cb6027f92 --- /dev/null +++ b/example/lib/src/utils.dart @@ -0,0 +1,124 @@ +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; + +void setPreferredCodec(RTCSessionDescription description, + {String audio = 'opus', String video = 'vp8'}) { + var capSel = CodecCapabilitySelector(description.sdp!); + var acaps = capSel.getCapabilities('audio'); + if (acaps != null) { + acaps.codecs = acaps.codecs + .where((e) => (e['codec'] as String).toLowerCase() == audio) + .toList(); + acaps.setCodecPreferences('audio', acaps.codecs); + capSel.setCapabilities(acaps); + } + + var vcaps = capSel.getCapabilities('video'); + if (vcaps != null) { + vcaps.codecs = vcaps.codecs + .where((e) => (e['codec'] as String).toLowerCase() == video) + .toList(); + vcaps.setCodecPreferences('video', vcaps.codecs); + capSel.setCapabilities(vcaps); + } + description.sdp = capSel.sdp(); +} + +class CodecCapability { + CodecCapability( + 
this.kind, this.payloads, this.codecs, this.fmtp, this.rtcpFb) { + codecs.forEach((element) { + element['orign_payload'] = element['payload']; + }); + } + String kind; + List rtcpFb; + List fmtp; + List payloads; + List codecs; + bool setCodecPreferences(String kind, List? newCodecs) { + if (newCodecs == null) { + return false; + } + var newRtcpFb = []; + var newFmtp = []; + var newPayloads = []; + newCodecs.forEach((element) { + var orign_payload = element['orign_payload'] as int; + var payload = element['payload'] as int; + // change payload type + if (payload != orign_payload) { + newRtcpFb.addAll(rtcpFb.where((e) { + if (e['payload'] == orign_payload) { + e['payload'] = payload; + return true; + } + return false; + }).toList()); + newFmtp.addAll(fmtp.where((e) { + if (e['payload'] == orign_payload) { + e['payload'] = payload; + return true; + } + return false; + }).toList()); + if (payloads.contains('$orign_payload')) { + newPayloads.add('$payload'); + } + } else { + newRtcpFb.addAll(rtcpFb.where((e) => e['payload'] == payload).toList()); + newFmtp.addAll(fmtp.where((e) => e['payload'] == payload).toList()); + newPayloads.addAll(payloads.where((e) => e == '$payload').toList()); + } + }); + rtcpFb = newRtcpFb; + fmtp = newFmtp; + payloads = newPayloads; + codecs = newCodecs; + return true; + } +} + +class CodecCapabilitySelector { + CodecCapabilitySelector(String sdp) { + _sdp = sdp; + _session = sdp_transform.parse(_sdp); + } + late String _sdp; + late Map _session; + Map get session => _session; + String sdp() => sdp_transform.write(_session, null); + + CodecCapability? getCapabilities(String kind) { + var mline = _mline(kind); + if (mline == null) { + return null; + } + var rtcpFb = mline['rtcpFb'] ?? []; + var fmtp = mline['fmtp'] ?? []; + var payloads = (mline['payloads'] as String).split(' '); + var codecs = mline['rtp'] ?? []; + return CodecCapability(kind, payloads, codecs, fmtp, rtcpFb); + } + + bool setCapabilities(CodecCapability? 
caps) { + if (caps == null) { + return false; + } + var mline = _mline(caps.kind); + if (mline == null) { + return false; + } + mline['payloads'] = caps.payloads.join(' '); + mline['rtp'] = caps.codecs; + mline['fmtp'] = caps.fmtp; + mline['rtcpFb'] = caps.rtcpFb; + return true; + } + + Map? _mline(String kind) { + var mlist = _session['media'] as List; + return mlist.firstWhere((element) => element['type'] == kind, + orElse: () => null); + } +} diff --git a/example/lib/src/widgets/screen_select_dialog.dart b/example/lib/src/widgets/screen_select_dialog.dart new file mode 100644 index 0000000000..c6e809d88e --- /dev/null +++ b/example/lib/src/widgets/screen_select_dialog.dart @@ -0,0 +1,310 @@ +import 'dart:async'; +import 'dart:typed_data'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class ThumbnailWidget extends StatefulWidget { + const ThumbnailWidget( + {Key? key, + required this.source, + required this.selected, + required this.onTap}) + : super(key: key); + final DesktopCapturerSource source; + final bool selected; + final Function(DesktopCapturerSource) onTap; + + @override + _ThumbnailWidgetState createState() => _ThumbnailWidgetState(); +} + +class _ThumbnailWidgetState extends State { + final List _subscriptions = []; + Uint8List? _thumbnail; + @override + void initState() { + super.initState(); + _subscriptions.add(widget.source.onThumbnailChanged.stream.listen((event) { + setState(() { + _thumbnail = event; + }); + })); + _subscriptions.add(widget.source.onNameChanged.stream.listen((event) { + setState(() {}); + })); + } + + @override + void deactivate() { + _subscriptions.forEach((element) { + element.cancel(); + }); + super.deactivate(); + } + + @override + Widget build(BuildContext context) { + return Column( + children: [ + Expanded( + child: Container( + decoration: widget.selected + ? 
BoxDecoration( + border: Border.all(width: 2, color: Colors.blueAccent)) + : null, + child: InkWell( + onTap: () { + print('Selected source id => ${widget.source.id}'); + widget.onTap(widget.source); + }, + child: _thumbnail != null + ? Image.memory( + _thumbnail!, + gaplessPlayback: true, + alignment: Alignment.center, + ) + : Container(), + ), + )), + Text( + widget.source.name, + style: TextStyle( + fontSize: 12, + color: Colors.black87, + fontWeight: + widget.selected ? FontWeight.bold : FontWeight.normal), + ), + ], + ); + } +} + +// ignore: must_be_immutable +class ScreenSelectDialog extends Dialog { + ScreenSelectDialog() { + Future.delayed(Duration(milliseconds: 100), () { + _getSources(); + }); + _subscriptions.add(desktopCapturer.onAdded.stream.listen((source) { + _sources[source.id] = source; + _stateSetter?.call(() {}); + })); + + _subscriptions.add(desktopCapturer.onRemoved.stream.listen((source) { + _sources.remove(source.id); + _stateSetter?.call(() {}); + })); + + _subscriptions + .add(desktopCapturer.onThumbnailChanged.stream.listen((source) { + _stateSetter?.call(() {}); + })); + } + final Map _sources = {}; + SourceType _sourceType = SourceType.Screen; + DesktopCapturerSource? _selected_source; + final List> _subscriptions = []; + StateSetter? _stateSetter; + Timer? 
_timer; + + void _ok(context) async { + _timer?.cancel(); + _subscriptions.forEach((element) { + element.cancel(); + }); + Navigator.pop(context, _selected_source); + } + + void _cancel(context) async { + _timer?.cancel(); + _subscriptions.forEach((element) { + element.cancel(); + }); + Navigator.pop(context, null); + } + + Future _getSources() async { + try { + var sources = await desktopCapturer.getSources(types: [_sourceType]); + sources.forEach((element) { + print( + 'name: ${element.name}, id: ${element.id}, type: ${element.type}'); + }); + _timer?.cancel(); + _timer = Timer.periodic(Duration(seconds: 3), (timer) { + desktopCapturer.updateSources(types: [_sourceType]); + }); + _sources.clear(); + sources.forEach((element) { + _sources[element.id] = element; + }); + _stateSetter?.call(() {}); + return; + } catch (e) { + print(e.toString()); + } + } + + @override + Widget build(BuildContext context) { + return Material( + type: MaterialType.transparency, + child: Center( + child: Container( + width: 640, + height: 560, + color: Colors.white, + child: Column( + children: [ + Padding( + padding: EdgeInsets.all(10), + child: Stack( + children: [ + Align( + alignment: Alignment.topLeft, + child: Text( + 'Choose what to share', + style: TextStyle(fontSize: 16, color: Colors.black87), + ), + ), + Align( + alignment: Alignment.topRight, + child: InkWell( + child: Icon(Icons.close), + onTap: () => _cancel(context), + ), + ), + ], + ), + ), + Expanded( + flex: 1, + child: Container( + width: double.infinity, + padding: EdgeInsets.all(10), + child: StatefulBuilder( + builder: (context, setState) { + _stateSetter = setState; + return DefaultTabController( + length: 2, + child: Column( + children: [ + Container( + constraints: BoxConstraints.expand(height: 24), + child: TabBar( + onTap: (value) => + Future.delayed(Duration.zero, () { + _sourceType = value == 0 + ? 
SourceType.Screen + : SourceType.Window; + _getSources(); + }), + tabs: [ + Tab( + child: Text( + 'Entire Screen', + style: TextStyle(color: Colors.black54), + )), + Tab( + child: Text( + 'Window', + style: TextStyle(color: Colors.black54), + )), + ]), + ), + SizedBox( + height: 2, + ), + Expanded( + child: Container( + child: TabBarView(children: [ + Align( + alignment: Alignment.center, + child: Container( + child: GridView.count( + crossAxisSpacing: 8, + crossAxisCount: 2, + children: _sources.entries + .where((element) => + element.value.type == + SourceType.Screen) + .map((e) => ThumbnailWidget( + onTap: (source) { + setState(() { + _selected_source = source; + }); + }, + source: e.value, + selected: + _selected_source?.id == + e.value.id, + )) + .toList(), + ), + )), + Align( + alignment: Alignment.center, + child: Container( + child: GridView.count( + crossAxisSpacing: 8, + crossAxisCount: 3, + children: _sources.entries + .where((element) => + element.value.type == + SourceType.Window) + .map((e) => ThumbnailWidget( + onTap: (source) { + setState(() { + _selected_source = source; + }); + }, + source: e.value, + selected: + _selected_source?.id == + e.value.id, + )) + .toList(), + ), + )), + ]), + ), + ) + ], + ), + ); + }, + ), + ), + ), + Container( + width: double.infinity, + child: OverflowBar( + children: [ + MaterialButton( + child: Text( + 'Cancel', + style: TextStyle(color: Colors.black54), + ), + onPressed: () { + _cancel(context); + }, + ), + MaterialButton( + color: Theme.of(context).primaryColor, + child: Text( + 'Share', + ), + onPressed: () { + _ok(context); + }, + ), + ], + ), + ), + ], + ), + )), + ); + } +} diff --git a/example/linux/.gitignore b/example/linux/.gitignore new file mode 100644 index 0000000000..d3896c9844 --- /dev/null +++ b/example/linux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral diff --git a/example/linux/CMakeLists.txt b/example/linux/CMakeLists.txt new file mode 100644 index 0000000000..aec6e1080e --- /dev/null +++ 
b/example/linux/CMakeLists.txt @@ -0,0 +1,139 @@ +# Project-level configuration. +cmake_minimum_required(VERSION 3.10) +project(runner LANGUAGES CXX) + +# The name of the executable created for the application. Change this to change +# the on-disk name of your application. +set(BINARY_NAME "flutter_webrtc_example") +# The unique GTK application identifier for this application. See: +# https://wiki.gnome.org/HowDoI/ChooseApplicationID +set(APPLICATION_ID "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example") + +# Explicitly opt in to modern CMake behaviors to avoid warnings with recent +# versions of CMake. +cmake_policy(SET CMP0063 NEW) + +# Load bundled libraries from the lib/ directory relative to the binary. +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Root filesystem for cross-building. +if(FLUTTER_TARGET_PLATFORM_SYSROOT) + set(CMAKE_SYSROOT ${FLUTTER_TARGET_PLATFORM_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH ${CMAKE_SYSROOT}) + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) + set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +endif() + +# Define build configuration options. +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") +endif() + +# Compilation settings that should be applied to most targets. +# +# Be cautious about adding new options here, as plugins use this function by +# default. In most cases, you should add new options to specific targets instead +# of modifying this function. 
+function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_14) + target_compile_options(${TARGET} PRIVATE -Wall -Werror) + target_compile_options(${TARGET} PRIVATE "$<$>:-O3>") + target_compile_definitions(${TARGET} PRIVATE "$<$>:NDEBUG>") +endfunction() + +# Flutter library and tool build rules. +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) + +add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}") + +# Define the application target. To change its name, change BINARY_NAME above, +# not the value here, or `flutter run` will no longer work. +# +# Any new source files that you add to the application should be added here. +add_executable(${BINARY_NAME} + "main.cc" + "my_application.cc" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" +) + +# Apply the standard set of build settings. This can be removed for applications +# that need different build settings. +apply_standard_settings(${BINARY_NAME}) + +# Add dependency libraries. Add any application-specific dependencies here. +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK) + +# Run the Flutter tool portions of the build. This must not be removed. +add_dependencies(${BINARY_NAME} flutter_assemble) + +# Only the install-generated bundle's copy of the executable will launch +# correctly, since the resources must in the right relative locations. To avoid +# people trying to run the unbundled copy, put it in a subdirectory instead of +# the default top-level location. +set_target_properties(${BINARY_NAME} + PROPERTIES + RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/intermediates_do_not_run" +) + + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. 
+include(flutter/generated_plugins.cmake) + + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. +install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +foreach(bundled_library ${PLUGIN_BUNDLED_LIBRARIES}) + install(FILES "${bundled_library}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endforeach(bundled_library) + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() diff --git a/example/linux/flutter/CMakeLists.txt b/example/linux/flutter/CMakeLists.txt new file mode 100644 index 0000000000..d5bd01648a --- /dev/null +++ b/example/linux/flutter/CMakeLists.txt @@ -0,0 +1,88 @@ +# This file controls Flutter-level build steps. It should not be edited. 
+cmake_minimum_required(VERSION 3.10) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +# TODO: Move the rest of this into files in ephemeral. See +# https://github.com/flutter/flutter/issues/57146. + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. +function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. +find_package(PkgConfig REQUIRED) +pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0) +pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0) +pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0) + +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_linux_gtk.so") + +# Published to parent scope for install step. 
+set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) +set(AOT_LIBRARY "${PROJECT_DIR}/build/lib/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "fl_basic_message_channel.h" + "fl_binary_codec.h" + "fl_binary_messenger.h" + "fl_dart_project.h" + "fl_engine.h" + "fl_json_message_codec.h" + "fl_json_method_codec.h" + "fl_message_codec.h" + "fl_method_call.h" + "fl_method_channel.h" + "fl_method_codec.h" + "fl_method_response.h" + "fl_plugin_registrar.h" + "fl_plugin_registry.h" + "fl_standard_message_codec.h" + "fl_standard_method_codec.h" + "fl_string_codec.h" + "fl_value.h" + "fl_view.h" + "flutter_linux.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/flutter_linux/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE + PkgConfig::GTK + PkgConfig::GLIB + PkgConfig::GIO +) +add_dependencies(flutter flutter_assemble) + +# === Flutter tool backend === +# _phony_ is a non-existent file to force this command to run every time, +# since currently there's no way to get a full input/output list from the +# flutter tool. 
+add_custom_command( + OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS} + ${CMAKE_CURRENT_BINARY_DIR}/_phony_ + COMMAND ${CMAKE_COMMAND} -E env + ${FLUTTER_TOOL_ENVIRONMENT} + "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.sh" + ${FLUTTER_TARGET_PLATFORM} ${CMAKE_BUILD_TYPE} + VERBATIM +) +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} +) diff --git a/example/linux/flutter/generated_plugin_registrant.cc b/example/linux/flutter/generated_plugin_registrant.cc new file mode 100644 index 0000000000..3f48831149 --- /dev/null +++ b/example/linux/flutter/generated_plugin_registrant.cc @@ -0,0 +1,15 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#include "generated_plugin_registrant.h" + +#include + +void fl_register_plugins(FlPluginRegistry* registry) { + g_autoptr(FlPluginRegistrar) flutter_webrtc_registrar = + fl_plugin_registry_get_registrar_for_plugin(registry, "FlutterWebRTCPlugin"); + flutter_web_r_t_c_plugin_register_with_registrar(flutter_webrtc_registrar); +} diff --git a/example/linux/flutter/generated_plugin_registrant.h b/example/linux/flutter/generated_plugin_registrant.h new file mode 100644 index 0000000000..e0f0a47bc0 --- /dev/null +++ b/example/linux/flutter/generated_plugin_registrant.h @@ -0,0 +1,15 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#ifndef GENERATED_PLUGIN_REGISTRANT_ +#define GENERATED_PLUGIN_REGISTRANT_ + +#include + +// Registers Flutter plugins. +void fl_register_plugins(FlPluginRegistry* registry); + +#endif // GENERATED_PLUGIN_REGISTRANT_ diff --git a/example/linux/flutter/generated_plugins.cmake b/example/linux/flutter/generated_plugins.cmake new file mode 100644 index 0000000000..57172770e6 --- /dev/null +++ b/example/linux/flutter/generated_plugins.cmake @@ -0,0 +1,24 @@ +# +# Generated file, do not edit. 
+# + +list(APPEND FLUTTER_PLUGIN_LIST + flutter_webrtc +) + +list(APPEND FLUTTER_FFI_PLUGIN_LIST +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/linux plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) + +foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/linux plugins/${ffi_plugin}) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries}) +endforeach(ffi_plugin) diff --git a/example/linux/main.cc b/example/linux/main.cc new file mode 100644 index 0000000000..e7c5c54370 --- /dev/null +++ b/example/linux/main.cc @@ -0,0 +1,6 @@ +#include "my_application.h" + +int main(int argc, char** argv) { + g_autoptr(MyApplication) app = my_application_new(); + return g_application_run(G_APPLICATION(app), argc, argv); +} diff --git a/example/linux/my_application.cc b/example/linux/my_application.cc new file mode 100644 index 0000000000..0d05258c03 --- /dev/null +++ b/example/linux/my_application.cc @@ -0,0 +1,104 @@ +#include "my_application.h" + +#include +#ifdef GDK_WINDOWING_X11 +#include +#endif + +#include "flutter/generated_plugin_registrant.h" + +struct _MyApplication { + GtkApplication parent_instance; + char** dart_entrypoint_arguments; +}; + +G_DEFINE_TYPE(MyApplication, my_application, GTK_TYPE_APPLICATION) + +// Implements GApplication::activate. +static void my_application_activate(GApplication* application) { + MyApplication* self = MY_APPLICATION(application); + GtkWindow* window = + GTK_WINDOW(gtk_application_window_new(GTK_APPLICATION(application))); + + // Use a header bar when running in GNOME as this is the common style used + // by applications and is the setup most users will be using (e.g. Ubuntu + // desktop). 
+ // If running on X and not using GNOME then just use a traditional title bar + // in case the window manager does more exotic layout, e.g. tiling. + // If running on Wayland assume the header bar will work (may need changing + // if future cases occur). + gboolean use_header_bar = TRUE; +#ifdef GDK_WINDOWING_X11 + GdkScreen* screen = gtk_window_get_screen(window); + if (GDK_IS_X11_SCREEN(screen)) { + const gchar* wm_name = gdk_x11_screen_get_window_manager_name(screen); + if (g_strcmp0(wm_name, "GNOME Shell") != 0) { + use_header_bar = FALSE; + } + } +#endif + if (use_header_bar) { + GtkHeaderBar* header_bar = GTK_HEADER_BAR(gtk_header_bar_new()); + gtk_widget_show(GTK_WIDGET(header_bar)); + gtk_header_bar_set_title(header_bar, "flutter_webrtc_example"); + gtk_header_bar_set_show_close_button(header_bar, TRUE); + gtk_window_set_titlebar(window, GTK_WIDGET(header_bar)); + } else { + gtk_window_set_title(window, "flutter_webrtc_example"); + } + + gtk_window_set_default_size(window, 1280, 720); + gtk_widget_show(GTK_WIDGET(window)); + + g_autoptr(FlDartProject) project = fl_dart_project_new(); + fl_dart_project_set_dart_entrypoint_arguments(project, self->dart_entrypoint_arguments); + + FlView* view = fl_view_new(project); + gtk_widget_show(GTK_WIDGET(view)); + gtk_container_add(GTK_CONTAINER(window), GTK_WIDGET(view)); + + fl_register_plugins(FL_PLUGIN_REGISTRY(view)); + + gtk_widget_grab_focus(GTK_WIDGET(view)); +} + +// Implements GApplication::local_command_line. +static gboolean my_application_local_command_line(GApplication* application, gchar*** arguments, int* exit_status) { + MyApplication* self = MY_APPLICATION(application); + // Strip out the first argument as it is the binary name. 
+ self->dart_entrypoint_arguments = g_strdupv(*arguments + 1); + + g_autoptr(GError) error = nullptr; + if (!g_application_register(application, nullptr, &error)) { + g_warning("Failed to register: %s", error->message); + *exit_status = 1; + return TRUE; + } + + g_application_activate(application); + *exit_status = 0; + + return TRUE; +} + +// Implements GObject::dispose. +static void my_application_dispose(GObject* object) { + MyApplication* self = MY_APPLICATION(object); + g_clear_pointer(&self->dart_entrypoint_arguments, g_strfreev); + G_OBJECT_CLASS(my_application_parent_class)->dispose(object); +} + +static void my_application_class_init(MyApplicationClass* klass) { + G_APPLICATION_CLASS(klass)->activate = my_application_activate; + G_APPLICATION_CLASS(klass)->local_command_line = my_application_local_command_line; + G_OBJECT_CLASS(klass)->dispose = my_application_dispose; +} + +static void my_application_init(MyApplication* self) {} + +MyApplication* my_application_new() { + return MY_APPLICATION(g_object_new(my_application_get_type(), + "application-id", APPLICATION_ID, + "flags", G_APPLICATION_NON_UNIQUE, + nullptr)); +} diff --git a/example/linux/my_application.h b/example/linux/my_application.h new file mode 100644 index 0000000000..72271d5e41 --- /dev/null +++ b/example/linux/my_application.h @@ -0,0 +1,18 @@ +#ifndef FLUTTER_MY_APPLICATION_H_ +#define FLUTTER_MY_APPLICATION_H_ + +#include + +G_DECLARE_FINAL_TYPE(MyApplication, my_application, MY, APPLICATION, + GtkApplication) + +/** + * my_application_new: + * + * Creates a new Flutter-based application. + * + * Returns: a new #MyApplication. 
+ */ +MyApplication* my_application_new(); + +#endif // FLUTTER_MY_APPLICATION_H_ diff --git a/example/macos/.gitignore b/example/macos/.gitignore new file mode 100644 index 0000000000..746adbb6b9 --- /dev/null +++ b/example/macos/.gitignore @@ -0,0 +1,7 @@ +# Flutter-related +**/Flutter/ephemeral/ +**/Pods/ + +# Xcode-related +**/dgph +**/xcuserdata/ diff --git a/example/macos/Flutter/Flutter-Debug.xcconfig b/example/macos/Flutter/Flutter-Debug.xcconfig new file mode 100644 index 0000000000..4b81f9b2d2 --- /dev/null +++ b/example/macos/Flutter/Flutter-Debug.xcconfig @@ -0,0 +1,2 @@ +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" +#include "ephemeral/Flutter-Generated.xcconfig" diff --git a/example/macos/Flutter/Flutter-Release.xcconfig b/example/macos/Flutter/Flutter-Release.xcconfig new file mode 100644 index 0000000000..5caa9d1579 --- /dev/null +++ b/example/macos/Flutter/Flutter-Release.xcconfig @@ -0,0 +1,2 @@ +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" +#include "ephemeral/Flutter-Generated.xcconfig" diff --git a/example/macos/Flutter/GeneratedPluginRegistrant.swift b/example/macos/Flutter/GeneratedPluginRegistrant.swift new file mode 100644 index 0000000000..194710fc5a --- /dev/null +++ b/example/macos/Flutter/GeneratedPluginRegistrant.swift @@ -0,0 +1,14 @@ +// +// Generated file. Do not edit. 
+// + +import FlutterMacOS +import Foundation + +import flutter_webrtc +import path_provider_foundation + +func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) { + FlutterWebRTCPlugin.register(with: registry.registrar(forPlugin: "FlutterWebRTCPlugin")) + PathProviderPlugin.register(with: registry.registrar(forPlugin: "PathProviderPlugin")) +} diff --git a/example/macos/Podfile b/example/macos/Podfile new file mode 100644 index 0000000000..b52666a103 --- /dev/null +++ b/example/macos/Podfile @@ -0,0 +1,43 @@ +platform :osx, '10.15' + +# CocoaPods analytics sends network stats synchronously affecting flutter build latency. +ENV['COCOAPODS_DISABLE_STATS'] = 'true' + +project 'Runner', { + 'Debug' => :debug, + 'Profile' => :release, + 'Release' => :release, +} + +def flutter_root + generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'ephemeral', 'Flutter-Generated.xcconfig'), __FILE__) + unless File.exist?(generated_xcode_build_settings_path) + raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure \"flutter pub get\" is executed first" + end + + File.foreach(generated_xcode_build_settings_path) do |line| + matches = line.match(/FLUTTER_ROOT\=(.*)/) + return matches[1].strip if matches + end + raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Flutter-Generated.xcconfig, then run \"flutter pub get\"" +end + +require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root) + +flutter_macos_podfile_setup + +target 'Runner' do + use_frameworks! + use_modular_headers! + + flutter_install_all_macos_pods File.dirname(File.realpath(__FILE__)) + target 'RunnerTests' do + inherit! 
:search_paths + end +end + +post_install do |installer| + installer.pods_project.targets.each do |target| + flutter_additional_macos_build_settings(target) + end +end diff --git a/example/macos/Runner.xcodeproj/project.pbxproj b/example/macos/Runner.xcodeproj/project.pbxproj new file mode 100644 index 0000000000..e2c46d9f3f --- /dev/null +++ b/example/macos/Runner.xcodeproj/project.pbxproj @@ -0,0 +1,698 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 54; + objects = { + +/* Begin PBXAggregateTarget section */ + 33CC111A2044C6BA0003C045 /* Flutter Assemble */ = { + isa = PBXAggregateTarget; + buildConfigurationList = 33CC111B2044C6BA0003C045 /* Build configuration list for PBXAggregateTarget "Flutter Assemble" */; + buildPhases = ( + 33CC111E2044C6BF0003C045 /* ShellScript */, + ); + dependencies = ( + ); + name = "Flutter Assemble"; + productName = FLX; + }; +/* End PBXAggregateTarget section */ + +/* Begin PBXBuildFile section */ + 331C80D8294CF71000263BE5 /* RunnerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 331C80D7294CF71000263BE5 /* RunnerTests.swift */; }; + 335BBD1B22A9A15E00E9071D /* GeneratedPluginRegistrant.swift in Sources */ = {isa = PBXBuildFile; fileRef = 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */; }; + 33CC10F12044A3C60003C045 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 33CC10F02044A3C60003C045 /* AppDelegate.swift */; }; + 33CC10F32044A3C60003C045 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 33CC10F22044A3C60003C045 /* Assets.xcassets */; }; + 33CC10F62044A3C60003C045 /* MainMenu.xib in Resources */ = {isa = PBXBuildFile; fileRef = 33CC10F42044A3C60003C045 /* MainMenu.xib */; }; + 33CC11132044BFA00003C045 /* MainFlutterWindow.swift in Sources */ = {isa = PBXBuildFile; fileRef = 33CC11122044BFA00003C045 /* MainFlutterWindow.swift */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + 331C80D9294CF71000263BE5 
/* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 33CC10E52044A3C60003C045 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 33CC10EC2044A3C60003C045; + remoteInfo = Runner; + }; + 33CC111F2044C79F0003C045 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 33CC10E52044A3C60003C045 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 33CC111A2044C6BA0003C045; + remoteInfo = FLX; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 33CC110E2044A8840003C045 /* Bundle Framework */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + ); + name = "Bundle Framework"; + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 331C80D5294CF71000263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + 331C80D7294CF71000263BE5 /* RunnerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunnerTests.swift; sourceTree = ""; }; + 333000ED22D3DE5D00554162 /* Warnings.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Warnings.xcconfig; sourceTree = ""; }; + 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GeneratedPluginRegistrant.swift; sourceTree = ""; }; + 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = flutter_webrtc_example.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 33CC10F02044A3C60003C045 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = 
""; }; + 33CC10F22044A3C60003C045 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Assets.xcassets; path = Runner/Assets.xcassets; sourceTree = ""; }; + 33CC10F52044A3C60003C045 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/MainMenu.xib; sourceTree = ""; }; + 33CC10F72044A3C60003C045 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; name = Info.plist; path = Runner/Info.plist; sourceTree = ""; }; + 33CC11122044BFA00003C045 /* MainFlutterWindow.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MainFlutterWindow.swift; sourceTree = ""; }; + 33CEB47222A05771004F2AC0 /* Flutter-Debug.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = "Flutter-Debug.xcconfig"; sourceTree = ""; }; + 33CEB47422A05771004F2AC0 /* Flutter-Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = "Flutter-Release.xcconfig"; sourceTree = ""; }; + 33CEB47722A0578A004F2AC0 /* Flutter-Generated.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = "Flutter-Generated.xcconfig"; path = "ephemeral/Flutter-Generated.xcconfig"; sourceTree = ""; }; + 33E51913231747F40026EE4D /* DebugProfile.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = DebugProfile.entitlements; sourceTree = ""; }; + 33E51914231749380026EE4D /* Release.entitlements */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.entitlements; path = Release.entitlements; sourceTree = ""; }; + 33E5194F232828860026EE4D /* AppInfo.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = AppInfo.xcconfig; sourceTree = ""; }; + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Release.xcconfig; sourceTree = ""; }; + 9740EEB21CF90195004384FC /* 
Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = Debug.xcconfig; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 331C80D2294CF70F00263BE5 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 33CC10EA2044A3C60003C045 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 331C80D6294CF71000263BE5 /* RunnerTests */ = { + isa = PBXGroup; + children = ( + 331C80D7294CF71000263BE5 /* RunnerTests.swift */, + ); + path = RunnerTests; + sourceTree = ""; + }; + 33BA886A226E78AF003329D5 /* Configs */ = { + isa = PBXGroup; + children = ( + 33E5194F232828860026EE4D /* AppInfo.xcconfig */, + 9740EEB21CF90195004384FC /* Debug.xcconfig */, + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, + 333000ED22D3DE5D00554162 /* Warnings.xcconfig */, + ); + path = Configs; + sourceTree = ""; + }; + 33CC10E42044A3C60003C045 = { + isa = PBXGroup; + children = ( + 33FAB671232836740065AC1E /* Runner */, + 33CEB47122A05771004F2AC0 /* Flutter */, + 331C80D6294CF71000263BE5 /* RunnerTests */, + 33CC10EE2044A3C60003C045 /* Products */, + D73912EC22F37F3D000D13A0 /* Frameworks */, + ); + sourceTree = ""; + }; + 33CC10EE2044A3C60003C045 /* Products */ = { + isa = PBXGroup; + children = ( + 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */, + 331C80D5294CF71000263BE5 /* RunnerTests.xctest */, + ); + name = Products; + sourceTree = ""; + }; + 33CC11242044D66E0003C045 /* Resources */ = { + isa = PBXGroup; + children = ( + 33CC10F22044A3C60003C045 /* Assets.xcassets */, + 33CC10F42044A3C60003C045 /* MainMenu.xib */, + 33CC10F72044A3C60003C045 /* Info.plist */, + ); + name = Resources; + path = ..; + sourceTree = ""; + 
}; + 33CEB47122A05771004F2AC0 /* Flutter */ = { + isa = PBXGroup; + children = ( + 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */, + 33CEB47222A05771004F2AC0 /* Flutter-Debug.xcconfig */, + 33CEB47422A05771004F2AC0 /* Flutter-Release.xcconfig */, + 33CEB47722A0578A004F2AC0 /* Flutter-Generated.xcconfig */, + ); + path = Flutter; + sourceTree = ""; + }; + 33FAB671232836740065AC1E /* Runner */ = { + isa = PBXGroup; + children = ( + 33CC10F02044A3C60003C045 /* AppDelegate.swift */, + 33CC11122044BFA00003C045 /* MainFlutterWindow.swift */, + 33E51913231747F40026EE4D /* DebugProfile.entitlements */, + 33E51914231749380026EE4D /* Release.entitlements */, + 33CC11242044D66E0003C045 /* Resources */, + 33BA886A226E78AF003329D5 /* Configs */, + ); + path = Runner; + sourceTree = ""; + }; + D73912EC22F37F3D000D13A0 /* Frameworks */ = { + isa = PBXGroup; + children = ( + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 331C80D4294CF70F00263BE5 /* RunnerTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = 331C80DE294CF71000263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */; + buildPhases = ( + 331C80D1294CF70F00263BE5 /* Sources */, + 331C80D2294CF70F00263BE5 /* Frameworks */, + 331C80D3294CF70F00263BE5 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + 331C80DA294CF71000263BE5 /* PBXTargetDependency */, + ); + name = RunnerTests; + productName = RunnerTests; + productReference = 331C80D5294CF71000263BE5 /* RunnerTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; + 33CC10EC2044A3C60003C045 /* Runner */ = { + isa = PBXNativeTarget; + buildConfigurationList = 33CC10FB2044A3C60003C045 /* Build configuration list for PBXNativeTarget "Runner" */; + buildPhases = ( + 33CC10E92044A3C60003C045 /* Sources */, + 33CC10EA2044A3C60003C045 /* Frameworks */, + 33CC10EB2044A3C60003C045 /* Resources */, + 33CC110E2044A8840003C045 /* 
Bundle Framework */, + 3399D490228B24CF009A79C7 /* ShellScript */, + ); + buildRules = ( + ); + dependencies = ( + 33CC11202044C79F0003C045 /* PBXTargetDependency */, + ); + name = Runner; + productName = Runner; + productReference = 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 33CC10E52044A3C60003C045 /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 0920; + LastUpgradeCheck = 1300; + ORGANIZATIONNAME = ""; + TargetAttributes = { + 331C80D4294CF70F00263BE5 = { + CreatedOnToolsVersion = 14.0; + TestTargetID = 33CC10EC2044A3C60003C045; + }; + 33CC10EC2044A3C60003C045 = { + CreatedOnToolsVersion = 9.2; + LastSwiftMigration = 1100; + ProvisioningStyle = Automatic; + SystemCapabilities = { + com.apple.Sandbox = { + enabled = 1; + }; + }; + }; + 33CC111A2044C6BA0003C045 = { + CreatedOnToolsVersion = 9.2; + ProvisioningStyle = Manual; + }; + }; + }; + buildConfigurationList = 33CC10E82044A3C60003C045 /* Build configuration list for PBXProject "Runner" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 33CC10E42044A3C60003C045; + productRefGroup = 33CC10EE2044A3C60003C045 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 33CC10EC2044A3C60003C045 /* Runner */, + 331C80D4294CF70F00263BE5 /* RunnerTests */, + 33CC111A2044C6BA0003C045 /* Flutter Assemble */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 331C80D3294CF70F00263BE5 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 33CC10EB2044A3C60003C045 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 33CC10F32044A3C60003C045 /* 
Assets.xcassets in Resources */, + 33CC10F62044A3C60003C045 /* MainMenu.xib in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 3399D490228B24CF009A79C7 /* ShellScript */ = { + isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + ); + outputFileListPaths = ( + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "echo \"$PRODUCT_NAME.app\" > \"$PROJECT_DIR\"/Flutter/ephemeral/.app_filename && \"$FLUTTER_ROOT\"/packages/flutter_tools/bin/macos_assemble.sh embed\n"; + }; + 33CC111E2044C6BF0003C045 /* ShellScript */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + Flutter/ephemeral/FlutterInputs.xcfilelist, + ); + inputPaths = ( + Flutter/ephemeral/tripwire, + ); + outputFileListPaths = ( + Flutter/ephemeral/FlutterOutputs.xcfilelist, + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"$FLUTTER_ROOT\"/packages/flutter_tools/bin/macos_assemble.sh && touch Flutter/ephemeral/tripwire"; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 331C80D1294CF70F00263BE5 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 331C80D8294CF71000263BE5 /* RunnerTests.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + 33CC10E92044A3C60003C045 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 33CC11132044BFA00003C045 /* MainFlutterWindow.swift in Sources */, + 33CC10F12044A3C60003C045 /* AppDelegate.swift in Sources */, + 335BBD1B22A9A15E00E9071D /* GeneratedPluginRegistrant.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End 
PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + 331C80DA294CF71000263BE5 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 33CC10EC2044A3C60003C045 /* Runner */; + targetProxy = 331C80D9294CF71000263BE5 /* PBXContainerItemProxy */; + }; + 33CC11202044C79F0003C045 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 33CC111A2044C6BA0003C045 /* Flutter Assemble */; + targetProxy = 33CC111F2044C79F0003C045 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin PBXVariantGroup section */ + 33CC10F42044A3C60003C045 /* MainMenu.xib */ = { + isa = PBXVariantGroup; + children = ( + 33CC10F52044A3C60003C045 /* Base */, + ); + name = MainMenu.xib; + path = Runner; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 331C80DB294CF71000263BE5 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; + }; + name = Debug; + }; + 331C80DC294CF71000263BE5 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; + }; + name = Release; + }; + 331C80DD294CF71000263BE5 /* Profile */ = { + isa = 
XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; + }; + name = Profile; + }; + 338D0CE9231458BD00FA5F75 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CODE_SIGN_IDENTITY = "-"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + MACOSX_DEPLOYMENT_TARGET = 
10.14; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = macosx; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + }; + name = Profile; + }; + 338D0CEA231458BD00FA5F75 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 33E5194F232828860026EE4D /* AppInfo.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_ENTITLEMENTS = Runner/DebugProfile.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + MACOSX_DEPLOYMENT_TARGET = 15.0; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_VERSION = 5.0; + }; + name = Profile; + }; + 338D0CEB231458BD00FA5F75 /* Profile */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Manual; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Profile; + }; + 33CC10F92044A3C60003C045 /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + 
CLANG_WARN_SUSPICIOUS_MOVE = YES; + CODE_SIGN_IDENTITY = "-"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + MACOSX_DEPLOYMENT_TARGET = 10.14; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = macosx; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 33CC10FA2044A3C60003C045 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CODE_SIGN_IDENTITY = "-"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + 
ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + MACOSX_DEPLOYMENT_TARGET = 10.14; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = macosx; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + }; + name = Release; + }; + 33CC10FC2044A3C60003C045 /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 33E5194F232828860026EE4D /* AppInfo.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_ENTITLEMENTS = Runner/DebugProfile.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + MACOSX_DEPLOYMENT_TARGET = 15.0; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; + }; + name = Debug; + }; + 33CC10FD2044A3C60003C045 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 33E5194F232828860026EE4D /* AppInfo.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_ENTITLEMENTS = Runner/Release.entitlements; + CODE_SIGN_STYLE = Automatic; + COMBINE_HIDPI_IMAGES = YES; + INFOPLIST_FILE = Runner/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/../Frameworks", + ); + MACOSX_DEPLOYMENT_TARGET = 15.0; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_VERSION = 5.0; + }; + name = Release; + }; + 33CC111C2044C6BA0003C045 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Manual; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Debug; + }; + 33CC111D2044C6BA0003C045 /* Release */ = { 
+ isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 331C80DE294CF71000263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 331C80DB294CF71000263BE5 /* Debug */, + 331C80DC294CF71000263BE5 /* Release */, + 331C80DD294CF71000263BE5 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 33CC10E82044A3C60003C045 /* Build configuration list for PBXProject "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 33CC10F92044A3C60003C045 /* Debug */, + 33CC10FA2044A3C60003C045 /* Release */, + 338D0CE9231458BD00FA5F75 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 33CC10FB2044A3C60003C045 /* Build configuration list for PBXNativeTarget "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 33CC10FC2044A3C60003C045 /* Debug */, + 33CC10FD2044A3C60003C045 /* Release */, + 338D0CEA231458BD00FA5F75 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 33CC111B2044C6BA0003C045 /* Build configuration list for PBXAggregateTarget "Flutter Assemble" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 33CC111C2044C6BA0003C045 /* Debug */, + 33CC111D2044C6BA0003C045 /* Release */, + 338D0CEB231458BD00FA5F75 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 33CC10E52044A3C60003C045 /* Project object */; +} diff --git a/example/macos/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/example/macos/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 
0000000000..18d981003d --- /dev/null +++ b/example/macos/Runner.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme new file mode 100644 index 0000000000..a4df20256a --- /dev/null +++ b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -0,0 +1,98 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/macos/Runner.xcworkspace/contents.xcworkspacedata b/example/macos/Runner.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000000..1d526a16ed --- /dev/null +++ b/example/macos/Runner.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/example/macos/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/example/macos/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000000..18d981003d --- /dev/null +++ b/example/macos/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/example/macos/Runner/AppDelegate.swift b/example/macos/Runner/AppDelegate.swift new file mode 100644 index 0000000000..8e02df2888 --- /dev/null +++ b/example/macos/Runner/AppDelegate.swift @@ -0,0 +1,9 @@ +import Cocoa +import FlutterMacOS + +@main +class AppDelegate: FlutterAppDelegate { + override func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool { + return true + } +} diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000000..a2ec33f19f --- /dev/null +++ b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,68 @@ +{ + "images" : [ + { + "size" : 
"16x16", + "idiom" : "mac", + "filename" : "app_icon_16.png", + "scale" : "1x" + }, + { + "size" : "16x16", + "idiom" : "mac", + "filename" : "app_icon_32.png", + "scale" : "2x" + }, + { + "size" : "32x32", + "idiom" : "mac", + "filename" : "app_icon_32.png", + "scale" : "1x" + }, + { + "size" : "32x32", + "idiom" : "mac", + "filename" : "app_icon_64.png", + "scale" : "2x" + }, + { + "size" : "128x128", + "idiom" : "mac", + "filename" : "app_icon_128.png", + "scale" : "1x" + }, + { + "size" : "128x128", + "idiom" : "mac", + "filename" : "app_icon_256.png", + "scale" : "2x" + }, + { + "size" : "256x256", + "idiom" : "mac", + "filename" : "app_icon_256.png", + "scale" : "1x" + }, + { + "size" : "256x256", + "idiom" : "mac", + "filename" : "app_icon_512.png", + "scale" : "2x" + }, + { + "size" : "512x512", + "idiom" : "mac", + "filename" : "app_icon_512.png", + "scale" : "1x" + }, + { + "size" : "512x512", + "idiom" : "mac", + "filename" : "app_icon_1024.png", + "scale" : "2x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_1024.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_1024.png new file mode 100644 index 0000000000..82b6f9d9a3 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_1024.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_128.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_128.png new file mode 100644 index 0000000000..13b35eba55 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_128.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_16.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_16.png new file mode 100644 index 0000000000..0a3f5fa40f Binary files /dev/null and 
b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_16.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_256.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_256.png new file mode 100644 index 0000000000..bdb57226d5 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_256.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_32.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_32.png new file mode 100644 index 0000000000..f083318e09 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_32.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_512.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_512.png new file mode 100644 index 0000000000..326c0e72c9 Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_512.png differ diff --git a/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_64.png b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_64.png new file mode 100644 index 0000000000..2f1632cfdd Binary files /dev/null and b/example/macos/Runner/Assets.xcassets/AppIcon.appiconset/app_icon_64.png differ diff --git a/example/macos/Runner/Base.lproj/MainMenu.xib b/example/macos/Runner/Base.lproj/MainMenu.xib new file mode 100644 index 0000000000..80e867a4e0 --- /dev/null +++ b/example/macos/Runner/Base.lproj/MainMenu.xib @@ -0,0 +1,343 @@ + + + + + + + + + + + + + + + + + + + + + + +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/example/macos/Runner/Configs/AppInfo.xcconfig b/example/macos/Runner/Configs/AppInfo.xcconfig new file mode 100644 index 0000000000..27d7cbf31a --- /dev/null +++ b/example/macos/Runner/Configs/AppInfo.xcconfig @@ -0,0 +1,14 @@ +// Application-level settings for the Runner target. +// +// This may be replaced with something auto-generated from metadata (e.g., pubspec.yaml) in the +// future. If not, the values below would default to using the project name when this becomes a +// 'flutter create' template. + +// The application's name. By default this is also the title of the Flutter window. +PRODUCT_NAME = flutter_webrtc_example + +// The application's bundle identifier +PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample + +// The copyright displayed in application information +PRODUCT_COPYRIGHT = Copyright © 2023 com.cloudwebrtc.flutter-flutter-example. All rights reserved. 
diff --git a/example/macos/Runner/Configs/Debug.xcconfig b/example/macos/Runner/Configs/Debug.xcconfig new file mode 100644 index 0000000000..36b0fd9464 --- /dev/null +++ b/example/macos/Runner/Configs/Debug.xcconfig @@ -0,0 +1,2 @@ +#include "../../Flutter/Flutter-Debug.xcconfig" +#include "Warnings.xcconfig" diff --git a/example/macos/Runner/Configs/Release.xcconfig b/example/macos/Runner/Configs/Release.xcconfig new file mode 100644 index 0000000000..dff4f49561 --- /dev/null +++ b/example/macos/Runner/Configs/Release.xcconfig @@ -0,0 +1,2 @@ +#include "../../Flutter/Flutter-Release.xcconfig" +#include "Warnings.xcconfig" diff --git a/example/macos/Runner/Configs/Warnings.xcconfig b/example/macos/Runner/Configs/Warnings.xcconfig new file mode 100644 index 0000000000..42bcbf4780 --- /dev/null +++ b/example/macos/Runner/Configs/Warnings.xcconfig @@ -0,0 +1,13 @@ +WARNING_CFLAGS = -Wall -Wconditional-uninitialized -Wnullable-to-nonnull-conversion -Wmissing-method-return-type -Woverlength-strings +GCC_WARN_UNDECLARED_SELECTOR = YES +CLANG_UNDEFINED_BEHAVIOR_SANITIZER_NULLABILITY = YES +CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE +CLANG_WARN__DUPLICATE_METHOD_MATCH = YES +CLANG_WARN_PRAGMA_PACK = YES +CLANG_WARN_STRICT_PROTOTYPES = YES +CLANG_WARN_COMMA = YES +GCC_WARN_STRICT_SELECTOR_MATCH = YES +CLANG_WARN_OBJC_REPEATED_USE_OF_WEAK = YES +CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES +GCC_WARN_SHADOW = YES +CLANG_WARN_UNREACHABLE_CODE = YES diff --git a/example/macos/Runner/DebugProfile.entitlements b/example/macos/Runner/DebugProfile.entitlements new file mode 100644 index 0000000000..ab97dbc1c3 --- /dev/null +++ b/example/macos/Runner/DebugProfile.entitlements @@ -0,0 +1,18 @@ + + + + + com.apple.security.app-sandbox + + com.apple.security.cs.allow-jit + + com.apple.security.network.server + + com.apple.security.device.camera + + com.apple.security.device.microphone + + com.apple.security.network.client + + + diff --git a/example/macos/Runner/Info.plist 
b/example/macos/Runner/Info.plist new file mode 100644 index 0000000000..f2c091fe03 --- /dev/null +++ b/example/macos/Runner/Info.plist @@ -0,0 +1,38 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIconFile + + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + APPL + NSCameraUsageDescription + $(PRODUCT_NAME) Camera Usage! + NSMicrophoneUsageDescription + $(PRODUCT_NAME) Microphone Usage! + NSCameraUseContinuityCameraDeviceType + $(PRODUCT_NAME) Continuity Camera Usage! + CFBundleShortVersionString + $(FLUTTER_BUILD_NAME) + CFBundleVersion + $(FLUTTER_BUILD_NUMBER) + LSMinimumSystemVersion + $(MACOSX_DEPLOYMENT_TARGET) + NSHumanReadableCopyright + $(PRODUCT_COPYRIGHT) + NSMainNibFile + MainMenu + NSPrincipalClass + NSApplication + + diff --git a/example/macos/Runner/MainFlutterWindow.swift b/example/macos/Runner/MainFlutterWindow.swift new file mode 100644 index 0000000000..3cc05eb234 --- /dev/null +++ b/example/macos/Runner/MainFlutterWindow.swift @@ -0,0 +1,15 @@ +import Cocoa +import FlutterMacOS + +class MainFlutterWindow: NSWindow { + override func awakeFromNib() { + let flutterViewController = FlutterViewController() + let windowFrame = self.frame + self.contentViewController = flutterViewController + self.setFrame(windowFrame, display: true) + + RegisterGeneratedPlugins(registry: flutterViewController) + + super.awakeFromNib() + } +} diff --git a/example/macos/Runner/Release.entitlements b/example/macos/Runner/Release.entitlements new file mode 100644 index 0000000000..9fb1f51c72 --- /dev/null +++ b/example/macos/Runner/Release.entitlements @@ -0,0 +1,16 @@ + + + + + com.apple.security.app-sandbox + + com.apple.security.device.camera + + com.apple.security.device.microphone + + com.apple.security.network.client + + com.apple.security.network.server + + + diff --git 
a/example/macos/RunnerTests/RunnerTests.swift b/example/macos/RunnerTests/RunnerTests.swift new file mode 100644 index 0000000000..5418c9f539 --- /dev/null +++ b/example/macos/RunnerTests/RunnerTests.swift @@ -0,0 +1,12 @@ +import FlutterMacOS +import Cocoa +import XCTest + +class RunnerTests: XCTestCase { + + func testExample() { + // If you add code to the Runner application, consider adding tests here. + // See https://developer.apple.com/documentation/xctest for more information about using XCTest. + } + +} diff --git a/example/pubspec.yaml b/example/pubspec.yaml index eb984eb68e..9c5d1492b5 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -1,29 +1,36 @@ -name: webrtc_example +name: flutter_webrtc_example description: Demonstrates how to use the webrtc plugin. +version: 1.0.0 +publish_to: none +environment: + sdk: ">=3.3.0 <4.0.0" dependencies: - flutter: - sdk: flutter - # The following adds the Cupertino Icons font to your application. # Use with the CupertinoIcons class for iOS style icons. - cupertino_icons: ^0.1.0 + cupertino_icons: ^1.0.2 + flutter: + sdk: flutter + flutter_background: ^1.0.0 + flutter_webrtc: + path: ../ + # Required for MediaRecorder example + gallery_saver_plus: 3.2.4 + path_provider: ^2.0.2 + permission_handler: ^11.3.1 + sdp_transform: ^0.3.2 dev_dependencies: flutter_test: sdk: flutter - webrtc: - path: ../ - - shared_preferences: ^0.4.2 + pedantic: ^1.11.0 # For information on the generic Dart part of this file, see the # following page: https://www.dartlang.org/tools/pub/pubspec # The following section is specific to Flutter. flutter: - # The following line ensures that the Material Icons font is # included with your application, so that you can use the icons in # the material Icons class. 
@@ -57,5 +64,5 @@ flutter: # - asset: fonts/TrajanPro_Bold.ttf # weight: 700 # - # For details regarding fonts from package dependencies, + # For details regarding fonts from package dependencies, # see https://flutter.io/custom-fonts/#from-packages diff --git a/example/screenshots/android-01.png b/example/screenshots/android-01.png deleted file mode 100644 index 6af97e5769..0000000000 Binary files a/example/screenshots/android-01.png and /dev/null differ diff --git a/example/screenshots/android-02.png b/example/screenshots/android-02.png deleted file mode 100644 index 5051b70a6c..0000000000 Binary files a/example/screenshots/android-02.png and /dev/null differ diff --git a/example/screenshots/flutter-webrtc-android-example.png b/example/screenshots/flutter-webrtc-android-example.png deleted file mode 100644 index 5b0dafc3f9..0000000000 Binary files a/example/screenshots/flutter-webrtc-android-example.png and /dev/null differ diff --git a/example/screenshots/flutter-webrtc-ios-example.png b/example/screenshots/flutter-webrtc-ios-example.png deleted file mode 100644 index f23c9e697f..0000000000 Binary files a/example/screenshots/flutter-webrtc-ios-example.png and /dev/null differ diff --git a/example/screenshots/ios-01.jpeg b/example/screenshots/ios-01.jpeg deleted file mode 100644 index c472b9058a..0000000000 Binary files a/example/screenshots/ios-01.jpeg and /dev/null differ diff --git a/example/screenshots/ios-02.jpeg b/example/screenshots/ios-02.jpeg deleted file mode 100644 index ef83a95eae..0000000000 Binary files a/example/screenshots/ios-02.jpeg and /dev/null differ diff --git a/example/test/widget_test.dart b/example/test/widget_test.dart index 38eceebfa0..ec7c1a67b5 100644 --- a/example/test/widget_test.dart +++ b/example/test/widget_test.dart @@ -1,25 +1,30 @@ // This is a basic Flutter widget test. -// To perform an interaction with a widget in your test, use the WidgetTester utility that Flutter -// provides. 
For example, you can send tap and scroll gestures. You can also use WidgetTester to -// find child widgets in the widget tree, read text, and verify that the values of widget properties -// are correct. +// +// To perform an interaction with a widget in your test, use the WidgetTester +// utility in the flutter_test package. For example, you can send tap and scroll +// gestures. You can also use WidgetTester to find child widgets in the widget +// tree, read text, and verify that the values of widget properties are correct. import 'package:flutter/material.dart'; import 'package:flutter_test/flutter_test.dart'; -import 'package:webrtc_example/main.dart'; +import 'package:flutter_webrtc_example/main.dart'; void main() { - testWidgets('Verify Platform version', (WidgetTester tester) async { + testWidgets('Counter increments smoke test', (WidgetTester tester) async { // Build our app and trigger a frame. - await tester.pumpWidget(new MyApp()); + await tester.pumpWidget(MyApp()); - // Verify that platform version is retrieved. - expect( - find.byWidgetPredicate( - (Widget widget) => - widget is Text && widget.data.startsWith('Running on:'), - ), - findsOneWidget); + // Verify that our counter starts at 0. + expect(find.text('0'), findsOneWidget); + expect(find.text('1'), findsNothing); + + // Tap the '+' icon and trigger a frame. + await tester.tap(find.byIcon(Icons.add)); + await tester.pump(); + + // Verify that our counter has incremented. + expect(find.text('0'), findsNothing); + expect(find.text('1'), findsOneWidget); }); } diff --git a/example/web/e2ee.worker.dart.js b/example/web/e2ee.worker.dart.js new file mode 100644 index 0000000000..546a5faa8a --- /dev/null +++ b/example/web/e2ee.worker.dart.js @@ -0,0 +1,9907 @@ +// Generated by dart2js (NullSafetyMode.sound, csp, intern-composite-values), the Dart to JavaScript compiler version: 3.7.0. 
+// The code supports the following hooks: +// dartPrint(message): +// if this function is defined it is called instead of the Dart [print] +// method. +// +// dartMainRunner(main, args): +// if this function is defined, the Dart [main] method will not be invoked +// directly. Instead, a closure that will invoke [main], and its arguments +// [args] is passed to [dartMainRunner]. +// +// dartDeferredLibraryLoader(uri, successCallback, errorCallback, loadId, loadPriority): +// if this function is defined, it will be called when a deferred library +// is loaded. It should load and eval the javascript of `uri`, and call +// successCallback. If it fails to do so, it should call errorCallback with +// an error. The loadId argument is the deferred import that resulted in +// this uri being loaded. The loadPriority argument is an arbitrary argument +// string forwarded from the 'dart2js:load-priority' pragma option. +// dartDeferredLibraryMultiLoader(uris, successCallback, errorCallback, loadId, loadPriority): +// if this function is defined, it will be called when a deferred library +// is loaded. It should load and eval the javascript of every URI in `uris`, +// and call successCallback. If it fails to do so, it should call +// errorCallback with an error. The loadId argument is the deferred import +// that resulted in this uri being loaded. The loadPriority argument is an +// arbitrary argument string forwarded from the 'dart2js:load-priority' +// pragma option. +// +// dartCallInstrumentation(id, qualifiedName): +// if this function is defined, it will be called at each entry of a +// method or constructor. Used only when compiling programs with +// --experiment-call-instrumentation. 
+(function dartProgram() { + function copyProperties(from, to) { + var keys = Object.keys(from); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + to[key] = from[key]; + } + } + function mixinPropertiesHard(from, to) { + var keys = Object.keys(from); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + if (!to.hasOwnProperty(key)) { + to[key] = from[key]; + } + } + } + function mixinPropertiesEasy(from, to) { + Object.assign(to, from); + } + var supportsDirectProtoAccess = function() { + var cls = function() { + }; + cls.prototype = {p: {}}; + var object = new cls(); + if (!(Object.getPrototypeOf(object) && Object.getPrototypeOf(object).p === cls.prototype.p)) + return false; + try { + if (typeof navigator != "undefined" && typeof navigator.userAgent == "string" && navigator.userAgent.indexOf("Chrome/") >= 0) + return true; + if (typeof version == "function" && version.length == 0) { + var v = version(); + if (/^\d+\.\d+\.\d+\.\d+$/.test(v)) + return true; + } + } catch (_) { + } + return false; + }(); + function inherit(cls, sup) { + cls.prototype.constructor = cls; + cls.prototype["$is" + cls.name] = cls; + if (sup != null) { + if (supportsDirectProtoAccess) { + Object.setPrototypeOf(cls.prototype, sup.prototype); + return; + } + var clsPrototype = Object.create(sup.prototype); + copyProperties(cls.prototype, clsPrototype); + cls.prototype = clsPrototype; + } + } + function inheritMany(sup, classes) { + for (var i = 0; i < classes.length; i++) { + inherit(classes[i], sup); + } + } + function mixinEasy(cls, mixin) { + mixinPropertiesEasy(mixin.prototype, cls.prototype); + cls.prototype.constructor = cls; + } + function mixinHard(cls, mixin) { + mixinPropertiesHard(mixin.prototype, cls.prototype); + cls.prototype.constructor = cls; + } + function lazy(holder, name, getterName, initializer) { + var uninitializedSentinel = holder; + holder[name] = uninitializedSentinel; + holder[getterName] = function() { + if (holder[name] === 
uninitializedSentinel) { + holder[name] = initializer(); + } + holder[getterName] = function() { + return this[name]; + }; + return holder[name]; + }; + } + function lazyFinal(holder, name, getterName, initializer) { + var uninitializedSentinel = holder; + holder[name] = uninitializedSentinel; + holder[getterName] = function() { + if (holder[name] === uninitializedSentinel) { + var value = initializer(); + if (holder[name] !== uninitializedSentinel) { + A.throwLateFieldADI(name); + } + holder[name] = value; + } + var finalValue = holder[name]; + holder[getterName] = function() { + return finalValue; + }; + return finalValue; + }; + } + function makeConstList(list) { + list.$flags = 7; + return list; + } + function convertToFastObject(properties) { + function t() { + } + t.prototype = properties; + new t(); + return properties; + } + function convertAllToFastObject(arrayOfObjects) { + for (var i = 0; i < arrayOfObjects.length; ++i) { + convertToFastObject(arrayOfObjects[i]); + } + } + var functionCounter = 0; + function instanceTearOffGetter(isIntercepted, parameters) { + var cache = null; + return isIntercepted ? 
function(receiver) { + if (cache === null) + cache = A.closureFromTearOff(parameters); + return new cache(receiver, this); + } : function() { + if (cache === null) + cache = A.closureFromTearOff(parameters); + return new cache(this, null); + }; + } + function staticTearOffGetter(parameters) { + var cache = null; + return function() { + if (cache === null) + cache = A.closureFromTearOff(parameters).prototype; + return cache; + }; + } + var typesOffset = 0; + function tearOffParameters(container, isStatic, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, needsDirectAccess) { + if (typeof funType == "number") { + funType += typesOffset; + } + return {co: container, iS: isStatic, iI: isIntercepted, rC: requiredParameterCount, dV: optionalParameterDefaultValues, cs: callNames, fs: funsOrNames, fT: funType, aI: applyIndex || 0, nDA: needsDirectAccess}; + } + function installStaticTearOff(holder, getterName, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex) { + var parameters = tearOffParameters(holder, true, false, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, false); + var getterFunction = staticTearOffGetter(parameters); + holder[getterName] = getterFunction; + } + function installInstanceTearOff(prototype, getterName, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, needsDirectAccess) { + isIntercepted = !!isIntercepted; + var parameters = tearOffParameters(prototype, false, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, funsOrNames, funType, applyIndex, !!needsDirectAccess); + var getterFunction = instanceTearOffGetter(isIntercepted, parameters); + prototype[getterName] = getterFunction; + } + function setOrUpdateInterceptorsByTag(newTags) { + var tags = init.interceptorsByTag; + if (!tags) { + 
init.interceptorsByTag = newTags; + return; + } + copyProperties(newTags, tags); + } + function setOrUpdateLeafTags(newTags) { + var tags = init.leafTags; + if (!tags) { + init.leafTags = newTags; + return; + } + copyProperties(newTags, tags); + } + function updateTypes(newTypes) { + var types = init.types; + var length = types.length; + types.push.apply(types, newTypes); + return length; + } + function updateHolder(holder, newHolder) { + copyProperties(newHolder, holder); + return holder; + } + var hunkHelpers = function() { + var mkInstance = function(isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, applyIndex) { + return function(container, getterName, name, funType) { + return installInstanceTearOff(container, getterName, isIntercepted, requiredParameterCount, optionalParameterDefaultValues, callNames, [name], funType, applyIndex, false); + }; + }, + mkStatic = function(requiredParameterCount, optionalParameterDefaultValues, callNames, applyIndex) { + return function(container, getterName, name, funType) { + return installStaticTearOff(container, getterName, requiredParameterCount, optionalParameterDefaultValues, callNames, [name], funType, applyIndex); + }; + }; + return {inherit: inherit, inheritMany: inheritMany, mixin: mixinEasy, mixinHard: mixinHard, installStaticTearOff: installStaticTearOff, installInstanceTearOff: installInstanceTearOff, _instance_0u: mkInstance(0, 0, null, ["call$0"], 0), _instance_1u: mkInstance(0, 1, null, ["call$1"], 0), _instance_2u: mkInstance(0, 2, null, ["call$2"], 0), _instance_0i: mkInstance(1, 0, null, ["call$0"], 0), _instance_1i: mkInstance(1, 1, null, ["call$1"], 0), _instance_2i: mkInstance(1, 2, null, ["call$2"], 0), _static_0: mkStatic(0, null, ["call$0"], 0), _static_1: mkStatic(1, null, ["call$1"], 0), _static_2: mkStatic(2, null, ["call$2"], 0), makeConstList: makeConstList, lazy: lazy, lazyFinal: lazyFinal, updateHolder: updateHolder, convertToFastObject: convertToFastObject, 
updateTypes: updateTypes, setOrUpdateInterceptorsByTag: setOrUpdateInterceptorsByTag, setOrUpdateLeafTags: setOrUpdateLeafTags}; + }(); + function initializeDeferredHunk(hunk) { + typesOffset = init.types.length; + hunk(hunkHelpers, init, holders, $); + } + var J = { + makeDispatchRecord(interceptor, proto, extension, indexability) { + return {i: interceptor, p: proto, e: extension, x: indexability}; + }, + getNativeInterceptor(object) { + var proto, objectProto, $constructor, interceptor, t1, + record = object[init.dispatchPropertyName]; + if (record == null) + if ($.initNativeDispatchFlag == null) { + A.initNativeDispatch(); + record = object[init.dispatchPropertyName]; + } + if (record != null) { + proto = record.p; + if (false === proto) + return record.i; + if (true === proto) + return object; + objectProto = Object.getPrototypeOf(object); + if (proto === objectProto) + return record.i; + if (record.e === objectProto) + throw A.wrapException(A.UnimplementedError$("Return interceptor for " + A.S(proto(object, record)))); + } + $constructor = object.constructor; + if ($constructor == null) + interceptor = null; + else { + t1 = $._JS_INTEROP_INTERCEPTOR_TAG; + if (t1 == null) + t1 = $._JS_INTEROP_INTERCEPTOR_TAG = init.getIsolateTag("_$dart_js"); + interceptor = $constructor[t1]; + } + if (interceptor != null) + return interceptor; + interceptor = A.lookupAndCacheInterceptor(object); + if (interceptor != null) + return interceptor; + if (typeof object == "function") + return B.JavaScriptFunction_methods; + proto = Object.getPrototypeOf(object); + if (proto == null) + return B.PlainJavaScriptObject_methods; + if (proto === Object.prototype) + return B.PlainJavaScriptObject_methods; + if (typeof $constructor == "function") { + t1 = $._JS_INTEROP_INTERCEPTOR_TAG; + if (t1 == null) + t1 = $._JS_INTEROP_INTERCEPTOR_TAG = init.getIsolateTag("_$dart_js"); + Object.defineProperty($constructor, t1, {value: B.UnknownJavaScriptObject_methods, enumerable: false, writable: 
true, configurable: true}); + return B.UnknownJavaScriptObject_methods; + } + return B.UnknownJavaScriptObject_methods; + }, + JSArray_JSArray$fixed($length, $E) { + if ($length < 0 || $length > 4294967295) + throw A.wrapException(A.RangeError$range($length, 0, 4294967295, "length", null)); + return J.JSArray_JSArray$markFixed(new Array($length), $E); + }, + JSArray_JSArray$markFixed(allocation, $E) { + var t1 = A._setArrayType(allocation, $E._eval$1("JSArray<0>")); + t1.$flags = 1; + return t1; + }, + getInterceptor$(receiver) { + if (typeof receiver == "number") { + if (Math.floor(receiver) == receiver) + return J.JSInt.prototype; + return J.JSNumNotInt.prototype; + } + if (typeof receiver == "string") + return J.JSString.prototype; + if (receiver == null) + return J.JSNull.prototype; + if (typeof receiver == "boolean") + return J.JSBool.prototype; + if (Array.isArray(receiver)) + return J.JSArray.prototype; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + getInterceptor$asx(receiver) { + if (typeof receiver == "string") + return J.JSString.prototype; + if (receiver == null) + return receiver; + if (Array.isArray(receiver)) + return J.JSArray.prototype; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + getInterceptor$ax(receiver) { + if (receiver == null) + return receiver; + if 
(Array.isArray(receiver)) + return J.JSArray.prototype; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + getInterceptor$x(receiver) { + if (receiver == null) + return receiver; + if (typeof receiver != "object") { + if (typeof receiver == "function") + return J.JavaScriptFunction.prototype; + if (typeof receiver == "symbol") + return J.JavaScriptSymbol.prototype; + if (typeof receiver == "bigint") + return J.JavaScriptBigInt.prototype; + return receiver; + } + if (receiver instanceof A.Object) + return receiver; + return J.getNativeInterceptor(receiver); + }, + get$buffer$x(receiver) { + return J.getInterceptor$x(receiver).get$buffer(receiver); + }, + get$hashCode$(receiver) { + return J.getInterceptor$(receiver).get$hashCode(receiver); + }, + get$iterator$ax(receiver) { + return J.getInterceptor$ax(receiver).get$iterator(receiver); + }, + get$length$asx(receiver) { + return J.getInterceptor$asx(receiver).get$length(receiver); + }, + get$runtimeType$(receiver) { + return J.getInterceptor$(receiver).get$runtimeType(receiver); + }, + $eq$(receiver, a0) { + if (receiver == null) + return a0 == null; + if (typeof receiver != "object") + return a0 != null && receiver === a0; + return J.getInterceptor$(receiver).$eq(receiver, a0); + }, + $index$asx(receiver, a0) { + if (typeof a0 === "number") + if (Array.isArray(receiver) || typeof receiver == "string" || A.isJsIndexable(receiver, receiver[init.dispatchPropertyName])) + if (a0 >>> 0 === a0 && a0 < receiver.length) + return receiver[a0]; + return J.getInterceptor$asx(receiver).$index(receiver, a0); + }, + _setInt8$2$x(receiver, a0, a1) { + return 
J.getInterceptor$x(receiver)._setInt8$2(receiver, a0, a1); + }, + add$1$ax(receiver, a0) { + return J.getInterceptor$ax(receiver).add$1(receiver, a0); + }, + asUint8List$0$x(receiver) { + return J.getInterceptor$x(receiver).asUint8List$0(receiver); + }, + asUint8List$2$x(receiver, a0, a1) { + return J.getInterceptor$x(receiver).asUint8List$2(receiver, a0, a1); + }, + elementAt$1$ax(receiver, a0) { + return J.getInterceptor$ax(receiver).elementAt$1(receiver, a0); + }, + map$1$1$ax(receiver, a0, $T1) { + return J.getInterceptor$ax(receiver).map$1$1(receiver, a0, $T1); + }, + noSuchMethod$1$(receiver, a0) { + return J.getInterceptor$(receiver).noSuchMethod$1(receiver, a0); + }, + toString$0$(receiver) { + return J.getInterceptor$(receiver).toString$0(receiver); + }, + Interceptor: function Interceptor() { + }, + JSBool: function JSBool() { + }, + JSNull: function JSNull() { + }, + JavaScriptObject: function JavaScriptObject() { + }, + LegacyJavaScriptObject: function LegacyJavaScriptObject() { + }, + PlainJavaScriptObject: function PlainJavaScriptObject() { + }, + UnknownJavaScriptObject: function UnknownJavaScriptObject() { + }, + JavaScriptFunction: function JavaScriptFunction() { + }, + JavaScriptBigInt: function JavaScriptBigInt() { + }, + JavaScriptSymbol: function JavaScriptSymbol() { + }, + JSArray: function JSArray(t0) { + this.$ti = t0; + }, + JSUnmodifiableArray: function JSUnmodifiableArray(t0) { + this.$ti = t0; + }, + ArrayIterator: function ArrayIterator(t0, t1, t2) { + var _ = this; + _._iterable = t0; + _._length = t1; + _._index = 0; + _._current = null; + _.$ti = t2; + }, + JSNumber: function JSNumber() { + }, + JSInt: function JSInt() { + }, + JSNumNotInt: function JSNumNotInt() { + }, + JSString: function JSString() { + } + }, + A = {JS_CONST: function JS_CONST() { + }, + SystemHash_combine(hash, value) { + hash = hash + value & 536870911; + hash = hash + ((hash & 524287) << 10) & 536870911; + return hash ^ hash >>> 6; + }, + 
SystemHash_finish(hash) { + hash = hash + ((hash & 67108863) << 3) & 536870911; + hash ^= hash >>> 11; + return hash + ((hash & 16383) << 15) & 536870911; + }, + checkNotNullable(value, $name, $T) { + return value; + }, + isToStringVisiting(object) { + var t1, i; + for (t1 = $.toStringVisiting.length, i = 0; i < t1; ++i) + if (object === $.toStringVisiting[i]) + return true; + return false; + }, + MappedIterable_MappedIterable(iterable, $function, $S, $T) { + if (type$.EfficientLengthIterable_dynamic._is(iterable)) + return new A.EfficientLengthMappedIterable(iterable, $function, $S._eval$1("@<0>")._bind$1($T)._eval$1("EfficientLengthMappedIterable<1,2>")); + return new A.MappedIterable(iterable, $function, $S._eval$1("@<0>")._bind$1($T)._eval$1("MappedIterable<1,2>")); + }, + _CopyingBytesBuilder: function _CopyingBytesBuilder(t0) { + this.__internal$_length = 0; + this._buffer = t0; + }, + LateError: function LateError(t0) { + this._message = t0; + }, + SentinelValue: function SentinelValue() { + }, + EfficientLengthIterable: function EfficientLengthIterable() { + }, + ListIterable: function ListIterable() { + }, + ListIterator: function ListIterator(t0, t1, t2) { + var _ = this; + _.__internal$_iterable = t0; + _.__internal$_length = t1; + _.__internal$_index = 0; + _.__internal$_current = null; + _.$ti = t2; + }, + MappedIterable: function MappedIterable(t0, t1, t2) { + this.__internal$_iterable = t0; + this._f = t1; + this.$ti = t2; + }, + EfficientLengthMappedIterable: function EfficientLengthMappedIterable(t0, t1, t2) { + this.__internal$_iterable = t0; + this._f = t1; + this.$ti = t2; + }, + MappedIterator: function MappedIterator(t0, t1, t2) { + var _ = this; + _.__internal$_current = null; + _._iterator = t0; + _._f = t1; + _.$ti = t2; + }, + MappedListIterable: function MappedListIterable(t0, t1, t2) { + this._source = t0; + this._f = t1; + this.$ti = t2; + }, + WhereIterable: function WhereIterable(t0, t1, t2) { + this.__internal$_iterable = t0; + 
this._f = t1; + this.$ti = t2; + }, + WhereIterator: function WhereIterator(t0, t1, t2) { + this._iterator = t0; + this._f = t1; + this.$ti = t2; + }, + FixedLengthListMixin: function FixedLengthListMixin() { + }, + Symbol: function Symbol(t0) { + this.__internal$_name = t0; + }, + unminifyOrTag(rawClassName) { + var preserved = init.mangledGlobalNames[rawClassName]; + if (preserved != null) + return preserved; + return rawClassName; + }, + isJsIndexable(object, record) { + var result; + if (record != null) { + result = record.x; + if (result != null) + return result; + } + return type$.JavaScriptIndexingBehavior_dynamic._is(object); + }, + S(value) { + var result; + if (typeof value == "string") + return value; + if (typeof value == "number") { + if (value !== 0) + return "" + value; + } else if (true === value) + return "true"; + else if (false === value) + return "false"; + else if (value == null) + return "null"; + result = J.toString$0$(value); + return result; + }, + Primitives_objectHashCode(object) { + var hash, + property = $.Primitives__identityHashCodeProperty; + if (property == null) + property = $.Primitives__identityHashCodeProperty = Symbol("identityHashCode"); + hash = object[property]; + if (hash == null) { + hash = Math.random() * 0x3fffffff | 0; + object[property] = hash; + } + return hash; + }, + Primitives_objectTypeName(object) { + return A.Primitives__objectTypeNameNewRti(object); + }, + Primitives__objectTypeNameNewRti(object) { + var interceptor, dispatchName, $constructor, constructorName; + if (object instanceof A.Object) + return A._rtiToString(A.instanceType(object), null); + interceptor = J.getInterceptor$(object); + if (interceptor === B.Interceptor_methods || interceptor === B.JavaScriptObject_methods || type$.UnknownJavaScriptObject._is(object)) { + dispatchName = B.C_JS_CONST(object); + if (dispatchName !== "Object" && dispatchName !== "") + return dispatchName; + $constructor = object.constructor; + if (typeof $constructor == 
"function") { + constructorName = $constructor.name; + if (typeof constructorName == "string" && constructorName !== "Object" && constructorName !== "") + return constructorName; + } + } + return A._rtiToString(A.instanceType(object), null); + }, + Primitives_safeToString(object) { + if (typeof object == "number" || A._isBool(object)) + return J.toString$0$(object); + if (typeof object == "string") + return JSON.stringify(object); + if (object instanceof A.Closure) + return object.toString$0(0); + return "Instance of '" + A.Primitives_objectTypeName(object) + "'"; + }, + Primitives_stringFromNativeUint8List(charCodes, start, end) { + var i, result, i0, chunkEnd; + if (end <= 500 && start === 0 && end === charCodes.length) + return String.fromCharCode.apply(null, charCodes); + for (i = start, result = ""; i < end; i = i0) { + i0 = i + 500; + chunkEnd = i0 < end ? i0 : end; + result += String.fromCharCode.apply(null, charCodes.subarray(i, chunkEnd)); + } + return result; + }, + Primitives_lazyAsJsDate(receiver) { + if (receiver.date === void 0) + receiver.date = new Date(receiver._value); + return receiver.date; + }, + Primitives_getYear(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCFullYear() + 0 : A.Primitives_lazyAsJsDate(receiver).getFullYear() + 0; + }, + Primitives_getMonth(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCMonth() + 1 : A.Primitives_lazyAsJsDate(receiver).getMonth() + 1; + }, + Primitives_getDay(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCDate() + 0 : A.Primitives_lazyAsJsDate(receiver).getDate() + 0; + }, + Primitives_getHours(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCHours() + 0 : A.Primitives_lazyAsJsDate(receiver).getHours() + 0; + }, + Primitives_getMinutes(receiver) { + return receiver.isUtc ? 
A.Primitives_lazyAsJsDate(receiver).getUTCMinutes() + 0 : A.Primitives_lazyAsJsDate(receiver).getMinutes() + 0; + }, + Primitives_getSeconds(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCSeconds() + 0 : A.Primitives_lazyAsJsDate(receiver).getSeconds() + 0; + }, + Primitives_getMilliseconds(receiver) { + return receiver.isUtc ? A.Primitives_lazyAsJsDate(receiver).getUTCMilliseconds() + 0 : A.Primitives_lazyAsJsDate(receiver).getMilliseconds() + 0; + }, + Primitives_functionNoSuchMethod($function, positionalArguments, namedArguments) { + var $arguments, namedArgumentList, t1 = {}; + t1.argumentCount = 0; + $arguments = []; + namedArgumentList = []; + t1.argumentCount = positionalArguments.length; + B.JSArray_methods.addAll$1($arguments, positionalArguments); + t1.names = ""; + if (namedArguments != null && namedArguments.__js_helper$_length !== 0) + namedArguments.forEach$1(0, new A.Primitives_functionNoSuchMethod_closure(t1, namedArgumentList, $arguments)); + return J.noSuchMethod$1$($function, new A.JSInvocationMirror(B.Symbol_call, 0, $arguments, namedArgumentList, 0)); + }, + Primitives_applyFunction($function, positionalArguments, namedArguments) { + var t1, argumentCount, jsStub; + if (Array.isArray(positionalArguments)) + t1 = namedArguments == null || namedArguments.__js_helper$_length === 0; + else + t1 = false; + if (t1) { + argumentCount = positionalArguments.length; + if (argumentCount === 0) { + if (!!$function.call$0) + return $function.call$0(); + } else if (argumentCount === 1) { + if (!!$function.call$1) + return $function.call$1(positionalArguments[0]); + } else if (argumentCount === 2) { + if (!!$function.call$2) + return $function.call$2(positionalArguments[0], positionalArguments[1]); + } else if (argumentCount === 3) { + if (!!$function.call$3) + return $function.call$3(positionalArguments[0], positionalArguments[1], positionalArguments[2]); + } else if (argumentCount === 4) { + if (!!$function.call$4) + return 
$function.call$4(positionalArguments[0], positionalArguments[1], positionalArguments[2], positionalArguments[3]); + } else if (argumentCount === 5) + if (!!$function.call$5) + return $function.call$5(positionalArguments[0], positionalArguments[1], positionalArguments[2], positionalArguments[3], positionalArguments[4]); + jsStub = $function["call" + "$" + argumentCount]; + if (jsStub != null) + return jsStub.apply($function, positionalArguments); + } + return A.Primitives__generalApplyFunction($function, positionalArguments, namedArguments); + }, + Primitives__generalApplyFunction($function, positionalArguments, namedArguments) { + var defaultValuesClosure, t1, defaultValues, interceptor, jsFunction, maxArguments, missingDefaults, keys, _i, defaultValue, used, key, + $arguments = Array.isArray(positionalArguments) ? positionalArguments : A.List_List$of(positionalArguments, true, type$.dynamic), + argumentCount = $arguments.length, + requiredParameterCount = $function.$requiredArgCount; + if (argumentCount < requiredParameterCount) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + defaultValuesClosure = $function.$defaultValues; + t1 = defaultValuesClosure == null; + defaultValues = !t1 ? 
defaultValuesClosure() : null; + interceptor = J.getInterceptor$($function); + jsFunction = interceptor["call*"]; + if (typeof jsFunction == "string") + jsFunction = interceptor[jsFunction]; + if (t1) { + if (namedArguments != null && namedArguments.__js_helper$_length !== 0) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + if (argumentCount === requiredParameterCount) + return jsFunction.apply($function, $arguments); + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + } + if (Array.isArray(defaultValues)) { + if (namedArguments != null && namedArguments.__js_helper$_length !== 0) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + maxArguments = requiredParameterCount + defaultValues.length; + if (argumentCount > maxArguments) + return A.Primitives_functionNoSuchMethod($function, $arguments, null); + if (argumentCount < maxArguments) { + missingDefaults = defaultValues.slice(argumentCount - requiredParameterCount); + if ($arguments === positionalArguments) + $arguments = A.List_List$of($arguments, true, type$.dynamic); + B.JSArray_methods.addAll$1($arguments, missingDefaults); + } + return jsFunction.apply($function, $arguments); + } else { + if (argumentCount > requiredParameterCount) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + if ($arguments === positionalArguments) + $arguments = A.List_List$of($arguments, true, type$.dynamic); + keys = Object.keys(defaultValues); + if (namedArguments == null) + for (t1 = keys.length, _i = 0; _i < keys.length; keys.length === t1 || (0, A.throwConcurrentModificationError)(keys), ++_i) { + defaultValue = defaultValues[A._asString(keys[_i])]; + if (B.C__Required === defaultValue) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + B.JSArray_methods.add$1($arguments, defaultValue); + } + else { + for (t1 = keys.length, used = 0, _i = 0; _i < keys.length; 
keys.length === t1 || (0, A.throwConcurrentModificationError)(keys), ++_i) { + key = A._asString(keys[_i]); + if (namedArguments.containsKey$1(key)) { + ++used; + B.JSArray_methods.add$1($arguments, namedArguments.$index(0, key)); + } else { + defaultValue = defaultValues[key]; + if (B.C__Required === defaultValue) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + B.JSArray_methods.add$1($arguments, defaultValue); + } + } + if (used !== namedArguments.__js_helper$_length) + return A.Primitives_functionNoSuchMethod($function, $arguments, namedArguments); + } + return jsFunction.apply($function, $arguments); + } + }, + Primitives_extractStackTrace(error) { + var jsError = error.$thrownJsError; + if (jsError == null) + return null; + return A.getTraceFromException(jsError); + }, + Primitives_trySetStackTrace(error, stackTrace) { + var jsError; + if (error.$thrownJsError == null) { + jsError = A.wrapException(error); + error.$thrownJsError = jsError; + jsError.stack = stackTrace.toString$0(0); + } + }, + iae(argument) { + throw A.wrapException(A.argumentErrorValue(argument)); + }, + ioore(receiver, index) { + if (receiver == null) + J.get$length$asx(receiver); + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + }, + diagnoseIndexError(indexable, index) { + var $length, _s5_ = "index"; + if (!A._isInt(index)) + return new A.ArgumentError(true, index, _s5_, null); + $length = A._asInt(J.get$length$asx(indexable)); + if (index < 0 || index >= $length) + return A.IndexError$withLength(index, $length, indexable, _s5_); + return A.RangeError$value(index, _s5_); + }, + diagnoseRangeError(start, end, $length) { + if (start < 0 || start > $length) + return A.RangeError$range(start, 0, $length, "start", null); + if (end != null) + if (end < start || end > $length) + return A.RangeError$range(end, start, $length, "end", null); + return new A.ArgumentError(true, end, "end", null); + }, + argumentErrorValue(object) { + return new 
A.ArgumentError(true, object, null, null); + }, + wrapException(ex) { + return A.initializeExceptionWrapper(new Error(), ex); + }, + initializeExceptionWrapper(wrapper, ex) { + var t1; + if (ex == null) + ex = new A.TypeError(); + wrapper.dartException = ex; + t1 = A.toStringWrapper; + if ("defineProperty" in Object) { + Object.defineProperty(wrapper, "message", {get: t1}); + wrapper.name = ""; + } else + wrapper.toString = t1; + return wrapper; + }, + toStringWrapper() { + return J.toString$0$(this.dartException); + }, + throwExpression(ex) { + throw A.wrapException(ex); + }, + throwExpressionWithWrapper(ex, wrapper) { + throw A.initializeExceptionWrapper(wrapper, ex); + }, + throwUnsupportedOperation(o, operation, verb) { + var wrapper; + if (operation == null) + operation = 0; + if (verb == null) + verb = 0; + wrapper = Error(); + A.throwExpressionWithWrapper(A._diagnoseUnsupportedOperation(o, operation, verb), wrapper); + }, + _diagnoseUnsupportedOperation(o, encodedOperation, encodedVerb) { + var operation, table, tableLength, index, verb, object, flags, article, adjective; + if (typeof encodedOperation == "string") + operation = encodedOperation; + else { + table = "[]=;add;removeWhere;retainWhere;removeRange;setRange;setInt8;setInt16;setInt32;setUint8;setUint16;setUint32;setFloat32;setFloat64".split(";"); + tableLength = table.length; + index = encodedOperation; + if (index > tableLength) { + encodedVerb = index / tableLength | 0; + index %= tableLength; + } + operation = table[index]; + } + verb = typeof encodedVerb == "string" ? encodedVerb : "modify;remove from;add to".split(";")[encodedVerb]; + object = type$.List_dynamic._is(o) ? "list" : "ByteData"; + flags = o.$flags | 0; + article = "a "; + if ((flags & 4) !== 0) + adjective = "constant "; + else if ((flags & 2) !== 0) { + adjective = "unmodifiable "; + article = "an "; + } else + adjective = (flags & 1) !== 0 ? 
"fixed-length " : ""; + return new A.UnsupportedError("'" + operation + "': Cannot " + verb + " " + article + adjective + object); + }, + throwConcurrentModificationError(collection) { + throw A.wrapException(A.ConcurrentModificationError$(collection)); + }, + TypeErrorDecoder_extractPattern(message) { + var match, $arguments, argumentsExpr, expr, method, receiver; + message = A.quoteStringForRegExp(message.replace(String({}), "$receiver$")); + match = message.match(/\\\$[a-zA-Z]+\\\$/g); + if (match == null) + match = A._setArrayType([], type$.JSArray_String); + $arguments = match.indexOf("\\$arguments\\$"); + argumentsExpr = match.indexOf("\\$argumentsExpr\\$"); + expr = match.indexOf("\\$expr\\$"); + method = match.indexOf("\\$method\\$"); + receiver = match.indexOf("\\$receiver\\$"); + return new A.TypeErrorDecoder(message.replace(new RegExp("\\\\\\$arguments\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$argumentsExpr\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$expr\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$method\\\\\\$", "g"), "((?:x|[^x])*)").replace(new RegExp("\\\\\\$receiver\\\\\\$", "g"), "((?:x|[^x])*)"), $arguments, argumentsExpr, expr, method, receiver); + }, + TypeErrorDecoder_provokeCallErrorOn(expression) { + return function($expr$) { + var $argumentsExpr$ = "$arguments$"; + try { + $expr$.$method$($argumentsExpr$); + } catch (e) { + return e.message; + } + }(expression); + }, + TypeErrorDecoder_provokePropertyErrorOn(expression) { + return function($expr$) { + try { + $expr$.$method$; + } catch (e) { + return e.message; + } + }(expression); + }, + JsNoSuchMethodError$(_message, match) { + var t1 = match == null, + t2 = t1 ? null : match.method; + return new A.JsNoSuchMethodError(_message, t2, t1 ? 
null : match.receiver); + }, + unwrapException(ex) { + var t1; + if (ex == null) + return new A.NullThrownFromJavaScriptException(ex); + if (ex instanceof A.ExceptionAndStackTrace) { + t1 = ex.dartException; + return A.saveStackTrace(ex, t1 == null ? type$.Object._as(t1) : t1); + } + if (typeof ex !== "object") + return ex; + if ("dartException" in ex) + return A.saveStackTrace(ex, ex.dartException); + return A._unwrapNonDartException(ex); + }, + saveStackTrace(ex, error) { + if (type$.Error._is(error)) + if (error.$thrownJsError == null) + error.$thrownJsError = ex; + return error; + }, + _unwrapNonDartException(ex) { + var message, number, ieErrorCode, nsme, notClosure, nullCall, nullLiteralCall, undefCall, undefLiteralCall, nullProperty, undefProperty, undefLiteralProperty, match; + if (!("message" in ex)) + return ex; + message = ex.message; + if ("number" in ex && typeof ex.number == "number") { + number = ex.number; + ieErrorCode = number & 65535; + if ((B.JSInt_methods._shrOtherPositive$1(number, 16) & 8191) === 10) + switch (ieErrorCode) { + case 438: + return A.saveStackTrace(ex, A.JsNoSuchMethodError$(A.S(message) + " (Error " + ieErrorCode + ")", null)); + case 445: + case 5007: + A.S(message); + return A.saveStackTrace(ex, new A.NullError()); + } + } + if (ex instanceof TypeError) { + nsme = $.$get$TypeErrorDecoder_noSuchMethodPattern(); + notClosure = $.$get$TypeErrorDecoder_notClosurePattern(); + nullCall = $.$get$TypeErrorDecoder_nullCallPattern(); + nullLiteralCall = $.$get$TypeErrorDecoder_nullLiteralCallPattern(); + undefCall = $.$get$TypeErrorDecoder_undefinedCallPattern(); + undefLiteralCall = $.$get$TypeErrorDecoder_undefinedLiteralCallPattern(); + nullProperty = $.$get$TypeErrorDecoder_nullPropertyPattern(); + $.$get$TypeErrorDecoder_nullLiteralPropertyPattern(); + undefProperty = $.$get$TypeErrorDecoder_undefinedPropertyPattern(); + undefLiteralProperty = $.$get$TypeErrorDecoder_undefinedLiteralPropertyPattern(); + match = 
nsme.matchTypeError$1(message); + if (match != null) + return A.saveStackTrace(ex, A.JsNoSuchMethodError$(A._asString(message), match)); + else { + match = notClosure.matchTypeError$1(message); + if (match != null) { + match.method = "call"; + return A.saveStackTrace(ex, A.JsNoSuchMethodError$(A._asString(message), match)); + } else if (nullCall.matchTypeError$1(message) != null || nullLiteralCall.matchTypeError$1(message) != null || undefCall.matchTypeError$1(message) != null || undefLiteralCall.matchTypeError$1(message) != null || nullProperty.matchTypeError$1(message) != null || nullLiteralCall.matchTypeError$1(message) != null || undefProperty.matchTypeError$1(message) != null || undefLiteralProperty.matchTypeError$1(message) != null) { + A._asString(message); + return A.saveStackTrace(ex, new A.NullError()); + } + } + return A.saveStackTrace(ex, new A.UnknownJsTypeError(typeof message == "string" ? message : "")); + } + if (ex instanceof RangeError) { + if (typeof message == "string" && message.indexOf("call stack") !== -1) + return new A.StackOverflowError(); + message = function(ex) { + try { + return String(ex); + } catch (e) { + } + return null; + }(ex); + return A.saveStackTrace(ex, new A.ArgumentError(false, null, null, typeof message == "string" ? 
message.replace(/^RangeError:\s*/, "") : message)); + } + if (typeof InternalError == "function" && ex instanceof InternalError) + if (typeof message == "string" && message === "too much recursion") + return new A.StackOverflowError(); + return ex; + }, + getTraceFromException(exception) { + var trace; + if (exception instanceof A.ExceptionAndStackTrace) + return exception.stackTrace; + if (exception == null) + return new A._StackTrace(exception); + trace = exception.$cachedTrace; + if (trace != null) + return trace; + trace = new A._StackTrace(exception); + if (typeof exception === "object") + exception.$cachedTrace = trace; + return trace; + }, + objectHashCode(object) { + if (object == null) + return J.get$hashCode$(object); + if (typeof object == "object") + return A.Primitives_objectHashCode(object); + return J.get$hashCode$(object); + }, + fillLiteralMap(keyValuePairs, result) { + var index, index0, index1, + $length = keyValuePairs.length; + for (index = 0; index < $length; index = index1) { + index0 = index + 1; + index1 = index0 + 1; + result.$indexSet(0, keyValuePairs[index], keyValuePairs[index0]); + } + return result; + }, + _invokeClosure(closure, numberOfArguments, arg1, arg2, arg3, arg4) { + type$.Function._as(closure); + switch (A._asInt(numberOfArguments)) { + case 0: + return closure.call$0(); + case 1: + return closure.call$1(arg1); + case 2: + return closure.call$2(arg1, arg2); + case 3: + return closure.call$3(arg1, arg2, arg3); + case 4: + return closure.call$4(arg1, arg2, arg3, arg4); + } + throw A.wrapException(A.Exception_Exception("Unsupported number of arguments for wrapped closure")); + }, + convertDartClosureToJS(closure, arity) { + var $function = closure.$identity; + if (!!$function) + return $function; + $function = A.convertDartClosureToJSUncached(closure, arity); + closure.$identity = $function; + return $function; + }, + convertDartClosureToJSUncached(closure, arity) { + var entry; + switch (arity) { + case 0: + entry = 
closure.call$0; + break; + case 1: + entry = closure.call$1; + break; + case 2: + entry = closure.call$2; + break; + case 3: + entry = closure.call$3; + break; + case 4: + entry = closure.call$4; + break; + default: + entry = null; + } + if (entry != null) + return entry.bind(closure); + return function(closure, arity, invoke) { + return function(a1, a2, a3, a4) { + return invoke(closure, arity, a1, a2, a3, a4); + }; + }(closure, arity, A._invokeClosure); + }, + Closure_fromTearOff(parameters) { + var $prototype, $constructor, t2, trampoline, applyTrampoline, i, stub, stub0, stubName, stubCallName, + container = parameters.co, + isStatic = parameters.iS, + isIntercepted = parameters.iI, + needsDirectAccess = parameters.nDA, + applyTrampolineIndex = parameters.aI, + funsOrNames = parameters.fs, + callNames = parameters.cs, + $name = funsOrNames[0], + callName = callNames[0], + $function = container[$name], + t1 = parameters.fT; + t1.toString; + $prototype = isStatic ? Object.create(new A.StaticClosure().constructor.prototype) : Object.create(new A.BoundClosure(null, null).constructor.prototype); + $prototype.$initialize = $prototype.constructor; + $constructor = isStatic ? 
function static_tear_off() { + this.$initialize(); + } : function tear_off(a, b) { + this.$initialize(a, b); + }; + $prototype.constructor = $constructor; + $constructor.prototype = $prototype; + $prototype.$_name = $name; + $prototype.$_target = $function; + t2 = !isStatic; + if (t2) + trampoline = A.Closure_forwardCallTo($name, $function, isIntercepted, needsDirectAccess); + else { + $prototype.$static_name = $name; + trampoline = $function; + } + $prototype.$signature = A.Closure__computeSignatureFunctionNewRti(t1, isStatic, isIntercepted); + $prototype[callName] = trampoline; + for (applyTrampoline = trampoline, i = 1; i < funsOrNames.length; ++i) { + stub = funsOrNames[i]; + if (typeof stub == "string") { + stub0 = container[stub]; + stubName = stub; + stub = stub0; + } else + stubName = ""; + stubCallName = callNames[i]; + if (stubCallName != null) { + if (t2) + stub = A.Closure_forwardCallTo(stubName, stub, isIntercepted, needsDirectAccess); + $prototype[stubCallName] = stub; + } + if (i === applyTrampolineIndex) + applyTrampoline = stub; + } + $prototype["call*"] = applyTrampoline; + $prototype.$requiredArgCount = parameters.rC; + $prototype.$defaultValues = parameters.dV; + return $constructor; + }, + Closure__computeSignatureFunctionNewRti(functionType, isStatic, isIntercepted) { + if (typeof functionType == "number") + return functionType; + if (typeof functionType == "string") { + if (isStatic) + throw A.wrapException("Cannot compute signature for static tearoff."); + return function(recipe, evalOnReceiver) { + return function() { + return evalOnReceiver(this, recipe); + }; + }(functionType, A.BoundClosure_evalRecipe); + } + throw A.wrapException("Error in functionType of tearoff"); + }, + Closure_cspForwardCall(arity, needsDirectAccess, stubName, $function) { + var getReceiver = A.BoundClosure_receiverOf; + switch (needsDirectAccess ? 
-1 : arity) { + case 0: + return function(entry, receiverOf) { + return function() { + return receiverOf(this)[entry](); + }; + }(stubName, getReceiver); + case 1: + return function(entry, receiverOf) { + return function(a) { + return receiverOf(this)[entry](a); + }; + }(stubName, getReceiver); + case 2: + return function(entry, receiverOf) { + return function(a, b) { + return receiverOf(this)[entry](a, b); + }; + }(stubName, getReceiver); + case 3: + return function(entry, receiverOf) { + return function(a, b, c) { + return receiverOf(this)[entry](a, b, c); + }; + }(stubName, getReceiver); + case 4: + return function(entry, receiverOf) { + return function(a, b, c, d) { + return receiverOf(this)[entry](a, b, c, d); + }; + }(stubName, getReceiver); + case 5: + return function(entry, receiverOf) { + return function(a, b, c, d, e) { + return receiverOf(this)[entry](a, b, c, d, e); + }; + }(stubName, getReceiver); + default: + return function(f, receiverOf) { + return function() { + return f.apply(receiverOf(this), arguments); + }; + }($function, getReceiver); + } + }, + Closure_forwardCallTo(stubName, $function, isIntercepted, needsDirectAccess) { + if (isIntercepted) + return A.Closure_forwardInterceptedCallTo(stubName, $function, needsDirectAccess); + return A.Closure_cspForwardCall($function.length, needsDirectAccess, stubName, $function); + }, + Closure_cspForwardInterceptedCall(arity, needsDirectAccess, stubName, $function) { + var getReceiver = A.BoundClosure_receiverOf, + getInterceptor = A.BoundClosure_interceptorOf; + switch (needsDirectAccess ? 
-1 : arity) { + case 0: + throw A.wrapException(new A.RuntimeError("Intercepted function with no arguments.")); + case 1: + return function(entry, interceptorOf, receiverOf) { + return function() { + return interceptorOf(this)[entry](receiverOf(this)); + }; + }(stubName, getInterceptor, getReceiver); + case 2: + return function(entry, interceptorOf, receiverOf) { + return function(a) { + return interceptorOf(this)[entry](receiverOf(this), a); + }; + }(stubName, getInterceptor, getReceiver); + case 3: + return function(entry, interceptorOf, receiverOf) { + return function(a, b) { + return interceptorOf(this)[entry](receiverOf(this), a, b); + }; + }(stubName, getInterceptor, getReceiver); + case 4: + return function(entry, interceptorOf, receiverOf) { + return function(a, b, c) { + return interceptorOf(this)[entry](receiverOf(this), a, b, c); + }; + }(stubName, getInterceptor, getReceiver); + case 5: + return function(entry, interceptorOf, receiverOf) { + return function(a, b, c, d) { + return interceptorOf(this)[entry](receiverOf(this), a, b, c, d); + }; + }(stubName, getInterceptor, getReceiver); + case 6: + return function(entry, interceptorOf, receiverOf) { + return function(a, b, c, d, e) { + return interceptorOf(this)[entry](receiverOf(this), a, b, c, d, e); + }; + }(stubName, getInterceptor, getReceiver); + default: + return function(f, interceptorOf, receiverOf) { + return function() { + var a = [receiverOf(this)]; + Array.prototype.push.apply(a, arguments); + return f.apply(interceptorOf(this), a); + }; + }($function, getInterceptor, getReceiver); + } + }, + Closure_forwardInterceptedCallTo(stubName, $function, needsDirectAccess) { + var arity, t1; + if ($.BoundClosure__interceptorFieldNameCache == null) + $.BoundClosure__interceptorFieldNameCache = A.BoundClosure__computeFieldNamed("interceptor"); + if ($.BoundClosure__receiverFieldNameCache == null) + $.BoundClosure__receiverFieldNameCache = A.BoundClosure__computeFieldNamed("receiver"); + arity = 
$function.length; + t1 = A.Closure_cspForwardInterceptedCall(arity, needsDirectAccess, stubName, $function); + return t1; + }, + closureFromTearOff(parameters) { + return A.Closure_fromTearOff(parameters); + }, + BoundClosure_evalRecipe(closure, recipe) { + return A._Universe_evalInEnvironment(init.typeUniverse, A.instanceType(closure._receiver), recipe); + }, + BoundClosure_receiverOf(closure) { + return closure._receiver; + }, + BoundClosure_interceptorOf(closure) { + return closure._interceptor; + }, + BoundClosure__computeFieldNamed(fieldName) { + var names, i, $name, + template = new A.BoundClosure("receiver", "interceptor"), + t1 = Object.getOwnPropertyNames(template); + t1.$flags = 1; + names = t1; + for (t1 = names.length, i = 0; i < t1; ++i) { + $name = names[i]; + if (template[$name] === fieldName) + return $name; + } + throw A.wrapException(A.ArgumentError$("Field name " + fieldName + " not found.", null)); + }, + boolConversionCheck(value) { + if (value == null) + A.assertThrow("boolean expression must not be null"); + return value; + }, + assertThrow(message) { + throw A.wrapException(new A._AssertionError(message)); + }, + throwCyclicInit(staticName) { + throw A.wrapException(new A._CyclicInitializationError(staticName)); + }, + getIsolateAffinityTag($name) { + return init.getIsolateTag($name); + }, + defineProperty(obj, property, value) { + Object.defineProperty(obj, property, {value: value, enumerable: false, writable: true, configurable: true}); + }, + lookupAndCacheInterceptor(obj) { + var interceptor, interceptorClass, altTag, mark, t1, + tag = A._asString($.getTagFunction.call$1(obj)), + record = $.dispatchRecordsForInstanceTags[tag]; + if (record != null) { + Object.defineProperty(obj, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + return record.i; + } + interceptor = $.interceptorsForUncacheableTags[tag]; + if (interceptor != null) + return interceptor; + interceptorClass = 
init.interceptorsByTag[tag]; + if (interceptorClass == null) { + altTag = A._asStringQ($.alternateTagFunction.call$2(obj, tag)); + if (altTag != null) { + record = $.dispatchRecordsForInstanceTags[altTag]; + if (record != null) { + Object.defineProperty(obj, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + return record.i; + } + interceptor = $.interceptorsForUncacheableTags[altTag]; + if (interceptor != null) + return interceptor; + interceptorClass = init.interceptorsByTag[altTag]; + tag = altTag; + } + } + if (interceptorClass == null) + return null; + interceptor = interceptorClass.prototype; + mark = tag[0]; + if (mark === "!") { + record = A.makeLeafDispatchRecord(interceptor); + $.dispatchRecordsForInstanceTags[tag] = record; + Object.defineProperty(obj, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + return record.i; + } + if (mark === "~") { + $.interceptorsForUncacheableTags[tag] = interceptor; + return interceptor; + } + if (mark === "-") { + t1 = A.makeLeafDispatchRecord(interceptor); + Object.defineProperty(Object.getPrototypeOf(obj), init.dispatchPropertyName, {value: t1, enumerable: false, writable: true, configurable: true}); + return t1.i; + } + if (mark === "+") + return A.patchInteriorProto(obj, interceptor); + if (mark === "*") + throw A.wrapException(A.UnimplementedError$(tag)); + if (init.leafTags[tag] === true) { + t1 = A.makeLeafDispatchRecord(interceptor); + Object.defineProperty(Object.getPrototypeOf(obj), init.dispatchPropertyName, {value: t1, enumerable: false, writable: true, configurable: true}); + return t1.i; + } else + return A.patchInteriorProto(obj, interceptor); + }, + patchInteriorProto(obj, interceptor) { + var proto = Object.getPrototypeOf(obj); + Object.defineProperty(proto, init.dispatchPropertyName, {value: J.makeDispatchRecord(interceptor, proto, null, null), enumerable: false, writable: true, configurable: true}); + 
return interceptor; + }, + makeLeafDispatchRecord(interceptor) { + return J.makeDispatchRecord(interceptor, false, null, !!interceptor.$isJavaScriptIndexingBehavior); + }, + makeDefaultDispatchRecord(tag, interceptorClass, proto) { + var interceptor = interceptorClass.prototype; + if (init.leafTags[tag] === true) + return A.makeLeafDispatchRecord(interceptor); + else + return J.makeDispatchRecord(interceptor, proto, null, null); + }, + initNativeDispatch() { + if (true === $.initNativeDispatchFlag) + return; + $.initNativeDispatchFlag = true; + A.initNativeDispatchContinue(); + }, + initNativeDispatchContinue() { + var map, tags, fun, i, tag, proto, record, interceptorClass; + $.dispatchRecordsForInstanceTags = Object.create(null); + $.interceptorsForUncacheableTags = Object.create(null); + A.initHooks(); + map = init.interceptorsByTag; + tags = Object.getOwnPropertyNames(map); + if (typeof window != "undefined") { + window; + fun = function() { + }; + for (i = 0; i < tags.length; ++i) { + tag = tags[i]; + proto = $.prototypeForTagFunction.call$1(tag); + if (proto != null) { + record = A.makeDefaultDispatchRecord(tag, map[tag], proto); + if (record != null) { + Object.defineProperty(proto, init.dispatchPropertyName, {value: record, enumerable: false, writable: true, configurable: true}); + fun.prototype = proto; + } + } + } + } + for (i = 0; i < tags.length; ++i) { + tag = tags[i]; + if (/^[A-Za-z_]/.test(tag)) { + interceptorClass = map[tag]; + map["!" 
+ tag] = interceptorClass; + map["~" + tag] = interceptorClass; + map["-" + tag] = interceptorClass; + map["+" + tag] = interceptorClass; + map["*" + tag] = interceptorClass; + } + } + }, + initHooks() { + var transformers, i, transformer, getTag, getUnknownTag, prototypeForTag, + hooks = B.C_JS_CONST0(); + hooks = A.applyHooksTransformer(B.C_JS_CONST1, A.applyHooksTransformer(B.C_JS_CONST2, A.applyHooksTransformer(B.C_JS_CONST3, A.applyHooksTransformer(B.C_JS_CONST3, A.applyHooksTransformer(B.C_JS_CONST4, A.applyHooksTransformer(B.C_JS_CONST5, A.applyHooksTransformer(B.C_JS_CONST6(B.C_JS_CONST), hooks))))))); + if (typeof dartNativeDispatchHooksTransformer != "undefined") { + transformers = dartNativeDispatchHooksTransformer; + if (typeof transformers == "function") + transformers = [transformers]; + if (Array.isArray(transformers)) + for (i = 0; i < transformers.length; ++i) { + transformer = transformers[i]; + if (typeof transformer == "function") + hooks = transformer(hooks) || hooks; + } + } + getTag = hooks.getTag; + getUnknownTag = hooks.getUnknownTag; + prototypeForTag = hooks.prototypeForTag; + $.getTagFunction = new A.initHooks_closure(getTag); + $.alternateTagFunction = new A.initHooks_closure0(getUnknownTag); + $.prototypeForTagFunction = new A.initHooks_closure1(prototypeForTag); + }, + applyHooksTransformer(transformer, hooks) { + return transformer(hooks) || hooks; + }, + createRecordTypePredicate(shape, fieldRtis) { + var $length = fieldRtis.length, + $function = init.rttc["" + $length + ";" + shape]; + if ($function == null) + return null; + if ($length === 0) + return $function; + if ($length === $function.length) + return $function.apply(null, fieldRtis); + return $function(fieldRtis); + }, + quoteStringForRegExp(string) { + if (/[[\]{}()*+?.\\^$|]/.test(string)) + return string.replace(/[[\]{}()*+?.\\^$|]/g, "\\$&"); + return string; + }, + ConstantMapView: function ConstantMapView(t0, t1) { + this._collection$_map = t0; + this.$ti = t1; + }, + 
ConstantMap: function ConstantMap() { + }, + ConstantStringMap: function ConstantStringMap(t0, t1, t2) { + this._jsIndex = t0; + this._values = t1; + this.$ti = t2; + }, + _KeysOrValues: function _KeysOrValues(t0, t1) { + this._elements = t0; + this.$ti = t1; + }, + _KeysOrValuesOrElementsIterator: function _KeysOrValuesOrElementsIterator(t0, t1, t2) { + var _ = this; + _._elements = t0; + _.__js_helper$_length = t1; + _.__js_helper$_index = 0; + _.__js_helper$_current = null; + _.$ti = t2; + }, + JSInvocationMirror: function JSInvocationMirror(t0, t1, t2, t3, t4) { + var _ = this; + _._memberName = t0; + _.__js_helper$_kind = t1; + _._arguments = t2; + _._namedArgumentNames = t3; + _._typeArgumentCount = t4; + }, + Primitives_functionNoSuchMethod_closure: function Primitives_functionNoSuchMethod_closure(t0, t1, t2) { + this._box_0 = t0; + this.namedArgumentList = t1; + this.$arguments = t2; + }, + TypeErrorDecoder: function TypeErrorDecoder(t0, t1, t2, t3, t4, t5) { + var _ = this; + _._pattern = t0; + _._arguments = t1; + _._argumentsExpr = t2; + _._expr = t3; + _._method = t4; + _._receiver = t5; + }, + NullError: function NullError() { + }, + JsNoSuchMethodError: function JsNoSuchMethodError(t0, t1, t2) { + this.__js_helper$_message = t0; + this._method = t1; + this._receiver = t2; + }, + UnknownJsTypeError: function UnknownJsTypeError(t0) { + this.__js_helper$_message = t0; + }, + NullThrownFromJavaScriptException: function NullThrownFromJavaScriptException(t0) { + this._irritant = t0; + }, + ExceptionAndStackTrace: function ExceptionAndStackTrace(t0, t1) { + this.dartException = t0; + this.stackTrace = t1; + }, + _StackTrace: function _StackTrace(t0) { + this._exception = t0; + this._trace = null; + }, + Closure: function Closure() { + }, + Closure0Args: function Closure0Args() { + }, + Closure2Args: function Closure2Args() { + }, + TearOffClosure: function TearOffClosure() { + }, + StaticClosure: function StaticClosure() { + }, + BoundClosure: function 
BoundClosure(t0, t1) { + this._receiver = t0; + this._interceptor = t1; + }, + _CyclicInitializationError: function _CyclicInitializationError(t0) { + this.variableName = t0; + }, + RuntimeError: function RuntimeError(t0) { + this.message = t0; + }, + _AssertionError: function _AssertionError(t0) { + this.message = t0; + }, + _Required: function _Required() { + }, + JsLinkedHashMap: function JsLinkedHashMap(t0) { + var _ = this; + _.__js_helper$_length = 0; + _._last = _._first = _.__js_helper$_rest = _._nums = _._strings = null; + _._modifications = 0; + _.$ti = t0; + }, + LinkedHashMapCell: function LinkedHashMapCell(t0, t1) { + var _ = this; + _.hashMapCellKey = t0; + _.hashMapCellValue = t1; + _._previous = _._next = null; + }, + LinkedHashMapKeysIterable: function LinkedHashMapKeysIterable(t0, t1) { + this._map = t0; + this.$ti = t1; + }, + LinkedHashMapKeyIterator: function LinkedHashMapKeyIterator(t0, t1, t2, t3) { + var _ = this; + _._map = t0; + _._modifications = t1; + _._cell = t2; + _.__js_helper$_current = null; + _.$ti = t3; + }, + initHooks_closure: function initHooks_closure(t0) { + this.getTag = t0; + }, + initHooks_closure0: function initHooks_closure0(t0) { + this.getUnknownTag = t0; + }, + initHooks_closure1: function initHooks_closure1(t0) { + this.prototypeForTag = t0; + }, + _ensureNativeList(list) { + return list; + }, + NativeByteData_NativeByteData($length) { + return new DataView(new ArrayBuffer($length)); + }, + NativeUint8List_NativeUint8List($length) { + return new Uint8Array($length); + }, + NativeUint8List_NativeUint8List$view(buffer, offsetInBytes, $length) { + return $length == null ? 
new Uint8Array(buffer, offsetInBytes) : new Uint8Array(buffer, offsetInBytes, $length); + }, + _checkValidIndex(index, list, $length) { + if (index >>> 0 !== index || index >= $length) + throw A.wrapException(A.diagnoseIndexError(list, index)); + }, + _checkValidRange(start, end, $length) { + var t1; + if (!(start >>> 0 !== start)) + if (end == null) + t1 = start > $length; + else + t1 = end >>> 0 !== end || start > end || end > $length; + else + t1 = true; + if (t1) + throw A.wrapException(A.diagnoseRangeError(start, end, $length)); + if (end == null) + return $length; + return end; + }, + NativeByteBuffer: function NativeByteBuffer() { + }, + NativeTypedData: function NativeTypedData() { + }, + _UnmodifiableNativeByteBufferView: function _UnmodifiableNativeByteBufferView(t0) { + this._data = t0; + }, + NativeByteData: function NativeByteData() { + }, + NativeTypedArray: function NativeTypedArray() { + }, + NativeTypedArrayOfDouble: function NativeTypedArrayOfDouble() { + }, + NativeTypedArrayOfInt: function NativeTypedArrayOfInt() { + }, + NativeFloat32List: function NativeFloat32List() { + }, + NativeFloat64List: function NativeFloat64List() { + }, + NativeInt16List: function NativeInt16List() { + }, + NativeInt32List: function NativeInt32List() { + }, + NativeInt8List: function NativeInt8List() { + }, + NativeUint16List: function NativeUint16List() { + }, + NativeUint32List: function NativeUint32List() { + }, + NativeUint8ClampedList: function NativeUint8ClampedList() { + }, + NativeUint8List: function NativeUint8List() { + }, + _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin: function _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin() { + }, + _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin: function _NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin() { + }, + _NativeTypedArrayOfInt_NativeTypedArray_ListMixin: function _NativeTypedArrayOfInt_NativeTypedArray_ListMixin() { + }, + 
_NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin: function _NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin() { + }, + Rti__getQuestionFromStar(universe, rti) { + var question = rti._precomputed1; + return question == null ? rti._precomputed1 = A._Universe__lookupQuestionRti(universe, rti._primary, true) : question; + }, + Rti__getFutureFromFutureOr(universe, rti) { + var future = rti._precomputed1; + return future == null ? rti._precomputed1 = A._Universe__lookupInterfaceRti(universe, "Future", [rti._primary]) : future; + }, + Rti__isUnionOfFunctionType(rti) { + var kind = rti._kind; + if (kind === 6 || kind === 7 || kind === 8) + return A.Rti__isUnionOfFunctionType(rti._primary); + return kind === 12 || kind === 13; + }, + Rti__getCanonicalRecipe(rti) { + return rti._canonicalRecipe; + }, + findType(recipe) { + return A._Universe_eval(init.typeUniverse, recipe, false); + }, + _substitute(universe, rti, typeArguments, depth) { + var baseType, substitutedBaseType, interfaceTypeArguments, substitutedInterfaceTypeArguments, base, substitutedBase, $arguments, substitutedArguments, t1, fields, substitutedFields, returnType, substitutedReturnType, functionParameters, substitutedFunctionParameters, bounds, substitutedBounds, index, argument, + kind = rti._kind; + switch (kind) { + case 5: + case 1: + case 2: + case 3: + case 4: + return rti; + case 6: + baseType = rti._primary; + substitutedBaseType = A._substitute(universe, baseType, typeArguments, depth); + if (substitutedBaseType === baseType) + return rti; + return A._Universe__lookupStarRti(universe, substitutedBaseType, true); + case 7: + baseType = rti._primary; + substitutedBaseType = A._substitute(universe, baseType, typeArguments, depth); + if (substitutedBaseType === baseType) + return rti; + return A._Universe__lookupQuestionRti(universe, substitutedBaseType, true); + case 8: + baseType = rti._primary; + substitutedBaseType = A._substitute(universe, baseType, 
typeArguments, depth); + if (substitutedBaseType === baseType) + return rti; + return A._Universe__lookupFutureOrRti(universe, substitutedBaseType, true); + case 9: + interfaceTypeArguments = rti._rest; + substitutedInterfaceTypeArguments = A._substituteArray(universe, interfaceTypeArguments, typeArguments, depth); + if (substitutedInterfaceTypeArguments === interfaceTypeArguments) + return rti; + return A._Universe__lookupInterfaceRti(universe, rti._primary, substitutedInterfaceTypeArguments); + case 10: + base = rti._primary; + substitutedBase = A._substitute(universe, base, typeArguments, depth); + $arguments = rti._rest; + substitutedArguments = A._substituteArray(universe, $arguments, typeArguments, depth); + if (substitutedBase === base && substitutedArguments === $arguments) + return rti; + return A._Universe__lookupBindingRti(universe, substitutedBase, substitutedArguments); + case 11: + t1 = rti._primary; + fields = rti._rest; + substitutedFields = A._substituteArray(universe, fields, typeArguments, depth); + if (substitutedFields === fields) + return rti; + return A._Universe__lookupRecordRti(universe, t1, substitutedFields); + case 12: + returnType = rti._primary; + substitutedReturnType = A._substitute(universe, returnType, typeArguments, depth); + functionParameters = rti._rest; + substitutedFunctionParameters = A._substituteFunctionParameters(universe, functionParameters, typeArguments, depth); + if (substitutedReturnType === returnType && substitutedFunctionParameters === functionParameters) + return rti; + return A._Universe__lookupFunctionRti(universe, substitutedReturnType, substitutedFunctionParameters); + case 13: + bounds = rti._rest; + depth += bounds.length; + substitutedBounds = A._substituteArray(universe, bounds, typeArguments, depth); + base = rti._primary; + substitutedBase = A._substitute(universe, base, typeArguments, depth); + if (substitutedBounds === bounds && substitutedBase === base) + return rti; + return 
A._Universe__lookupGenericFunctionRti(universe, substitutedBase, substitutedBounds, true); + case 14: + index = rti._primary; + if (index < depth) + return rti; + argument = typeArguments[index - depth]; + if (argument == null) + return rti; + return argument; + default: + throw A.wrapException(A.AssertionError$("Attempted to substitute unexpected RTI kind " + kind)); + } + }, + _substituteArray(universe, rtiArray, typeArguments, depth) { + var changed, i, rti, substitutedRti, + $length = rtiArray.length, + result = A._Utils_newArrayOrEmpty($length); + for (changed = false, i = 0; i < $length; ++i) { + rti = rtiArray[i]; + substitutedRti = A._substitute(universe, rti, typeArguments, depth); + if (substitutedRti !== rti) + changed = true; + result[i] = substitutedRti; + } + return changed ? result : rtiArray; + }, + _substituteNamed(universe, namedArray, typeArguments, depth) { + var changed, i, t1, t2, rti, substitutedRti, + $length = namedArray.length, + result = A._Utils_newArrayOrEmpty($length); + for (changed = false, i = 0; i < $length; i += 3) { + t1 = namedArray[i]; + t2 = namedArray[i + 1]; + rti = namedArray[i + 2]; + substitutedRti = A._substitute(universe, rti, typeArguments, depth); + if (substitutedRti !== rti) + changed = true; + result.splice(i, 3, t1, t2, substitutedRti); + } + return changed ? 
result : namedArray; + }, + _substituteFunctionParameters(universe, functionParameters, typeArguments, depth) { + var result, + requiredPositional = functionParameters._requiredPositional, + substitutedRequiredPositional = A._substituteArray(universe, requiredPositional, typeArguments, depth), + optionalPositional = functionParameters._optionalPositional, + substitutedOptionalPositional = A._substituteArray(universe, optionalPositional, typeArguments, depth), + named = functionParameters._named, + substitutedNamed = A._substituteNamed(universe, named, typeArguments, depth); + if (substitutedRequiredPositional === requiredPositional && substitutedOptionalPositional === optionalPositional && substitutedNamed === named) + return functionParameters; + result = new A._FunctionParameters(); + result._requiredPositional = substitutedRequiredPositional; + result._optionalPositional = substitutedOptionalPositional; + result._named = substitutedNamed; + return result; + }, + _setArrayType(target, rti) { + target[init.arrayRti] = rti; + return target; + }, + closureFunctionType(closure) { + var signature = closure.$signature; + if (signature != null) { + if (typeof signature == "number") + return A.getTypeFromTypesTable(signature); + return closure.$signature(); + } + return null; + }, + instanceOrFunctionType(object, testRti) { + var rti; + if (A.Rti__isUnionOfFunctionType(testRti)) + if (object instanceof A.Closure) { + rti = A.closureFunctionType(object); + if (rti != null) + return rti; + } + return A.instanceType(object); + }, + instanceType(object) { + if (object instanceof A.Object) + return A._instanceType(object); + if (Array.isArray(object)) + return A._arrayInstanceType(object); + return A._instanceTypeFromConstructor(J.getInterceptor$(object)); + }, + _arrayInstanceType(object) { + var rti = object[init.arrayRti], + defaultRti = type$.JSArray_dynamic; + if (rti == null) + return defaultRti; + if (rti.constructor !== defaultRti.constructor) + return defaultRti; + 
return rti; + }, + _instanceType(object) { + var rti = object.$ti; + return rti != null ? rti : A._instanceTypeFromConstructor(object); + }, + _instanceTypeFromConstructor(instance) { + var $constructor = instance.constructor, + probe = $constructor.$ccache; + if (probe != null) + return probe; + return A._instanceTypeFromConstructorMiss(instance, $constructor); + }, + _instanceTypeFromConstructorMiss(instance, $constructor) { + var effectiveConstructor = instance instanceof A.Closure ? Object.getPrototypeOf(Object.getPrototypeOf(instance)).constructor : $constructor, + rti = A._Universe_findErasedType(init.typeUniverse, effectiveConstructor.name); + $constructor.$ccache = rti; + return rti; + }, + getTypeFromTypesTable(index) { + var rti, + table = init.types, + type = table[index]; + if (typeof type == "string") { + rti = A._Universe_eval(init.typeUniverse, type, false); + table[index] = rti; + return rti; + } + return type; + }, + getRuntimeTypeOfDartObject(object) { + return A.createRuntimeType(A._instanceType(object)); + }, + _structuralTypeOf(object) { + var functionRti = object instanceof A.Closure ? A.closureFunctionType(object) : null; + if (functionRti != null) + return functionRti; + if (type$.TrustedGetRuntimeType._is(object)) + return J.get$runtimeType$(object)._rti; + if (Array.isArray(object)) + return A._arrayInstanceType(object); + return A.instanceType(object); + }, + createRuntimeType(rti) { + var t1 = rti._cachedRuntimeType; + return t1 == null ? rti._cachedRuntimeType = A._createRuntimeType(rti) : t1; + }, + _createRuntimeType(rti) { + var starErasedRti, t1, + s = rti._canonicalRecipe, + starErasedRecipe = s.replace(/\*/g, ""); + if (starErasedRecipe === s) + return rti._cachedRuntimeType = new A._Type(rti); + starErasedRti = A._Universe_eval(init.typeUniverse, starErasedRecipe, true); + t1 = starErasedRti._cachedRuntimeType; + return t1 == null ? 
starErasedRti._cachedRuntimeType = A._createRuntimeType(starErasedRti) : t1; + }, + typeLiteral(recipe) { + return A.createRuntimeType(A._Universe_eval(init.typeUniverse, recipe, false)); + }, + _installSpecializedIsTest(object) { + var t1, unstarred, unstarredKind, isFn, $name, predicate, testRti = this; + if (testRti === type$.Object) + return A._finishIsFn(testRti, object, A._isObject); + if (!A.isSoundTopType(testRti)) + t1 = testRti === type$.legacy_Object; + else + t1 = true; + if (t1) + return A._finishIsFn(testRti, object, A._isTop); + t1 = testRti._kind; + if (t1 === 7) + return A._finishIsFn(testRti, object, A._generalNullableIsTestImplementation); + if (t1 === 1) + return A._finishIsFn(testRti, object, A._isNever); + unstarred = t1 === 6 ? testRti._primary : testRti; + unstarredKind = unstarred._kind; + if (unstarredKind === 8) + return A._finishIsFn(testRti, object, A._isFutureOr); + if (unstarred === type$.int) + isFn = A._isInt; + else if (unstarred === type$.double || unstarred === type$.num) + isFn = A._isNum; + else if (unstarred === type$.String) + isFn = A._isString; + else + isFn = unstarred === type$.bool ? A._isBool : null; + if (isFn != null) + return A._finishIsFn(testRti, object, isFn); + if (unstarredKind === 9) { + $name = unstarred._primary; + if (unstarred._rest.every(A.isDefinitelyTopType)) { + testRti._specializedTestResource = "$is" + $name; + if ($name === "List") + return A._finishIsFn(testRti, object, A._isListTestViaProperty); + return A._finishIsFn(testRti, object, A._isTestViaProperty); + } + } else if (unstarredKind === 11) { + predicate = A.createRecordTypePredicate(unstarred._primary, unstarred._rest); + return A._finishIsFn(testRti, object, predicate == null ? 
A._isNever : predicate); + } + return A._finishIsFn(testRti, object, A._generalIsTestImplementation); + }, + _finishIsFn(testRti, object, isFn) { + testRti._is = isFn; + return testRti._is(object); + }, + _installSpecializedAsCheck(object) { + var t1, testRti = this, + asFn = A._generalAsCheckImplementation; + if (!A.isSoundTopType(testRti)) + t1 = testRti === type$.legacy_Object; + else + t1 = true; + if (t1) + asFn = A._asTop; + else if (testRti === type$.Object) + asFn = A._asObject; + else { + t1 = A.isNullable(testRti); + if (t1) + asFn = A._generalNullableAsCheckImplementation; + } + testRti._as = asFn; + return testRti._as(object); + }, + _nullIs(testRti) { + var kind = testRti._kind, + t1 = true; + if (!A.isSoundTopType(testRti)) + if (!(testRti === type$.legacy_Object)) + if (!(testRti === type$.legacy_Never)) + if (kind !== 7) + if (!(kind === 6 && A._nullIs(testRti._primary))) + t1 = kind === 8 && A._nullIs(testRti._primary) || testRti === type$.Null || testRti === type$.JSNull; + return t1; + }, + _generalIsTestImplementation(object) { + var testRti = this; + if (object == null) + return A._nullIs(testRti); + return A.isSubtype(init.typeUniverse, A.instanceOrFunctionType(object, testRti), testRti); + }, + _generalNullableIsTestImplementation(object) { + if (object == null) + return true; + return this._primary._is(object); + }, + _isTestViaProperty(object) { + var tag, testRti = this; + if (object == null) + return A._nullIs(testRti); + tag = testRti._specializedTestResource; + if (object instanceof A.Object) + return !!object[tag]; + return !!J.getInterceptor$(object)[tag]; + }, + _isListTestViaProperty(object) { + var tag, testRti = this; + if (object == null) + return A._nullIs(testRti); + if (typeof object != "object") + return false; + if (Array.isArray(object)) + return true; + tag = testRti._specializedTestResource; + if (object instanceof A.Object) + return !!object[tag]; + return !!J.getInterceptor$(object)[tag]; + }, + 
_generalAsCheckImplementation(object) { + var testRti = this; + if (object == null) { + if (A.isNullable(testRti)) + return object; + } else if (testRti._is(object)) + return object; + A._failedAsCheck(object, testRti); + }, + _generalNullableAsCheckImplementation(object) { + var testRti = this; + if (object == null) + return object; + else if (testRti._is(object)) + return object; + A._failedAsCheck(object, testRti); + }, + _failedAsCheck(object, testRti) { + throw A.wrapException(A._TypeError$fromMessage(A._Error_compose(object, A._rtiToString(testRti, null)))); + }, + _Error_compose(object, checkedTypeDescription) { + return A.Error_safeToString(object) + ": type '" + A._rtiToString(A._structuralTypeOf(object), null) + "' is not a subtype of type '" + checkedTypeDescription + "'"; + }, + _TypeError$fromMessage(message) { + return new A._TypeError("TypeError: " + message); + }, + _TypeError__TypeError$forType(object, type) { + return new A._TypeError("TypeError: " + A._Error_compose(object, type)); + }, + _isFutureOr(object) { + var testRti = this, + unstarred = testRti._kind === 6 ? 
testRti._primary : testRti; + return unstarred._primary._is(object) || A.Rti__getFutureFromFutureOr(init.typeUniverse, unstarred)._is(object); + }, + _isObject(object) { + return object != null; + }, + _asObject(object) { + if (object != null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "Object")); + }, + _isTop(object) { + return true; + }, + _asTop(object) { + return object; + }, + _isNever(object) { + return false; + }, + _isBool(object) { + return true === object || false === object; + }, + _asBool(object) { + if (true === object) + return true; + if (false === object) + return false; + throw A.wrapException(A._TypeError__TypeError$forType(object, "bool")); + }, + _asBoolS(object) { + if (true === object) + return true; + if (false === object) + return false; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "bool")); + }, + _asBoolQ(object) { + if (true === object) + return true; + if (false === object) + return false; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "bool?")); + }, + _asDouble(object) { + if (typeof object == "number") + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "double")); + }, + _asDoubleS(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "double")); + }, + _asDoubleQ(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "double?")); + }, + _isInt(object) { + return typeof object == "number" && Math.floor(object) === object; + }, + _asInt(object) { + if (typeof object == "number" && Math.floor(object) === object) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "int")); + }, + _asIntS(object) { + if (typeof object 
== "number" && Math.floor(object) === object) + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "int")); + }, + _asIntQ(object) { + if (typeof object == "number" && Math.floor(object) === object) + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "int?")); + }, + _isNum(object) { + return typeof object == "number"; + }, + _asNum(object) { + if (typeof object == "number") + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "num")); + }, + _asNumS(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "num")); + }, + _asNumQ(object) { + if (typeof object == "number") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "num?")); + }, + _isString(object) { + return typeof object == "string"; + }, + _asString(object) { + if (typeof object == "string") + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "String")); + }, + _asStringS(object) { + if (typeof object == "string") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "String")); + }, + _asStringQ(object) { + if (typeof object == "string") + return object; + if (object == null) + return object; + throw A.wrapException(A._TypeError__TypeError$forType(object, "String?")); + }, + _rtiArrayToString(array, genericContext) { + var s, sep, i; + for (s = "", sep = "", i = 0; i < array.length; ++i, sep = ", ") + s += sep + A._rtiToString(array[i], genericContext); + return s; + }, + _recordRtiToString(recordType, genericContext) { + var fieldCount, names, namesIndex, s, comma, i, + partialShape = recordType._primary, + fields = recordType._rest; + if ("" === partialShape) + return "(" + 
A._rtiArrayToString(fields, genericContext) + ")"; + fieldCount = fields.length; + names = partialShape.split(","); + namesIndex = names.length - fieldCount; + for (s = "(", comma = "", i = 0; i < fieldCount; ++i, comma = ", ") { + s += comma; + if (namesIndex === 0) + s += "{"; + s += A._rtiToString(fields[i], genericContext); + if (namesIndex >= 0) + s += " " + names[namesIndex]; + ++namesIndex; + } + return s + "})"; + }, + _functionRtiToString(functionType, genericContext, bounds) { + var boundsLength, offset, i, t1, t2, typeParametersText, typeSep, t3, t4, boundRti, kind, parameters, requiredPositional, requiredPositionalLength, optionalPositional, optionalPositionalLength, named, namedLength, returnTypeText, argumentsText, sep, _s2_ = ", ", outerContextLength = null; + if (bounds != null) { + boundsLength = bounds.length; + if (genericContext == null) + genericContext = A._setArrayType([], type$.JSArray_String); + else + outerContextLength = genericContext.length; + offset = genericContext.length; + for (i = boundsLength; i > 0; --i) + B.JSArray_methods.add$1(genericContext, "T" + (offset + i)); + for (t1 = type$.nullable_Object, t2 = type$.legacy_Object, typeParametersText = "<", typeSep = "", i = 0; i < boundsLength; ++i, typeSep = _s2_) { + t3 = genericContext.length; + t4 = t3 - 1 - i; + if (!(t4 >= 0)) + return A.ioore(genericContext, t4); + typeParametersText = typeParametersText + typeSep + genericContext[t4]; + boundRti = bounds[i]; + kind = boundRti._kind; + if (!(kind === 2 || kind === 3 || kind === 4 || kind === 5 || boundRti === t1)) + t3 = boundRti === t2; + else + t3 = true; + if (!t3) + typeParametersText += " extends " + A._rtiToString(boundRti, genericContext); + } + typeParametersText += ">"; + } else + typeParametersText = ""; + t1 = functionType._primary; + parameters = functionType._rest; + requiredPositional = parameters._requiredPositional; + requiredPositionalLength = requiredPositional.length; + optionalPositional = 
parameters._optionalPositional; + optionalPositionalLength = optionalPositional.length; + named = parameters._named; + namedLength = named.length; + returnTypeText = A._rtiToString(t1, genericContext); + for (argumentsText = "", sep = "", i = 0; i < requiredPositionalLength; ++i, sep = _s2_) + argumentsText += sep + A._rtiToString(requiredPositional[i], genericContext); + if (optionalPositionalLength > 0) { + argumentsText += sep + "["; + for (sep = "", i = 0; i < optionalPositionalLength; ++i, sep = _s2_) + argumentsText += sep + A._rtiToString(optionalPositional[i], genericContext); + argumentsText += "]"; + } + if (namedLength > 0) { + argumentsText += sep + "{"; + for (sep = "", i = 0; i < namedLength; i += 3, sep = _s2_) { + argumentsText += sep; + if (named[i + 1]) + argumentsText += "required "; + argumentsText += A._rtiToString(named[i + 2], genericContext) + " " + named[i]; + } + argumentsText += "}"; + } + if (outerContextLength != null) { + genericContext.toString; + genericContext.length = outerContextLength; + } + return typeParametersText + "(" + argumentsText + ") => " + returnTypeText; + }, + _rtiToString(rti, genericContext) { + var questionArgument, s, argumentKind, $name, $arguments, t1, t2, + kind = rti._kind; + if (kind === 5) + return "erased"; + if (kind === 2) + return "dynamic"; + if (kind === 3) + return "void"; + if (kind === 1) + return "Never"; + if (kind === 4) + return "any"; + if (kind === 6) + return A._rtiToString(rti._primary, genericContext); + if (kind === 7) { + questionArgument = rti._primary; + s = A._rtiToString(questionArgument, genericContext); + argumentKind = questionArgument._kind; + return (argumentKind === 12 || argumentKind === 13 ? "(" + s + ")" : s) + "?"; + } + if (kind === 8) + return "FutureOr<" + A._rtiToString(rti._primary, genericContext) + ">"; + if (kind === 9) { + $name = A._unminifyOrTag(rti._primary); + $arguments = rti._rest; + return $arguments.length > 0 ? 
$name + ("<" + A._rtiArrayToString($arguments, genericContext) + ">") : $name; + } + if (kind === 11) + return A._recordRtiToString(rti, genericContext); + if (kind === 12) + return A._functionRtiToString(rti, genericContext, null); + if (kind === 13) + return A._functionRtiToString(rti._primary, genericContext, rti._rest); + if (kind === 14) { + t1 = rti._primary; + t2 = genericContext.length; + t1 = t2 - 1 - t1; + if (!(t1 >= 0 && t1 < t2)) + return A.ioore(genericContext, t1); + return genericContext[t1]; + } + return "?"; + }, + _unminifyOrTag(rawClassName) { + var preserved = init.mangledGlobalNames[rawClassName]; + if (preserved != null) + return preserved; + return rawClassName; + }, + _Universe_findRule(universe, targetType) { + var rule = universe.tR[targetType]; + for (; typeof rule == "string";) + rule = universe.tR[rule]; + return rule; + }, + _Universe_findErasedType(universe, cls) { + var $length, erased, $arguments, i, $interface, + t1 = universe.eT, + probe = t1[cls]; + if (probe == null) + return A._Universe_eval(universe, cls, false); + else if (typeof probe == "number") { + $length = probe; + erased = A._Universe__lookupTerminalRti(universe, 5, "#"); + $arguments = A._Utils_newArrayOrEmpty($length); + for (i = 0; i < $length; ++i) + $arguments[i] = erased; + $interface = A._Universe__lookupInterfaceRti(universe, cls, $arguments); + t1[cls] = $interface; + return $interface; + } else + return probe; + }, + _Universe_addRules(universe, rules) { + return A._Utils_objectAssign(universe.tR, rules); + }, + _Universe_addErasedTypes(universe, types) { + return A._Utils_objectAssign(universe.eT, types); + }, + _Universe_eval(universe, recipe, normalize) { + var rti, + t1 = universe.eC, + probe = t1.get(recipe); + if (probe != null) + return probe; + rti = A._Parser_parse(A._Parser_create(universe, null, recipe, normalize)); + t1.set(recipe, rti); + return rti; + }, + _Universe_evalInEnvironment(universe, environment, recipe) { + var probe, rti, + cache = 
environment._evalCache; + if (cache == null) + cache = environment._evalCache = new Map(); + probe = cache.get(recipe); + if (probe != null) + return probe; + rti = A._Parser_parse(A._Parser_create(universe, environment, recipe, true)); + cache.set(recipe, rti); + return rti; + }, + _Universe_bind(universe, environment, argumentsRti) { + var argumentsRecipe, probe, rti, + cache = environment._bindCache; + if (cache == null) + cache = environment._bindCache = new Map(); + argumentsRecipe = argumentsRti._canonicalRecipe; + probe = cache.get(argumentsRecipe); + if (probe != null) + return probe; + rti = A._Universe__lookupBindingRti(universe, environment, argumentsRti._kind === 10 ? argumentsRti._rest : [argumentsRti]); + cache.set(argumentsRecipe, rti); + return rti; + }, + _Universe__installTypeTests(universe, rti) { + rti._as = A._installSpecializedAsCheck; + rti._is = A._installSpecializedIsTest; + return rti; + }, + _Universe__lookupTerminalRti(universe, kind, key) { + var rti, t1, + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = kind; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupStarRti(universe, baseType, normalize) { + var t1, + key = baseType._canonicalRecipe + "*", + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createStarRti(universe, baseType, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createStarRti(universe, baseType, key, normalize) { + var baseKind, t1, rti; + if (normalize) { + baseKind = baseType._kind; + if (!A.isSoundTopType(baseType)) + t1 = baseType === type$.Null || baseType === type$.JSNull || baseKind === 7 || baseKind === 6; + else + t1 = true; + if (t1) + return baseType; + } + rti = new A.Rti(null, null); + rti._kind = 6; + rti._primary = baseType; + rti._canonicalRecipe = key; + return 
A._Universe__installTypeTests(universe, rti); + }, + _Universe__lookupQuestionRti(universe, baseType, normalize) { + var t1, + key = baseType._canonicalRecipe + "?", + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createQuestionRti(universe, baseType, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createQuestionRti(universe, baseType, key, normalize) { + var baseKind, t1, starArgument, rti; + if (normalize) { + baseKind = baseType._kind; + t1 = true; + if (!A.isSoundTopType(baseType)) + if (!(baseType === type$.Null || baseType === type$.JSNull)) + if (baseKind !== 7) + t1 = baseKind === 8 && A.isNullable(baseType._primary); + if (t1) + return baseType; + else if (baseKind === 1 || baseType === type$.legacy_Never) + return type$.Null; + else if (baseKind === 6) { + starArgument = baseType._primary; + if (starArgument._kind === 8 && A.isNullable(starArgument._primary)) + return starArgument; + else + return A.Rti__getQuestionFromStar(universe, baseType); + } + } + rti = new A.Rti(null, null); + rti._kind = 7; + rti._primary = baseType; + rti._canonicalRecipe = key; + return A._Universe__installTypeTests(universe, rti); + }, + _Universe__lookupFutureOrRti(universe, baseType, normalize) { + var t1, + key = baseType._canonicalRecipe + "/", + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createFutureOrRti(universe, baseType, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createFutureOrRti(universe, baseType, key, normalize) { + var t1, rti; + if (normalize) { + t1 = baseType._kind; + if (A.isSoundTopType(baseType) || baseType === type$.Object || baseType === type$.legacy_Object) + return baseType; + else if (t1 === 1) + return A._Universe__lookupInterfaceRti(universe, "Future", [baseType]); + else if (baseType === type$.Null || baseType === type$.JSNull) + return type$.nullable_Future_Null; + } + rti = new A.Rti(null, null); + 
rti._kind = 8; + rti._primary = baseType; + rti._canonicalRecipe = key; + return A._Universe__installTypeTests(universe, rti); + }, + _Universe__lookupGenericFunctionParameterRti(universe, index) { + var rti, t1, + key = "" + index + "^", + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 14; + rti._primary = index; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__canonicalRecipeJoin($arguments) { + var s, sep, i, + $length = $arguments.length; + for (s = "", sep = "", i = 0; i < $length; ++i, sep = ",") + s += sep + $arguments[i]._canonicalRecipe; + return s; + }, + _Universe__canonicalRecipeJoinNamed($arguments) { + var s, sep, i, t1, nameSep, + $length = $arguments.length; + for (s = "", sep = "", i = 0; i < $length; i += 3, sep = ",") { + t1 = $arguments[i]; + nameSep = $arguments[i + 1] ? "!" : ":"; + s += sep + t1 + nameSep + $arguments[i + 2]._canonicalRecipe; + } + return s; + }, + _Universe__lookupInterfaceRti(universe, $name, $arguments) { + var probe, rti, t1, + s = $name; + if ($arguments.length > 0) + s += "<" + A._Universe__canonicalRecipeJoin($arguments) + ">"; + probe = universe.eC.get(s); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 9; + rti._primary = $name; + rti._rest = $arguments; + if ($arguments.length > 0) + rti._precomputed1 = $arguments[0]; + rti._canonicalRecipe = s; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(s, t1); + return t1; + }, + _Universe__lookupBindingRti(universe, base, $arguments) { + var newBase, newArguments, key, probe, rti, t1; + if (base._kind === 10) { + newBase = base._primary; + newArguments = base._rest.concat($arguments); + } else { + newArguments = $arguments; + newBase = base; + } + key = newBase._canonicalRecipe + (";<" + A._Universe__canonicalRecipeJoin(newArguments) + ">"); + probe = 
universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 10; + rti._primary = newBase; + rti._rest = newArguments; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupRecordRti(universe, partialShapeTag, fields) { + var rti, t1, + key = "+" + (partialShapeTag + "(" + A._Universe__canonicalRecipeJoin(fields) + ")"), + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 11; + rti._primary = partialShapeTag; + rti._rest = fields; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupFunctionRti(universe, returnType, parameters) { + var sep, key, probe, rti, t1, + s = returnType._canonicalRecipe, + requiredPositional = parameters._requiredPositional, + requiredPositionalLength = requiredPositional.length, + optionalPositional = parameters._optionalPositional, + optionalPositionalLength = optionalPositional.length, + named = parameters._named, + namedLength = named.length, + recipe = "(" + A._Universe__canonicalRecipeJoin(requiredPositional); + if (optionalPositionalLength > 0) { + sep = requiredPositionalLength > 0 ? "," : ""; + recipe += sep + "[" + A._Universe__canonicalRecipeJoin(optionalPositional) + "]"; + } + if (namedLength > 0) { + sep = requiredPositionalLength > 0 ? 
"," : ""; + recipe += sep + "{" + A._Universe__canonicalRecipeJoinNamed(named) + "}"; + } + key = s + (recipe + ")"); + probe = universe.eC.get(key); + if (probe != null) + return probe; + rti = new A.Rti(null, null); + rti._kind = 12; + rti._primary = returnType; + rti._rest = parameters; + rti._canonicalRecipe = key; + t1 = A._Universe__installTypeTests(universe, rti); + universe.eC.set(key, t1); + return t1; + }, + _Universe__lookupGenericFunctionRti(universe, baseFunctionType, bounds, normalize) { + var t1, + key = baseFunctionType._canonicalRecipe + ("<" + A._Universe__canonicalRecipeJoin(bounds) + ">"), + probe = universe.eC.get(key); + if (probe != null) + return probe; + t1 = A._Universe__createGenericFunctionRti(universe, baseFunctionType, bounds, key, normalize); + universe.eC.set(key, t1); + return t1; + }, + _Universe__createGenericFunctionRti(universe, baseFunctionType, bounds, key, normalize) { + var $length, typeArguments, count, i, bound, substitutedBase, substitutedBounds, rti; + if (normalize) { + $length = bounds.length; + typeArguments = A._Utils_newArrayOrEmpty($length); + for (count = 0, i = 0; i < $length; ++i) { + bound = bounds[i]; + if (bound._kind === 1) { + typeArguments[i] = bound; + ++count; + } + } + if (count > 0) { + substitutedBase = A._substitute(universe, baseFunctionType, typeArguments, 0); + substitutedBounds = A._substituteArray(universe, bounds, typeArguments, 0); + return A._Universe__lookupGenericFunctionRti(universe, substitutedBase, substitutedBounds, bounds !== substitutedBounds); + } + } + rti = new A.Rti(null, null); + rti._kind = 13; + rti._primary = baseFunctionType; + rti._rest = bounds; + rti._canonicalRecipe = key; + return A._Universe__installTypeTests(universe, rti); + }, + _Parser_create(universe, environment, recipe, normalize) { + return {u: universe, e: environment, r: recipe, s: [], p: 0, n: normalize}; + }, + _Parser_parse(parser) { + var t2, i, ch, t3, array, end, item, + source = parser.r, + t1 = 
parser.s; + for (t2 = source.length, i = 0; i < t2;) { + ch = source.charCodeAt(i); + if (ch >= 48 && ch <= 57) + i = A._Parser_handleDigit(i + 1, ch, source, t1); + else if ((((ch | 32) >>> 0) - 97 & 65535) < 26 || ch === 95 || ch === 36 || ch === 124) + i = A._Parser_handleIdentifier(parser, i, source, t1, false); + else if (ch === 46) + i = A._Parser_handleIdentifier(parser, i, source, t1, true); + else { + ++i; + switch (ch) { + case 44: + break; + case 58: + t1.push(false); + break; + case 33: + t1.push(true); + break; + case 59: + t1.push(A._Parser_toType(parser.u, parser.e, t1.pop())); + break; + case 94: + t1.push(A._Universe__lookupGenericFunctionParameterRti(parser.u, t1.pop())); + break; + case 35: + t1.push(A._Universe__lookupTerminalRti(parser.u, 5, "#")); + break; + case 64: + t1.push(A._Universe__lookupTerminalRti(parser.u, 2, "@")); + break; + case 126: + t1.push(A._Universe__lookupTerminalRti(parser.u, 3, "~")); + break; + case 60: + t1.push(parser.p); + parser.p = t1.length; + break; + case 62: + A._Parser_handleTypeArguments(parser, t1); + break; + case 38: + A._Parser_handleExtendedOperations(parser, t1); + break; + case 42: + t3 = parser.u; + t1.push(A._Universe__lookupStarRti(t3, A._Parser_toType(t3, parser.e, t1.pop()), parser.n)); + break; + case 63: + t3 = parser.u; + t1.push(A._Universe__lookupQuestionRti(t3, A._Parser_toType(t3, parser.e, t1.pop()), parser.n)); + break; + case 47: + t3 = parser.u; + t1.push(A._Universe__lookupFutureOrRti(t3, A._Parser_toType(t3, parser.e, t1.pop()), parser.n)); + break; + case 40: + t1.push(-3); + t1.push(parser.p); + parser.p = t1.length; + break; + case 41: + A._Parser_handleArguments(parser, t1); + break; + case 91: + t1.push(parser.p); + parser.p = t1.length; + break; + case 93: + array = t1.splice(parser.p); + A._Parser_toTypes(parser.u, parser.e, array); + parser.p = t1.pop(); + t1.push(array); + t1.push(-1); + break; + case 123: + t1.push(parser.p); + parser.p = t1.length; + break; + case 125: + 
array = t1.splice(parser.p); + A._Parser_toTypesNamed(parser.u, parser.e, array); + parser.p = t1.pop(); + t1.push(array); + t1.push(-2); + break; + case 43: + end = source.indexOf("(", i); + t1.push(source.substring(i, end)); + t1.push(-4); + t1.push(parser.p); + parser.p = t1.length; + i = end + 1; + break; + default: + throw "Bad character " + ch; + } + } + } + item = t1.pop(); + return A._Parser_toType(parser.u, parser.e, item); + }, + _Parser_handleDigit(i, digit, source, stack) { + var t1, ch, + value = digit - 48; + for (t1 = source.length; i < t1; ++i) { + ch = source.charCodeAt(i); + if (!(ch >= 48 && ch <= 57)) + break; + value = value * 10 + (ch - 48); + } + stack.push(value); + return i; + }, + _Parser_handleIdentifier(parser, start, source, stack, hasPeriod) { + var t1, ch, t2, string, environment, recipe, + i = start + 1; + for (t1 = source.length; i < t1; ++i) { + ch = source.charCodeAt(i); + if (ch === 46) { + if (hasPeriod) + break; + hasPeriod = true; + } else { + if (!((((ch | 32) >>> 0) - 97 & 65535) < 26 || ch === 95 || ch === 36 || ch === 124)) + t2 = ch >= 48 && ch <= 57; + else + t2 = true; + if (!t2) + break; + } + } + string = source.substring(start, i); + if (hasPeriod) { + t1 = parser.u; + environment = parser.e; + if (environment._kind === 10) + environment = environment._primary; + recipe = A._Universe_findRule(t1, environment._primary)[string]; + if (recipe == null) + A.throwExpression('No "' + string + '" in "' + A.Rti__getCanonicalRecipe(environment) + '"'); + stack.push(A._Universe_evalInEnvironment(t1, environment, recipe)); + } else + stack.push(string); + return i; + }, + _Parser_handleTypeArguments(parser, stack) { + var base, + t1 = parser.u, + $arguments = A._Parser_collectArray(parser, stack), + head = stack.pop(); + if (typeof head == "string") + stack.push(A._Universe__lookupInterfaceRti(t1, head, $arguments)); + else { + base = A._Parser_toType(t1, parser.e, head); + switch (base._kind) { + case 12: + 
stack.push(A._Universe__lookupGenericFunctionRti(t1, base, $arguments, parser.n)); + break; + default: + stack.push(A._Universe__lookupBindingRti(t1, base, $arguments)); + break; + } + } + }, + _Parser_handleArguments(parser, stack) { + var requiredPositional, returnType, parameters, + t1 = parser.u, + head = stack.pop(), + optionalPositional = null, named = null; + if (typeof head == "number") + switch (head) { + case -1: + optionalPositional = stack.pop(); + break; + case -2: + named = stack.pop(); + break; + default: + stack.push(head); + break; + } + else + stack.push(head); + requiredPositional = A._Parser_collectArray(parser, stack); + head = stack.pop(); + switch (head) { + case -3: + head = stack.pop(); + if (optionalPositional == null) + optionalPositional = t1.sEA; + if (named == null) + named = t1.sEA; + returnType = A._Parser_toType(t1, parser.e, head); + parameters = new A._FunctionParameters(); + parameters._requiredPositional = requiredPositional; + parameters._optionalPositional = optionalPositional; + parameters._named = named; + stack.push(A._Universe__lookupFunctionRti(t1, returnType, parameters)); + return; + case -4: + stack.push(A._Universe__lookupRecordRti(t1, stack.pop(), requiredPositional)); + return; + default: + throw A.wrapException(A.AssertionError$("Unexpected state under `()`: " + A.S(head))); + } + }, + _Parser_handleExtendedOperations(parser, stack) { + var $top = stack.pop(); + if (0 === $top) { + stack.push(A._Universe__lookupTerminalRti(parser.u, 1, "0&")); + return; + } + if (1 === $top) { + stack.push(A._Universe__lookupTerminalRti(parser.u, 4, "1&")); + return; + } + throw A.wrapException(A.AssertionError$("Unexpected extended operation " + A.S($top))); + }, + _Parser_collectArray(parser, stack) { + var array = stack.splice(parser.p); + A._Parser_toTypes(parser.u, parser.e, array); + parser.p = stack.pop(); + return array; + }, + _Parser_toType(universe, environment, item) { + if (typeof item == "string") + return 
A._Universe__lookupInterfaceRti(universe, item, universe.sEA); + else if (typeof item == "number") { + environment.toString; + return A._Parser_indexToType(universe, environment, item); + } else + return item; + }, + _Parser_toTypes(universe, environment, items) { + var i, + $length = items.length; + for (i = 0; i < $length; ++i) + items[i] = A._Parser_toType(universe, environment, items[i]); + }, + _Parser_toTypesNamed(universe, environment, items) { + var i, + $length = items.length; + for (i = 2; i < $length; i += 3) + items[i] = A._Parser_toType(universe, environment, items[i]); + }, + _Parser_indexToType(universe, environment, index) { + var typeArguments, len, + kind = environment._kind; + if (kind === 10) { + if (index === 0) + return environment._primary; + typeArguments = environment._rest; + len = typeArguments.length; + if (index <= len) + return typeArguments[index - 1]; + index -= len; + environment = environment._primary; + kind = environment._kind; + } else if (index === 0) + return environment; + if (kind !== 9) + throw A.wrapException(A.AssertionError$("Indexed base must be an interface type")); + typeArguments = environment._rest; + if (index <= typeArguments.length) + return typeArguments[index - 1]; + throw A.wrapException(A.AssertionError$("Bad index " + index + " for " + environment.toString$0(0))); + }, + isSubtype(universe, s, t) { + var result, + sCache = s._isSubtypeCache; + if (sCache == null) + sCache = s._isSubtypeCache = new Map(); + result = sCache.get(t); + if (result == null) { + result = A._isSubtype(universe, s, null, t, null, false) ? 
1 : 0; + sCache.set(t, result); + } + if (0 === result) + return false; + if (1 === result) + return true; + return true; + }, + _isSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var t1, sKind, leftTypeVariable, tKind, t2, sBounds, tBounds, sLength, i, sBound, tBound; + if (s === t) + return true; + if (!A.isSoundTopType(t)) + t1 = t === type$.legacy_Object; + else + t1 = true; + if (t1) + return true; + sKind = s._kind; + if (sKind === 4) + return true; + if (A.isSoundTopType(s)) + return false; + t1 = s._kind; + if (t1 === 1) + return true; + leftTypeVariable = sKind === 14; + if (leftTypeVariable) + if (A._isSubtype(universe, sEnv[s._primary], sEnv, t, tEnv, false)) + return true; + tKind = t._kind; + t1 = s === type$.Null || s === type$.JSNull; + if (t1) { + if (tKind === 8) + return A._isSubtype(universe, s, sEnv, t._primary, tEnv, false); + return t === type$.Null || t === type$.JSNull || tKind === 7 || tKind === 6; + } + if (t === type$.Object) { + if (sKind === 8) + return A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + if (sKind === 6) + return A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + return sKind !== 7; + } + if (sKind === 6) + return A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + if (tKind === 6) { + t1 = A.Rti__getQuestionFromStar(universe, t); + return A._isSubtype(universe, s, sEnv, t1, tEnv, false); + } + if (sKind === 8) { + if (!A._isSubtype(universe, s._primary, sEnv, t, tEnv, false)) + return false; + return A._isSubtype(universe, A.Rti__getFutureFromFutureOr(universe, s), sEnv, t, tEnv, false); + } + if (sKind === 7) { + t1 = A._isSubtype(universe, type$.Null, sEnv, t, tEnv, false); + return t1 && A._isSubtype(universe, s._primary, sEnv, t, tEnv, false); + } + if (tKind === 8) { + if (A._isSubtype(universe, s, sEnv, t._primary, tEnv, false)) + return true; + return A._isSubtype(universe, s, sEnv, A.Rti__getFutureFromFutureOr(universe, t), tEnv, false); + } + if (tKind === 7) { + t1 = 
A._isSubtype(universe, s, sEnv, type$.Null, tEnv, false); + return t1 || A._isSubtype(universe, s, sEnv, t._primary, tEnv, false); + } + if (leftTypeVariable) + return false; + t1 = sKind !== 12; + if ((!t1 || sKind === 13) && t === type$.Function) + return true; + t2 = sKind === 11; + if (t2 && t === type$.Record) + return true; + if (tKind === 13) { + if (s === type$.JavaScriptFunction) + return true; + if (sKind !== 13) + return false; + sBounds = s._rest; + tBounds = t._rest; + sLength = sBounds.length; + if (sLength !== tBounds.length) + return false; + sEnv = sEnv == null ? sBounds : sBounds.concat(sEnv); + tEnv = tEnv == null ? tBounds : tBounds.concat(tEnv); + for (i = 0; i < sLength; ++i) { + sBound = sBounds[i]; + tBound = tBounds[i]; + if (!A._isSubtype(universe, sBound, sEnv, tBound, tEnv, false) || !A._isSubtype(universe, tBound, tEnv, sBound, sEnv, false)) + return false; + } + return A._isFunctionSubtype(universe, s._primary, sEnv, t._primary, tEnv, false); + } + if (tKind === 12) { + if (s === type$.JavaScriptFunction) + return true; + if (t1) + return false; + return A._isFunctionSubtype(universe, s, sEnv, t, tEnv, false); + } + if (sKind === 9) { + if (tKind !== 9) + return false; + return A._isInterfaceSubtype(universe, s, sEnv, t, tEnv, false); + } + if (t2 && tKind === 11) + return A._isRecordSubtype(universe, s, sEnv, t, tEnv, false); + return false; + }, + _isFunctionSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var sParameters, tParameters, sRequiredPositional, tRequiredPositional, sRequiredPositionalLength, tRequiredPositionalLength, requiredPositionalDelta, sOptionalPositional, tOptionalPositional, sOptionalPositionalLength, tOptionalPositionalLength, i, t1, sNamed, tNamed, sNamedLength, tNamedLength, sIndex, tIndex, tName, sName, sIsRequired; + if (!A._isSubtype(universe, s._primary, sEnv, t._primary, tEnv, false)) + return false; + sParameters = s._rest; + tParameters = t._rest; + sRequiredPositional = 
sParameters._requiredPositional; + tRequiredPositional = tParameters._requiredPositional; + sRequiredPositionalLength = sRequiredPositional.length; + tRequiredPositionalLength = tRequiredPositional.length; + if (sRequiredPositionalLength > tRequiredPositionalLength) + return false; + requiredPositionalDelta = tRequiredPositionalLength - sRequiredPositionalLength; + sOptionalPositional = sParameters._optionalPositional; + tOptionalPositional = tParameters._optionalPositional; + sOptionalPositionalLength = sOptionalPositional.length; + tOptionalPositionalLength = tOptionalPositional.length; + if (sRequiredPositionalLength + sOptionalPositionalLength < tRequiredPositionalLength + tOptionalPositionalLength) + return false; + for (i = 0; i < sRequiredPositionalLength; ++i) { + t1 = sRequiredPositional[i]; + if (!A._isSubtype(universe, tRequiredPositional[i], tEnv, t1, sEnv, false)) + return false; + } + for (i = 0; i < requiredPositionalDelta; ++i) { + t1 = sOptionalPositional[i]; + if (!A._isSubtype(universe, tRequiredPositional[sRequiredPositionalLength + i], tEnv, t1, sEnv, false)) + return false; + } + for (i = 0; i < tOptionalPositionalLength; ++i) { + t1 = sOptionalPositional[requiredPositionalDelta + i]; + if (!A._isSubtype(universe, tOptionalPositional[i], tEnv, t1, sEnv, false)) + return false; + } + sNamed = sParameters._named; + tNamed = tParameters._named; + sNamedLength = sNamed.length; + tNamedLength = tNamed.length; + for (sIndex = 0, tIndex = 0; tIndex < tNamedLength; tIndex += 3) { + tName = tNamed[tIndex]; + for (; true;) { + if (sIndex >= sNamedLength) + return false; + sName = sNamed[sIndex]; + sIndex += 3; + if (tName < sName) + return false; + sIsRequired = sNamed[sIndex - 2]; + if (sName < tName) { + if (sIsRequired) + return false; + continue; + } + t1 = tNamed[tIndex + 1]; + if (sIsRequired && !t1) + return false; + t1 = sNamed[sIndex - 1]; + if (!A._isSubtype(universe, tNamed[tIndex + 2], tEnv, t1, sEnv, false)) + return false; + break; + } + } 
+ for (; sIndex < sNamedLength;) { + if (sNamed[sIndex + 1]) + return false; + sIndex += 3; + } + return true; + }, + _isInterfaceSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var rule, recipes, $length, supertypeArgs, i, + sName = s._primary, + tName = t._primary; + for (; sName !== tName;) { + rule = universe.tR[sName]; + if (rule == null) + return false; + if (typeof rule == "string") { + sName = rule; + continue; + } + recipes = rule[tName]; + if (recipes == null) + return false; + $length = recipes.length; + supertypeArgs = $length > 0 ? new Array($length) : init.typeUniverse.sEA; + for (i = 0; i < $length; ++i) + supertypeArgs[i] = A._Universe_evalInEnvironment(universe, s, recipes[i]); + return A._areArgumentsSubtypes(universe, supertypeArgs, null, sEnv, t._rest, tEnv, false); + } + return A._areArgumentsSubtypes(universe, s._rest, null, sEnv, t._rest, tEnv, false); + }, + _areArgumentsSubtypes(universe, sArgs, sVariances, sEnv, tArgs, tEnv, isLegacy) { + var i, + $length = sArgs.length; + for (i = 0; i < $length; ++i) + if (!A._isSubtype(universe, sArgs[i], sEnv, tArgs[i], tEnv, false)) + return false; + return true; + }, + _isRecordSubtype(universe, s, sEnv, t, tEnv, isLegacy) { + var i, + sFields = s._rest, + tFields = t._rest, + sCount = sFields.length; + if (sCount !== tFields.length) + return false; + if (s._primary !== t._primary) + return false; + for (i = 0; i < sCount; ++i) + if (!A._isSubtype(universe, sFields[i], sEnv, tFields[i], tEnv, false)) + return false; + return true; + }, + isNullable(t) { + var kind = t._kind, + t1 = true; + if (!(t === type$.Null || t === type$.JSNull)) + if (!A.isSoundTopType(t)) + if (kind !== 7) + if (!(kind === 6 && A.isNullable(t._primary))) + t1 = kind === 8 && A.isNullable(t._primary); + return t1; + }, + isDefinitelyTopType(t) { + var t1; + if (!A.isSoundTopType(t)) + t1 = t === type$.legacy_Object; + else + t1 = true; + return t1; + }, + isSoundTopType(t) { + var kind = t._kind; + return kind === 2 || kind 
=== 3 || kind === 4 || kind === 5 || t === type$.nullable_Object; + }, + _Utils_objectAssign(o, other) { + var i, key, + keys = Object.keys(other), + $length = keys.length; + for (i = 0; i < $length; ++i) { + key = keys[i]; + o[key] = other[key]; + } + }, + _Utils_newArrayOrEmpty($length) { + return $length > 0 ? new Array($length) : init.typeUniverse.sEA; + }, + Rti: function Rti(t0, t1) { + var _ = this; + _._as = t0; + _._is = t1; + _._cachedRuntimeType = _._specializedTestResource = _._isSubtypeCache = _._precomputed1 = null; + _._kind = 0; + _._canonicalRecipe = _._bindCache = _._evalCache = _._rest = _._primary = null; + }, + _FunctionParameters: function _FunctionParameters() { + this._named = this._optionalPositional = this._requiredPositional = null; + }, + _Type: function _Type(t0) { + this._rti = t0; + }, + _Error: function _Error() { + }, + _TypeError: function _TypeError(t0) { + this.__rti$_message = t0; + }, + _AsyncRun__initializeScheduleImmediate() { + var t1, div, span; + if (self.scheduleImmediate != null) + return A.async__AsyncRun__scheduleImmediateJsOverride$closure(); + if (self.MutationObserver != null && self.document != null) { + t1 = {}; + div = self.document.createElement("div"); + span = self.document.createElement("span"); + t1.storedCallback = null; + new self.MutationObserver(A.convertDartClosureToJS(new A._AsyncRun__initializeScheduleImmediate_internalCallback(t1), 1)).observe(div, {childList: true}); + return new A._AsyncRun__initializeScheduleImmediate_closure(t1, div, span); + } else if (self.setImmediate != null) + return A.async__AsyncRun__scheduleImmediateWithSetImmediate$closure(); + return A.async__AsyncRun__scheduleImmediateWithTimer$closure(); + }, + _AsyncRun__scheduleImmediateJsOverride(callback) { + self.scheduleImmediate(A.convertDartClosureToJS(new A._AsyncRun__scheduleImmediateJsOverride_internalCallback(type$.void_Function._as(callback)), 0)); + }, + _AsyncRun__scheduleImmediateWithSetImmediate(callback) { + 
self.setImmediate(A.convertDartClosureToJS(new A._AsyncRun__scheduleImmediateWithSetImmediate_internalCallback(type$.void_Function._as(callback)), 0)); + }, + _AsyncRun__scheduleImmediateWithTimer(callback) { + type$.void_Function._as(callback); + A._TimerImpl$(0, callback); + }, + _TimerImpl$(milliseconds, callback) { + var t1 = new A._TimerImpl(); + t1._TimerImpl$2(milliseconds, callback); + return t1; + }, + _makeAsyncAwaitCompleter($T) { + return new A._AsyncAwaitCompleter(new A._Future($.Zone__current, $T._eval$1("_Future<0>")), $T._eval$1("_AsyncAwaitCompleter<0>")); + }, + _asyncStartSync(bodyFunction, completer) { + bodyFunction.call$2(0, null); + completer.isSync = true; + return completer._future; + }, + _asyncAwait(object, bodyFunction) { + A._awaitOnObject(object, bodyFunction); + }, + _asyncReturn(object, completer) { + completer.complete$1(object); + }, + _asyncRethrow(object, completer) { + completer.completeError$2(A.unwrapException(object), A.getTraceFromException(object)); + }, + _awaitOnObject(object, bodyFunction) { + var t1, future, + thenCallback = new A._awaitOnObject_closure(bodyFunction), + errorCallback = new A._awaitOnObject_closure0(bodyFunction); + if (object instanceof A._Future) + object._thenAwait$1$2(thenCallback, errorCallback, type$.dynamic); + else { + t1 = type$.dynamic; + if (object instanceof A._Future) + object.then$1$2$onError(thenCallback, errorCallback, t1); + else { + future = new A._Future($.Zone__current, type$._Future_dynamic); + future._state = 8; + future._resultOrListeners = object; + future._thenAwait$1$2(thenCallback, errorCallback, t1); + } + } + }, + _wrapJsFunctionForAsync($function) { + var $protected = function(fn, ERROR) { + return function(errorCode, result) { + while (true) { + try { + fn(errorCode, result); + break; + } catch (error) { + result = error; + errorCode = ERROR; + } + } + }; + }($function, 1); + return $.Zone__current.registerBinaryCallback$3$1(new 
A._wrapJsFunctionForAsync_closure($protected), type$.void, type$.int, type$.dynamic); + }, + AsyncError_defaultStackTrace(error) { + var stackTrace; + if (type$.Error._is(error)) { + stackTrace = error.get$stackTrace(); + if (stackTrace != null) + return stackTrace; + } + return B.C__StringStackTrace; + }, + _interceptError(error, stackTrace) { + if ($.Zone__current === B.C__RootZone) + return null; + return null; + }, + _interceptUserError(error, stackTrace) { + if ($.Zone__current !== B.C__RootZone) + A._interceptError(error, stackTrace); + if (stackTrace == null) + if (type$.Error._is(error)) { + stackTrace = error.get$stackTrace(); + if (stackTrace == null) { + A.Primitives_trySetStackTrace(error, B.C__StringStackTrace); + stackTrace = B.C__StringStackTrace; + } + } else + stackTrace = B.C__StringStackTrace; + else if (type$.Error._is(error)) + A.Primitives_trySetStackTrace(error, stackTrace); + return new A.AsyncError(error, stackTrace); + }, + _Future__chainCoreFuture(source, target, sync) { + var t2, t3, ignoreError, listeners, _box_0 = {}, + t1 = _box_0.source = source; + for (t2 = type$._Future_dynamic; t3 = t1._state, (t3 & 4) !== 0; t1 = source) { + source = t2._as(t1._resultOrListeners); + _box_0.source = source; + } + if (t1 === target) { + target._asyncCompleteError$2(new A.ArgumentError(true, t1, null, "Cannot complete a future with itself"), A.StackTrace_current()); + return; + } + ignoreError = target._state & 1; + t2 = t1._state = t3 | ignoreError; + if ((t2 & 24) === 0) { + listeners = type$.nullable__FutureListener_dynamic_dynamic._as(target._resultOrListeners); + target._state = target._state & 1 | 4; + target._resultOrListeners = t1; + t1._prependListeners$1(listeners); + return; + } + if (!sync) + if (target._resultOrListeners == null) + t1 = (t2 & 16) === 0 || ignoreError !== 0; + else + t1 = false; + else + t1 = true; + if (t1) { + listeners = target._removeListeners$0(); + target._cloneResult$1(_box_0.source); + 
A._Future__propagateToListeners(target, listeners); + return; + } + target._state ^= 2; + A._rootScheduleMicrotask(null, null, target._zone, type$.void_Function._as(new A._Future__chainCoreFuture_closure(_box_0, target))); + }, + _Future__propagateToListeners(source, listeners) { + var t2, t3, t4, _box_0, t5, t6, hasError, asyncError, nextListener, nextListener0, sourceResult, t7, zone, oldZone, result, current, _box_1 = {}, + t1 = _box_1.source = source; + for (t2 = type$.AsyncError, t3 = type$.nullable__FutureListener_dynamic_dynamic, t4 = type$.Future_dynamic; true;) { + _box_0 = {}; + t5 = t1._state; + t6 = (t5 & 16) === 0; + hasError = !t6; + if (listeners == null) { + if (hasError && (t5 & 1) === 0) { + asyncError = t2._as(t1._resultOrListeners); + A._rootHandleError(asyncError.error, asyncError.stackTrace); + } + return; + } + _box_0.listener = listeners; + nextListener = listeners._nextListener; + for (t1 = listeners; nextListener != null; t1 = nextListener, nextListener = nextListener0) { + t1._nextListener = null; + A._Future__propagateToListeners(_box_1.source, t1); + _box_0.listener = nextListener; + nextListener0 = nextListener._nextListener; + } + t5 = _box_1.source; + sourceResult = t5._resultOrListeners; + _box_0.listenerHasError = hasError; + _box_0.listenerValueOrError = sourceResult; + if (t6) { + t7 = t1.state; + t7 = (t7 & 1) !== 0 || (t7 & 15) === 8; + } else + t7 = true; + if (t7) { + zone = t1.result._zone; + if (hasError) { + t5 = t5._zone === zone; + t5 = !(t5 || t5); + } else + t5 = false; + if (t5) { + t2._as(sourceResult); + A._rootHandleError(sourceResult.error, sourceResult.stackTrace); + return; + } + oldZone = $.Zone__current; + if (oldZone !== zone) + $.Zone__current = zone; + else + oldZone = null; + t1 = t1.state; + if ((t1 & 15) === 8) + new A._Future__propagateToListeners_handleWhenCompleteCallback(_box_0, _box_1, hasError).call$0(); + else if (t6) { + if ((t1 & 1) !== 0) + new 
A._Future__propagateToListeners_handleValueCallback(_box_0, sourceResult).call$0(); + } else if ((t1 & 2) !== 0) + new A._Future__propagateToListeners_handleError(_box_1, _box_0).call$0(); + if (oldZone != null) + $.Zone__current = oldZone; + t1 = _box_0.listenerValueOrError; + if (t1 instanceof A._Future) { + t5 = _box_0.listener.$ti; + t5 = t5._eval$1("Future<2>")._is(t1) || !t5._rest[1]._is(t1); + } else + t5 = false; + if (t5) { + t4._as(t1); + result = _box_0.listener.result; + if ((t1._state & 24) !== 0) { + current = t3._as(result._resultOrListeners); + result._resultOrListeners = null; + listeners = result._reverseListeners$1(current); + result._state = t1._state & 30 | result._state & 1; + result._resultOrListeners = t1._resultOrListeners; + _box_1.source = t1; + continue; + } else + A._Future__chainCoreFuture(t1, result, true); + return; + } + } + result = _box_0.listener.result; + current = t3._as(result._resultOrListeners); + result._resultOrListeners = null; + listeners = result._reverseListeners$1(current); + t1 = _box_0.listenerHasError; + t5 = _box_0.listenerValueOrError; + if (!t1) { + result.$ti._precomputed1._as(t5); + result._state = 8; + result._resultOrListeners = t5; + } else { + t2._as(t5); + result._state = result._state & 1 | 16; + result._resultOrListeners = t5; + } + _box_1.source = result; + t1 = result; + } + }, + _registerErrorHandler(errorHandler, zone) { + var t1; + if (type$.dynamic_Function_Object_StackTrace._is(errorHandler)) + return zone.registerBinaryCallback$3$1(errorHandler, type$.dynamic, type$.Object, type$.StackTrace); + t1 = type$.dynamic_Function_Object; + if (t1._is(errorHandler)) + return t1._as(errorHandler); + throw A.wrapException(A.ArgumentError$value(errorHandler, "onError", string$.Error_)); + }, + _microtaskLoop() { + var entry, next; + for (entry = $._nextCallback; entry != null; entry = $._nextCallback) { + $._lastPriorityCallback = null; + next = entry.next; + $._nextCallback = next; + if (next == null) + 
$._lastCallback = null; + entry.callback.call$0(); + } + }, + _startMicrotaskLoop() { + $._isInCallbackLoop = true; + try { + A._microtaskLoop(); + } finally { + $._lastPriorityCallback = null; + $._isInCallbackLoop = false; + if ($._nextCallback != null) + $.$get$_AsyncRun__scheduleImmediateClosure().call$1(A.async___startMicrotaskLoop$closure()); + } + }, + _scheduleAsyncCallback(callback) { + var newEntry = new A._AsyncCallbackEntry(callback), + lastCallback = $._lastCallback; + if (lastCallback == null) { + $._nextCallback = $._lastCallback = newEntry; + if (!$._isInCallbackLoop) + $.$get$_AsyncRun__scheduleImmediateClosure().call$1(A.async___startMicrotaskLoop$closure()); + } else + $._lastCallback = lastCallback.next = newEntry; + }, + _schedulePriorityAsyncCallback(callback) { + var entry, lastPriorityCallback, next, + t1 = $._nextCallback; + if (t1 == null) { + A._scheduleAsyncCallback(callback); + $._lastPriorityCallback = $._lastCallback; + return; + } + entry = new A._AsyncCallbackEntry(callback); + lastPriorityCallback = $._lastPriorityCallback; + if (lastPriorityCallback == null) { + entry.next = t1; + $._nextCallback = $._lastPriorityCallback = entry; + } else { + next = lastPriorityCallback.next; + entry.next = next; + $._lastPriorityCallback = lastPriorityCallback.next = entry; + if (next == null) + $._lastCallback = entry; + } + }, + scheduleMicrotask(callback) { + var _null = null, + currentZone = $.Zone__current; + if (B.C__RootZone === currentZone) { + A._rootScheduleMicrotask(_null, _null, B.C__RootZone, callback); + return; + } + A._rootScheduleMicrotask(_null, _null, currentZone, type$.void_Function._as(currentZone.bindCallbackGuarded$1(callback))); + }, + StreamIterator_StreamIterator(stream, $T) { + A.checkNotNullable(stream, "stream", type$.Object); + return new A._StreamIterator($T._eval$1("_StreamIterator<0>")); + }, + _runGuarded(notificationHandler) { + return; + }, + _BufferingStreamSubscription__registerErrorHandler(zone, 
handleError) { + if (handleError == null) + handleError = A.async___nullErrorHandler$closure(); + if (type$.void_Function_Object_StackTrace._is(handleError)) + return zone.registerBinaryCallback$3$1(handleError, type$.dynamic, type$.Object, type$.StackTrace); + if (type$.void_Function_Object._is(handleError)) + return type$.dynamic_Function_Object._as(handleError); + throw A.wrapException(A.ArgumentError$("handleError callback must take either an Object (the error), or both an Object (the error) and a StackTrace.", null)); + }, + _nullErrorHandler(error, stackTrace) { + A._rootHandleError(error, stackTrace); + }, + _nullDoneHandler() { + }, + _rootHandleError(error, stackTrace) { + A._schedulePriorityAsyncCallback(new A._rootHandleError_closure(error, stackTrace)); + }, + _rootRun($self, $parent, zone, f, $R) { + var old, + t1 = $.Zone__current; + if (t1 === zone) + return f.call$0(); + $.Zone__current = zone; + old = t1; + try { + t1 = f.call$0(); + return t1; + } finally { + $.Zone__current = old; + } + }, + _rootRunUnary($self, $parent, zone, f, arg, $R, $T) { + var old, + t1 = $.Zone__current; + if (t1 === zone) + return f.call$1(arg); + $.Zone__current = zone; + old = t1; + try { + t1 = f.call$1(arg); + return t1; + } finally { + $.Zone__current = old; + } + }, + _rootRunBinary($self, $parent, zone, f, arg1, arg2, $R, T1, T2) { + var old, + t1 = $.Zone__current; + if (t1 === zone) + return f.call$2(arg1, arg2); + $.Zone__current = zone; + old = t1; + try { + t1 = f.call$2(arg1, arg2); + return t1; + } finally { + $.Zone__current = old; + } + }, + _rootScheduleMicrotask($self, $parent, zone, f) { + type$.void_Function._as(f); + if (B.C__RootZone !== zone) + f = zone.bindCallbackGuarded$1(f); + A._scheduleAsyncCallback(f); + }, + _AsyncRun__initializeScheduleImmediate_internalCallback: function _AsyncRun__initializeScheduleImmediate_internalCallback(t0) { + this._box_0 = t0; + }, + _AsyncRun__initializeScheduleImmediate_closure: function 
_AsyncRun__initializeScheduleImmediate_closure(t0, t1, t2) { + this._box_0 = t0; + this.div = t1; + this.span = t2; + }, + _AsyncRun__scheduleImmediateJsOverride_internalCallback: function _AsyncRun__scheduleImmediateJsOverride_internalCallback(t0) { + this.callback = t0; + }, + _AsyncRun__scheduleImmediateWithSetImmediate_internalCallback: function _AsyncRun__scheduleImmediateWithSetImmediate_internalCallback(t0) { + this.callback = t0; + }, + _TimerImpl: function _TimerImpl() { + }, + _TimerImpl_internalCallback: function _TimerImpl_internalCallback(t0, t1) { + this.$this = t0; + this.callback = t1; + }, + _AsyncAwaitCompleter: function _AsyncAwaitCompleter(t0, t1) { + this._future = t0; + this.isSync = false; + this.$ti = t1; + }, + _awaitOnObject_closure: function _awaitOnObject_closure(t0) { + this.bodyFunction = t0; + }, + _awaitOnObject_closure0: function _awaitOnObject_closure0(t0) { + this.bodyFunction = t0; + }, + _wrapJsFunctionForAsync_closure: function _wrapJsFunctionForAsync_closure(t0) { + this.$protected = t0; + }, + AsyncError: function AsyncError(t0, t1) { + this.error = t0; + this.stackTrace = t1; + }, + _BroadcastStream: function _BroadcastStream(t0, t1) { + this._async$_controller = t0; + this.$ti = t1; + }, + _BroadcastSubscription: function _BroadcastSubscription(t0, t1, t2, t3, t4) { + var _ = this; + _._eventState = 0; + _._async$_previous = _._async$_next = null; + _._async$_controller = t0; + _._onData = t1; + _._zone = t2; + _._state = t3; + _._pending = null; + _.$ti = t4; + }, + _BroadcastStreamController: function _BroadcastStreamController() { + }, + _SyncBroadcastStreamController: function _SyncBroadcastStreamController(t0, t1, t2) { + var _ = this; + _.onListen = t0; + _.onCancel = t1; + _._state = 0; + _._lastSubscription = _._firstSubscription = null; + _.$ti = t2; + }, + _SyncBroadcastStreamController__sendData_closure: function _SyncBroadcastStreamController__sendData_closure(t0, t1) { + this.$this = t0; + this.data = t1; + }, 
+ _Completer: function _Completer() { + }, + _AsyncCompleter: function _AsyncCompleter(t0, t1) { + this.future = t0; + this.$ti = t1; + }, + _FutureListener: function _FutureListener(t0, t1, t2, t3, t4) { + var _ = this; + _._nextListener = null; + _.result = t0; + _.state = t1; + _.callback = t2; + _.errorCallback = t3; + _.$ti = t4; + }, + _Future: function _Future(t0, t1) { + var _ = this; + _._state = 0; + _._zone = t0; + _._resultOrListeners = null; + _.$ti = t1; + }, + _Future__addListener_closure: function _Future__addListener_closure(t0, t1) { + this.$this = t0; + this.listener = t1; + }, + _Future__prependListeners_closure: function _Future__prependListeners_closure(t0, t1) { + this._box_0 = t0; + this.$this = t1; + }, + _Future__chainForeignFuture_closure: function _Future__chainForeignFuture_closure(t0) { + this.$this = t0; + }, + _Future__chainForeignFuture_closure0: function _Future__chainForeignFuture_closure0(t0) { + this.$this = t0; + }, + _Future__chainForeignFuture_closure1: function _Future__chainForeignFuture_closure1(t0, t1, t2) { + this.$this = t0; + this.e = t1; + this.s = t2; + }, + _Future__chainCoreFuture_closure: function _Future__chainCoreFuture_closure(t0, t1) { + this._box_0 = t0; + this.target = t1; + }, + _Future__asyncCompleteWithValue_closure: function _Future__asyncCompleteWithValue_closure(t0, t1) { + this.$this = t0; + this.value = t1; + }, + _Future__asyncCompleteError_closure: function _Future__asyncCompleteError_closure(t0, t1, t2) { + this.$this = t0; + this.error = t1; + this.stackTrace = t2; + }, + _Future__propagateToListeners_handleWhenCompleteCallback: function _Future__propagateToListeners_handleWhenCompleteCallback(t0, t1, t2) { + this._box_0 = t0; + this._box_1 = t1; + this.hasError = t2; + }, + _Future__propagateToListeners_handleWhenCompleteCallback_closure: function _Future__propagateToListeners_handleWhenCompleteCallback_closure(t0, t1) { + this.joinedResult = t0; + this.originalSource = t1; + }, + 
_Future__propagateToListeners_handleWhenCompleteCallback_closure0: function _Future__propagateToListeners_handleWhenCompleteCallback_closure0(t0) { + this.joinedResult = t0; + }, + _Future__propagateToListeners_handleValueCallback: function _Future__propagateToListeners_handleValueCallback(t0, t1) { + this._box_0 = t0; + this.sourceResult = t1; + }, + _Future__propagateToListeners_handleError: function _Future__propagateToListeners_handleError(t0, t1) { + this._box_1 = t0; + this._box_0 = t1; + }, + _AsyncCallbackEntry: function _AsyncCallbackEntry(t0) { + this.callback = t0; + this.next = null; + }, + Stream: function Stream() { + }, + Stream_length_closure: function Stream_length_closure(t0, t1) { + this._box_0 = t0; + this.$this = t1; + }, + Stream_length_closure0: function Stream_length_closure0(t0, t1) { + this._box_0 = t0; + this.future = t1; + }, + _ControllerStream: function _ControllerStream() { + }, + _ControllerSubscription: function _ControllerSubscription() { + }, + _BufferingStreamSubscription: function _BufferingStreamSubscription() { + }, + _StreamImpl: function _StreamImpl() { + }, + _DelayedEvent: function _DelayedEvent() { + }, + _DelayedData: function _DelayedData(t0, t1) { + this.value = t0; + this.next = null; + this.$ti = t1; + }, + _PendingEvents: function _PendingEvents(t0) { + var _ = this; + _._state = 0; + _.lastPendingEvent = _.firstPendingEvent = null; + _.$ti = t0; + }, + _PendingEvents_schedule_closure: function _PendingEvents_schedule_closure(t0, t1) { + this.$this = t0; + this.dispatch = t1; + }, + _DoneStreamSubscription: function _DoneStreamSubscription(t0, t1) { + var _ = this; + _._state = 1; + _._zone = t0; + _._onDone = null; + _.$ti = t1; + }, + _StreamIterator: function _StreamIterator(t0) { + this.$ti = t0; + }, + _Zone: function _Zone() { + }, + _rootHandleError_closure: function _rootHandleError_closure(t0, t1) { + this.error = t0; + this.stackTrace = t1; + }, + _RootZone: function _RootZone() { + }, + 
_RootZone_bindCallbackGuarded_closure: function _RootZone_bindCallbackGuarded_closure(t0, t1) { + this.$this = t0; + this.f = t1; + }, + _HashMap__getTableEntry(table, key) { + var entry = table[key]; + return entry === table ? null : entry; + }, + _HashMap__setTableEntry(table, key, value) { + if (value == null) + table[key] = table; + else + table[key] = value; + }, + _HashMap__newHashTable() { + var table = Object.create(null); + A._HashMap__setTableEntry(table, "", table); + delete table[""]; + return table; + }, + LinkedHashMap_LinkedHashMap$_literal(keyValuePairs, $K, $V) { + return $K._eval$1("@<0>")._bind$1($V)._eval$1("LinkedHashMap<1,2>")._as(A.fillLiteralMap(keyValuePairs, new A.JsLinkedHashMap($K._eval$1("@<0>")._bind$1($V)._eval$1("JsLinkedHashMap<1,2>")))); + }, + LinkedHashMap_LinkedHashMap$_empty($K, $V) { + return new A.JsLinkedHashMap($K._eval$1("@<0>")._bind$1($V)._eval$1("JsLinkedHashMap<1,2>")); + }, + MapBase_mapToString(m) { + var result, t1; + if (A.isToStringVisiting(m)) + return "{...}"; + result = new A.StringBuffer(""); + try { + t1 = {}; + B.JSArray_methods.add$1($.toStringVisiting, m); + result._contents += "{"; + t1.first = true; + m.forEach$1(0, new A.MapBase_mapToString_closure(t1, result)); + result._contents += "}"; + } finally { + if (0 >= $.toStringVisiting.length) + return A.ioore($.toStringVisiting, -1); + $.toStringVisiting.pop(); + } + t1 = result._contents; + return t1.charCodeAt(0) == 0 ? 
t1 : t1; + }, + _HashMap: function _HashMap() { + }, + _IdentityHashMap: function _IdentityHashMap(t0) { + var _ = this; + _._collection$_length = 0; + _._collection$_keys = _._collection$_rest = _._collection$_nums = _._collection$_strings = null; + _.$ti = t0; + }, + _HashMapKeyIterable: function _HashMapKeyIterable(t0, t1) { + this._collection$_map = t0; + this.$ti = t1; + }, + _HashMapKeyIterator: function _HashMapKeyIterator(t0, t1, t2) { + var _ = this; + _._collection$_map = t0; + _._collection$_keys = t1; + _._offset = 0; + _._collection$_current = null; + _.$ti = t2; + }, + ListBase: function ListBase() { + }, + MapBase: function MapBase() { + }, + MapBase_mapToString_closure: function MapBase_mapToString_closure(t0, t1) { + this._box_0 = t0; + this.result = t1; + }, + _UnmodifiableMapMixin: function _UnmodifiableMapMixin() { + }, + MapView: function MapView() { + }, + UnmodifiableMapView: function UnmodifiableMapView() { + }, + _UnmodifiableMapView_MapView__UnmodifiableMapMixin: function _UnmodifiableMapView_MapView__UnmodifiableMapMixin() { + }, + _Base64Encoder_encodeChunk(alphabet, bytes, start, end, isLast, output, outputIndex, state) { + var t1, t2, t3, i, byteOr, byte, outputIndex0, t4, t5, outputIndex1, + bits = state >>> 2, + expectedChars = 3 - (state & 3); + for (t1 = bytes.length, t2 = alphabet.length, t3 = output.$flags | 0, i = start, byteOr = 0; i < end; ++i) { + if (!(i < t1)) + return A.ioore(bytes, i); + byte = bytes[i]; + byteOr |= byte; + bits = (bits << 8 | byte) & 16777215; + --expectedChars; + if (expectedChars === 0) { + outputIndex0 = outputIndex + 1; + t4 = bits >>> 18 & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + t3 & 2 && A.throwUnsupportedOperation(output); + t5 = output.length; + if (!(outputIndex < t5)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t4); + outputIndex = outputIndex0 + 1; + t4 = bits >>> 12 & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + if 
(!(outputIndex0 < t5)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t4); + outputIndex0 = outputIndex + 1; + t4 = bits >>> 6 & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + if (!(outputIndex < t5)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t4); + outputIndex = outputIndex0 + 1; + t4 = bits & 63; + if (!(t4 < t2)) + return A.ioore(alphabet, t4); + if (!(outputIndex0 < t5)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t4); + bits = 0; + expectedChars = 3; + } + } + if (byteOr >= 0 && byteOr <= 255) { + if (expectedChars < 3) { + outputIndex0 = outputIndex + 1; + outputIndex1 = outputIndex0 + 1; + if (3 - expectedChars === 1) { + t1 = bits >>> 2 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + t3 & 2 && A.throwUnsupportedOperation(output); + t3 = output.length; + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t1); + t1 = bits << 4 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + if (!(outputIndex0 < t3)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t1); + outputIndex = outputIndex1 + 1; + if (!(outputIndex1 < t3)) + return A.ioore(output, outputIndex1); + output[outputIndex1] = 61; + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = 61; + } else { + t1 = bits >>> 10 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + t3 & 2 && A.throwUnsupportedOperation(output); + t3 = output.length; + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = alphabet.charCodeAt(t1); + t1 = bits >>> 4 & 63; + if (!(t1 < t2)) + return A.ioore(alphabet, t1); + if (!(outputIndex0 < t3)) + return A.ioore(output, outputIndex0); + output[outputIndex0] = alphabet.charCodeAt(t1); + outputIndex = outputIndex1 + 1; + t1 = bits << 2 & 63; + if (!(t1 < t2)) + return 
A.ioore(alphabet, t1); + if (!(outputIndex1 < t3)) + return A.ioore(output, outputIndex1); + output[outputIndex1] = alphabet.charCodeAt(t1); + if (!(outputIndex < t3)) + return A.ioore(output, outputIndex); + output[outputIndex] = 61; + } + return 0; + } + return (bits << 2 | 3 - expectedChars) >>> 0; + } + for (i = start; i < end;) { + if (!(i < t1)) + return A.ioore(bytes, i); + byte = bytes[i]; + if (byte > 255) + break; + ++i; + } + if (!(i < t1)) + return A.ioore(bytes, i); + throw A.wrapException(A.ArgumentError$value(bytes, "Not a byte value at index " + i + ": 0x" + B.JSInt_methods.toRadixString$1(bytes[i], 16), null)); + }, + _Base64Decoder_decodeChunk(input, start, end, output, outIndex, state) { + var t1, t2, t3, i, charOr, char, t4, code, outIndex0, expectedPadding, + _s31_ = "Invalid encoding before padding", + _s17_ = "Invalid character", + bits = B.JSInt_methods._shrOtherPositive$1(state, 2), + count = state & 3, + inverseAlphabet = $.$get$_Base64Decoder__inverseAlphabet(); + for (t1 = input.length, t2 = inverseAlphabet.length, t3 = output.$flags | 0, i = start, charOr = 0; i < end; ++i) { + if (!(i < t1)) + return A.ioore(input, i); + char = input.charCodeAt(i); + charOr |= char; + t4 = char & 127; + if (!(t4 < t2)) + return A.ioore(inverseAlphabet, t4); + code = inverseAlphabet[t4]; + if (code >= 0) { + bits = (bits << 6 | code) & 16777215; + count = count + 1 & 3; + if (count === 0) { + outIndex0 = outIndex + 1; + t3 & 2 && A.throwUnsupportedOperation(output); + t4 = output.length; + if (!(outIndex < t4)) + return A.ioore(output, outIndex); + output[outIndex] = bits >>> 16 & 255; + outIndex = outIndex0 + 1; + if (!(outIndex0 < t4)) + return A.ioore(output, outIndex0); + output[outIndex0] = bits >>> 8 & 255; + outIndex0 = outIndex + 1; + if (!(outIndex < t4)) + return A.ioore(output, outIndex); + output[outIndex] = bits & 255; + outIndex = outIndex0; + bits = 0; + } + continue; + } else if (code === -1 && count > 1) { + if (charOr > 127) + break; + 
if (count === 3) { + if ((bits & 3) !== 0) + throw A.wrapException(A.FormatException$(_s31_, input, i)); + outIndex0 = outIndex + 1; + t3 & 2 && A.throwUnsupportedOperation(output); + t1 = output.length; + if (!(outIndex < t1)) + return A.ioore(output, outIndex); + output[outIndex] = bits >>> 10; + if (!(outIndex0 < t1)) + return A.ioore(output, outIndex0); + output[outIndex0] = bits >>> 2; + } else { + if ((bits & 15) !== 0) + throw A.wrapException(A.FormatException$(_s31_, input, i)); + t3 & 2 && A.throwUnsupportedOperation(output); + if (!(outIndex < output.length)) + return A.ioore(output, outIndex); + output[outIndex] = bits >>> 4; + } + expectedPadding = (3 - count) * 3; + if (char === 37) + expectedPadding += 2; + return A._Base64Decoder__checkPadding(input, i + 1, end, -expectedPadding - 1); + } + throw A.wrapException(A.FormatException$(_s17_, input, i)); + } + if (charOr >= 0 && charOr <= 127) + return (bits << 2 | count) >>> 0; + for (i = start; i < end; ++i) { + if (!(i < t1)) + return A.ioore(input, i); + if (input.charCodeAt(i) > 127) + break; + } + throw A.wrapException(A.FormatException$(_s17_, input, i)); + }, + _Base64Decoder__allocateBuffer(input, start, end, state) { + var paddingStart = A._Base64Decoder__trimPaddingChars(input, start, end), + $length = (state & 3) + (paddingStart - start), + bufferLength = B.JSInt_methods._shrOtherPositive$1($length, 2) * 3, + remainderLength = $length & 3; + if (remainderLength !== 0 && paddingStart < end) + bufferLength += remainderLength - 1; + if (bufferLength > 0) + return new Uint8Array(bufferLength); + return $.$get$_Base64Decoder__emptyBuffer(); + }, + _Base64Decoder__trimPaddingChars(input, start, end) { + var char, + t1 = input.length, + newEnd = end, + index = newEnd, + padding = 0; + while (true) { + if (!(index > start && padding < 2)) + break; + c$0: { + --index; + if (!(index >= 0 && index < t1)) + return A.ioore(input, index); + char = input.charCodeAt(index); + if (char === 61) { + ++padding; + 
newEnd = index; + break c$0; + } + if ((char | 32) === 100) { + if (index === start) + break; + --index; + if (!(index >= 0 && index < t1)) + return A.ioore(input, index); + char = input.charCodeAt(index); + } + if (char === 51) { + if (index === start) + break; + --index; + if (!(index >= 0 && index < t1)) + return A.ioore(input, index); + char = input.charCodeAt(index); + } + if (char === 37) { + ++padding; + newEnd = index; + break c$0; + } + break; + } + } + return newEnd; + }, + _Base64Decoder__checkPadding(input, start, end, state) { + var expectedPadding, t1, char; + if (start === end) + return state; + expectedPadding = -state - 1; + for (t1 = input.length; expectedPadding > 0;) { + if (!(start < t1)) + return A.ioore(input, start); + char = input.charCodeAt(start); + if (expectedPadding === 3) { + if (char === 61) { + expectedPadding -= 3; + ++start; + break; + } + if (char === 37) { + --expectedPadding; + ++start; + if (start === end) + break; + if (!(start < t1)) + return A.ioore(input, start); + char = input.charCodeAt(start); + } else + break; + } + if ((expectedPadding > 3 ? 
expectedPadding - 3 : expectedPadding) === 2) { + if (char !== 51) + break; + ++start; + --expectedPadding; + if (start === end) + break; + if (!(start < t1)) + return A.ioore(input, start); + char = input.charCodeAt(start); + } + if ((char | 32) !== 100) + break; + ++start; + --expectedPadding; + if (start === end) + break; + } + if (start !== end) + throw A.wrapException(A.FormatException$("Invalid padding character", input, start)); + return -expectedPadding - 1; + }, + Base64Codec: function Base64Codec() { + }, + Base64Encoder: function Base64Encoder() { + }, + _Base64Encoder: function _Base64Encoder(t0) { + this._convert$_state = 0; + this._alphabet = t0; + }, + Base64Decoder: function Base64Decoder() { + }, + _Base64Decoder: function _Base64Decoder() { + this._convert$_state = 0; + }, + Codec: function Codec() { + }, + Converter: function Converter() { + }, + Error__throw(error, stackTrace) { + error = A.wrapException(error); + if (error == null) + error = type$.Object._as(error); + error.stack = stackTrace.toString$0(0); + throw error; + throw A.wrapException("unreachable"); + }, + List_List$filled($length, fill, growable, $E) { + var i, + result = J.JSArray_JSArray$fixed($length, $E); + if ($length !== 0 && fill != null) + for (i = 0; i < $length; ++i) + result[i] = fill; + return result; + }, + List_List$of(elements, growable, $E) { + var t1 = A.List_List$_of(elements, $E); + return t1; + }, + List_List$_of(elements, $E) { + var list, t1; + if (Array.isArray(elements)) + return A._setArrayType(elements.slice(0), $E._eval$1("JSArray<0>")); + list = A._setArrayType([], $E._eval$1("JSArray<0>")); + for (t1 = J.get$iterator$ax(elements); t1.moveNext$0();) + B.JSArray_methods.add$1(list, t1.get$current()); + return list; + }, + String_String$fromCharCodes(charCodes) { + var t1; + A.RangeError_checkNotNegative(0, "start"); + t1 = A.String__stringFromUint8List(charCodes, 0, null); + return t1; + }, + String__stringFromUint8List(charCodes, start, endOrNull) { + 
var len = charCodes.length; + if (start >= len) + return ""; + return A.Primitives_stringFromNativeUint8List(charCodes, start, len); + }, + StringBuffer__writeAll(string, objects, separator) { + var iterator = J.get$iterator$ax(objects); + if (!iterator.moveNext$0()) + return string; + if (separator.length === 0) { + do + string += A.S(iterator.get$current()); + while (iterator.moveNext$0()); + } else { + string += A.S(iterator.get$current()); + for (; iterator.moveNext$0();) + string = string + separator + A.S(iterator.get$current()); + } + return string; + }, + NoSuchMethodError_NoSuchMethodError$withInvocation(receiver, invocation) { + return new A.NoSuchMethodError(receiver, invocation.get$memberName(), invocation.get$positionalArguments(), invocation.get$namedArguments()); + }, + StackTrace_current() { + return A.getTraceFromException(new Error()); + }, + DateTime__fourDigits(n) { + var absN = Math.abs(n), + sign = n < 0 ? "-" : ""; + if (absN >= 1000) + return "" + n; + if (absN >= 100) + return sign + "0" + absN; + if (absN >= 10) + return sign + "00" + absN; + return sign + "000" + absN; + }, + DateTime__threeDigits(n) { + if (n >= 100) + return "" + n; + if (n >= 10) + return "0" + n; + return "00" + n; + }, + DateTime__twoDigits(n) { + if (n >= 10) + return "" + n; + return "0" + n; + }, + Error_safeToString(object) { + if (typeof object == "number" || A._isBool(object) || object == null) + return J.toString$0$(object); + if (typeof object == "string") + return JSON.stringify(object); + return A.Primitives_safeToString(object); + }, + Error_throwWithStackTrace(error, stackTrace) { + A.checkNotNullable(error, "error", type$.Object); + A.checkNotNullable(stackTrace, "stackTrace", type$.StackTrace); + A.Error__throw(error, stackTrace); + }, + AssertionError$(message) { + return new A.AssertionError(message); + }, + ArgumentError$(message, $name) { + return new A.ArgumentError(false, null, $name, message); + }, + ArgumentError$value(value, $name, message) { + 
return new A.ArgumentError(true, value, $name, message); + }, + RangeError$value(value, $name) { + return new A.RangeError(null, null, true, value, $name, "Value not in range"); + }, + RangeError$range(invalidValue, minValue, maxValue, $name, message) { + return new A.RangeError(minValue, maxValue, true, invalidValue, $name, "Invalid value"); + }, + RangeError_checkValidRange(start, end, $length) { + if (0 > start || start > $length) + throw A.wrapException(A.RangeError$range(start, 0, $length, "start", null)); + if (end != null) { + if (start > end || end > $length) + throw A.wrapException(A.RangeError$range(end, start, $length, "end", null)); + return end; + } + return $length; + }, + RangeError_checkNotNegative(value, $name) { + if (value < 0) + throw A.wrapException(A.RangeError$range(value, 0, null, $name, null)); + return value; + }, + IndexError$withLength(invalidValue, $length, indexable, $name) { + return new A.IndexError($length, true, invalidValue, $name, "Index out of range"); + }, + UnsupportedError$(message) { + return new A.UnsupportedError(message); + }, + UnimplementedError$(message) { + return new A.UnimplementedError(message); + }, + StateError$(message) { + return new A.StateError(message); + }, + ConcurrentModificationError$(modifiedObject) { + return new A.ConcurrentModificationError(modifiedObject); + }, + Exception_Exception(message) { + return new A._Exception(message); + }, + FormatException$(message, source, offset) { + return new A.FormatException(message, source, offset); + }, + Iterable_iterableToShortString(iterable, leftDelimiter, rightDelimiter) { + var parts, t1; + if (A.isToStringVisiting(iterable)) { + if (leftDelimiter === "(" && rightDelimiter === ")") + return "(...)"; + return leftDelimiter + "..." 
+ rightDelimiter; + } + parts = A._setArrayType([], type$.JSArray_String); + B.JSArray_methods.add$1($.toStringVisiting, iterable); + try { + A._iterablePartsToStrings(iterable, parts); + } finally { + if (0 >= $.toStringVisiting.length) + return A.ioore($.toStringVisiting, -1); + $.toStringVisiting.pop(); + } + t1 = A.StringBuffer__writeAll(leftDelimiter, type$.Iterable_dynamic._as(parts), ", ") + rightDelimiter; + return t1.charCodeAt(0) == 0 ? t1 : t1; + }, + Iterable_iterableToFullString(iterable, leftDelimiter, rightDelimiter) { + var buffer, t1; + if (A.isToStringVisiting(iterable)) + return leftDelimiter + "..." + rightDelimiter; + buffer = new A.StringBuffer(leftDelimiter); + B.JSArray_methods.add$1($.toStringVisiting, iterable); + try { + t1 = buffer; + t1._contents = A.StringBuffer__writeAll(t1._contents, iterable, ", "); + } finally { + if (0 >= $.toStringVisiting.length) + return A.ioore($.toStringVisiting, -1); + $.toStringVisiting.pop(); + } + buffer._contents += rightDelimiter; + t1 = buffer._contents; + return t1.charCodeAt(0) == 0 ? 
t1 : t1; + }, + _iterablePartsToStrings(iterable, parts) { + var next, ultimateString, penultimateString, penultimate, ultimate, ultimate0, elision, + it = iterable.get$iterator(iterable), + $length = 0, count = 0; + while (true) { + if (!($length < 80 || count < 3)) + break; + if (!it.moveNext$0()) + return; + next = A.S(it.get$current()); + B.JSArray_methods.add$1(parts, next); + $length += next.length + 2; + ++count; + } + if (!it.moveNext$0()) { + if (count <= 5) + return; + if (0 >= parts.length) + return A.ioore(parts, -1); + ultimateString = parts.pop(); + if (0 >= parts.length) + return A.ioore(parts, -1); + penultimateString = parts.pop(); + } else { + penultimate = it.get$current(); + ++count; + if (!it.moveNext$0()) { + if (count <= 4) { + B.JSArray_methods.add$1(parts, A.S(penultimate)); + return; + } + ultimateString = A.S(penultimate); + if (0 >= parts.length) + return A.ioore(parts, -1); + penultimateString = parts.pop(); + $length += ultimateString.length + 2; + } else { + ultimate = it.get$current(); + ++count; + for (; it.moveNext$0(); penultimate = ultimate, ultimate = ultimate0) { + ultimate0 = it.get$current(); + ++count; + if (count > 100) { + while (true) { + if (!($length > 75 && count > 3)) + break; + if (0 >= parts.length) + return A.ioore(parts, -1); + $length -= parts.pop().length + 2; + --count; + } + B.JSArray_methods.add$1(parts, "..."); + return; + } + } + penultimateString = A.S(penultimate); + ultimateString = A.S(ultimate); + $length += ultimateString.length + penultimateString.length + 4; + } + } + if (count > parts.length + 2) { + $length += 5; + elision = "..."; + } else + elision = null; + while (true) { + if (!($length > 80 && parts.length > 3)) + break; + if (0 >= parts.length) + return A.ioore(parts, -1); + $length -= parts.pop().length + 2; + if (elision == null) { + $length += 5; + elision = "..."; + } + } + if (elision != null) + B.JSArray_methods.add$1(parts, elision); + B.JSArray_methods.add$1(parts, 
penultimateString); + B.JSArray_methods.add$1(parts, ultimateString); + }, + Object_hash(object1, object2) { + var t1 = B.JSInt_methods.get$hashCode(object1); + object2 = B.JSInt_methods.get$hashCode(object2); + object2 = A.SystemHash_finish(A.SystemHash_combine(A.SystemHash_combine($.$get$_hashSeed(), t1), object2)); + return object2; + }, + NoSuchMethodError_toString_closure: function NoSuchMethodError_toString_closure(t0, t1) { + this._box_0 = t0; + this.sb = t1; + }, + DateTime: function DateTime(t0, t1, t2) { + this._value = t0; + this._microsecond = t1; + this.isUtc = t2; + }, + _Enum: function _Enum() { + }, + Error: function Error() { + }, + AssertionError: function AssertionError(t0) { + this.message = t0; + }, + TypeError: function TypeError() { + }, + ArgumentError: function ArgumentError(t0, t1, t2, t3) { + var _ = this; + _._hasValue = t0; + _.invalidValue = t1; + _.name = t2; + _.message = t3; + }, + RangeError: function RangeError(t0, t1, t2, t3, t4, t5) { + var _ = this; + _.start = t0; + _.end = t1; + _._hasValue = t2; + _.invalidValue = t3; + _.name = t4; + _.message = t5; + }, + IndexError: function IndexError(t0, t1, t2, t3, t4) { + var _ = this; + _.length = t0; + _._hasValue = t1; + _.invalidValue = t2; + _.name = t3; + _.message = t4; + }, + NoSuchMethodError: function NoSuchMethodError(t0, t1, t2, t3) { + var _ = this; + _._core$_receiver = t0; + _._core$_memberName = t1; + _._core$_arguments = t2; + _._namedArguments = t3; + }, + UnsupportedError: function UnsupportedError(t0) { + this.message = t0; + }, + UnimplementedError: function UnimplementedError(t0) { + this.message = t0; + }, + StateError: function StateError(t0) { + this.message = t0; + }, + ConcurrentModificationError: function ConcurrentModificationError(t0) { + this.modifiedObject = t0; + }, + OutOfMemoryError: function OutOfMemoryError() { + }, + StackOverflowError: function StackOverflowError() { + }, + _Exception: function _Exception(t0) { + this.message = t0; + }, + 
FormatException: function FormatException(t0, t1, t2) { + this.message = t0; + this.source = t1; + this.offset = t2; + }, + Iterable: function Iterable() { + }, + Null: function Null() { + }, + Object: function Object() { + }, + _StringStackTrace: function _StringStackTrace() { + }, + StringBuffer: function StringBuffer(t0) { + this._contents = t0; + }, + _convertDartFunctionFast(f) { + var ret, + existing = f.$dart_jsFunction; + if (existing != null) + return existing; + ret = function(_call, f) { + return function() { + return _call(f, Array.prototype.slice.apply(arguments)); + }; + }(A._callDartFunctionFast, f); + ret[$.$get$DART_CLOSURE_PROPERTY_NAME()] = f; + f.$dart_jsFunction = ret; + return ret; + }, + _callDartFunctionFast(callback, $arguments) { + type$.List_dynamic._as($arguments); + type$.Function._as(callback); + return A.Primitives_applyFunction(callback, $arguments, null); + }, + allowInterop(f, $F) { + if (typeof f == "function") + return f; + else + return $F._as(A._convertDartFunctionFast(f)); + }, + _functionToJS1(f) { + var result; + if (typeof f == "function") + throw A.wrapException(A.ArgumentError$("Attempting to rewrap a JS function.", null)); + result = function(_call, f) { + return function(arg1) { + return _call(f, arg1, arguments.length); + }; + }(A._callDartFunctionFast1, f); + result[$.$get$DART_CLOSURE_PROPERTY_NAME()] = f; + return result; + }, + _callDartFunctionFast1(callback, arg1, $length) { + type$.Function._as(callback); + if (A._asInt($length) >= 1) + return callback.call$1(arg1); + return callback.call$0(); + }, + _noJsifyRequired(o) { + return o == null || A._isBool(o) || typeof o == "number" || typeof o == "string" || type$.Int8List._is(o) || type$.Uint8List._is(o) || type$.Uint8ClampedList._is(o) || type$.Int16List._is(o) || type$.Uint16List._is(o) || type$.Int32List._is(o) || type$.Uint32List._is(o) || type$.Float32List._is(o) || type$.Float64List._is(o) || type$.ByteBuffer._is(o) || type$.ByteData._is(o); + }, + 
jsify(object) { + if (A._noJsifyRequired(object)) + return object; + return new A.jsify__convert(new A._IdentityHashMap(type$._IdentityHashMap_of_nullable_Object_and_nullable_Object)).call$1(object); + }, + callMethod(o, method, args, $T) { + return $T._as(o[method].apply(o, args)); + }, + promiseToFuture(jsPromise, $T) { + var t1 = new A._Future($.Zone__current, $T._eval$1("_Future<0>")), + completer = new A._AsyncCompleter(t1, $T._eval$1("_AsyncCompleter<0>")); + jsPromise.then(A.convertDartClosureToJS(new A.promiseToFuture_closure(completer, $T), 1), A.convertDartClosureToJS(new A.promiseToFuture_closure0(completer), 1)); + return t1; + }, + _noDartifyRequired(o) { + return o == null || typeof o === "boolean" || typeof o === "number" || typeof o === "string" || o instanceof Int8Array || o instanceof Uint8Array || o instanceof Uint8ClampedArray || o instanceof Int16Array || o instanceof Uint16Array || o instanceof Int32Array || o instanceof Uint32Array || o instanceof Float32Array || o instanceof Float64Array || o instanceof ArrayBuffer || o instanceof DataView; + }, + dartify(o) { + if (A._noDartifyRequired(o)) + return o; + return new A.dartify_convert(new A._IdentityHashMap(type$._IdentityHashMap_of_nullable_Object_and_nullable_Object)).call$1(o); + }, + jsify__convert: function jsify__convert(t0) { + this._convertedObjects = t0; + }, + promiseToFuture_closure: function promiseToFuture_closure(t0, t1) { + this.completer = t0; + this.T = t1; + }, + promiseToFuture_closure0: function promiseToFuture_closure0(t0) { + this.completer = t0; + }, + dartify_convert: function dartify_convert(t0) { + this._convertedObjects = t0; + }, + NullRejectionException: function NullRejectionException(t0) { + this.isUndefined = t0; + }, + _JSSecureRandom: function _JSSecureRandom(t0) { + this._math$_buffer = t0; + }, + findNALUIndices(stream) { + var start, pos0, t1, end, + result = A._setArrayType([], type$.JSArray_int), + pos = stream.length, + searchLength = pos - 2; + for 
(start = 0, pos0 = 0; pos0 < searchLength; start = pos0) { + while (true) { + if (pos0 < searchLength) { + if (!(pos0 >= 0)) + return A.ioore(stream, pos0); + t1 = !(stream[pos0] === 0 && stream[pos0 + 1] === 0 && stream[pos0 + 2] === 1); + } else + t1 = false; + if (!t1) + break; + ++pos0; + } + if (pos0 >= searchLength) + pos0 = pos; + end = pos0; + while (true) { + if (end > start) { + t1 = end - 1; + if (!(t1 >= 0)) + return A.ioore(stream, t1); + t1 = stream[t1] === 0; + } else + t1 = false; + if (!t1) + break; + --end; + } + if (start === 0) { + if (end !== start) + throw A.wrapException(A.Exception_Exception("byte stream contains leading data")); + } else + B.JSArray_methods.add$1(result, start); + pos0 += 3; + } + return result; + }, + CryptorError: function CryptorError(t0) { + this._name = t0; + }, + FrameInfo: function FrameInfo(t0, t1, t2, t3) { + var _ = this; + _.frameType = t0; + _.ssrc = t1; + _.timestamp = t2; + _.buffer = t3; + }, + FrameCryptor: function FrameCryptor(t0, t1, t2, t3, t4, t5, t6) { + var _ = this; + _.sendCounts = t0; + _.participantIdentity = t1; + _.trackId = t2; + _.codec = null; + _.keyHandler = t3; + _.__FrameCryptor_kind_A = $; + _._enabled = false; + _.lastError = t4; + _.currentKeyIndex = 0; + _.worker = t5; + _.sifGuard = t6; + }, + FrameCryptor_decodeFunction_decryptFrameInternal: function FrameCryptor_decodeFunction_decryptFrameInternal(t0, t1, t2, t3, t4, t5, t6) { + var _ = this; + _._box_1 = t0; + _._box_0 = t1; + _.$this = t2; + _.iv = t3; + _.srcFrame = t4; + _.headerLength = t5; + _.ivLength = t6; + }, + FrameCryptor_decodeFunction_ratchedKeyInternal: function FrameCryptor_decodeFunction_ratchedKeyInternal(t0, t1, t2, t3) { + var _ = this; + _._box_1 = t0; + _._box_0 = t1; + _.$this = t2; + _.decryptFrameInternal = t3; + }, + ParticipantKeyHandler$(keyOptions, participantIdentity, worker) { + var t1 = new A.ParticipantKeyHandler(keyOptions, worker, participantIdentity), + t2 = keyOptions.keyRingSze; + if (t2 <= 0 
|| t2 > 255) + A.throwExpression(A.Exception_Exception("Invalid key ring size")); + t1.set$__ParticipantKeyHandler_cryptoKeyRing_A(type$.List_nullable_KeySet._as(A.List_List$filled(t2, null, false, type$.nullable_KeySet))); + return t1; + }, + KeyOptions: function KeyOptions(t0, t1, t2, t3, t4, t5, t6) { + var _ = this; + _.sharedKey = t0; + _.ratchetSalt = t1; + _.ratchetWindowSize = t2; + _.failureTolerance = t3; + _.uncryptedMagicBytes = t4; + _.keyRingSze = t5; + _.discardFrameWhenCryptorNotReady = t6; + }, + KeyProvider: function KeyProvider(t0, t1, t2, t3) { + var _ = this; + _.worker = t0; + _.keyProviderOptions = t1; + _.participantKeys = t2; + _.sharedKeyHandler = null; + _.sharedKey = t3; + }, + KeySet: function KeySet(t0, t1) { + this.material = t0; + this.encryptionKey = t1; + }, + ParticipantKeyHandler: function ParticipantKeyHandler(t0, t1, t2) { + var _ = this; + _.currentKeyIndex = 0; + _.__ParticipantKeyHandler_cryptoKeyRing_A = $; + _._hasValidKey = false; + _.keyOptions = t0; + _.worker = t1; + _.participantIdentity = t2; + _._decryptionFailureCount = 0; + }, + SifGuard: function SifGuard() { + var _ = this; + _.consecutiveSifCount = 0; + _.sifSequenceStartedAt = null; + _.userFramesSinceSif = _.lastSifReceivedAt = 0; + }, + getTrackCryptor(participantIdentity, trackId, keyProvider) { + var t1, t2, _null = null, + cryptor = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.getTrackCryptor_closure(trackId), type$.FrameCryptor); + if (cryptor == null) { + $.$get$logger().log$4(B.Level_INFO_800, "creating new cryptor for " + participantIdentity + ", trackId " + trackId, _null, _null); + t1 = type$.JSObject._as(self.self); + t2 = type$.int; + cryptor = new A.FrameCryptor(A.LinkedHashMap_LinkedHashMap$_empty(t2, t2), participantIdentity, trackId, keyProvider.getParticipantKeyHandler$1(participantIdentity), B.CryptorError_0, t1, new A.SifGuard()); + B.JSArray_methods.add$1($.participantCryptors, cryptor); + } else if 
(participantIdentity !== cryptor.participantIdentity) { + t1 = keyProvider.getParticipantKeyHandler$1(participantIdentity); + if (cryptor.lastError !== B.CryptorError_1) { + $.$get$logger().log$4(B.Level_INFO_800, "setParticipantId: lastError != CryptorError.kOk, reset state to kNew", _null, _null); + cryptor.lastError = B.CryptorError_0; + } + cryptor.participantIdentity = participantIdentity; + cryptor.keyHandler = t1; + cryptor.sifGuard.reset$0(); + } + return cryptor; + }, + unsetCryptorParticipant(trackId) { + var t1 = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.unsetCryptorParticipant_closure(trackId), type$.FrameCryptor); + if (t1 != null) + t1.participantIdentity = null; + }, + main() { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + t2, t3, t1; + var $async$main = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $.$get$Logger_root(); + if (t1.parent != null) + A.throwExpression(A.UnsupportedError$('Please set "hierarchicalLoggingEnabled" to true if you want to change the level on a non-root logger.')); + J.$eq$(t1._level, B.Level_WARNING_900); + t1._level = B.Level_WARNING_900; + t1._getStream$0().listen$1(new A.main_closure()); + t1 = $.$get$logger(); + t1.log$4(B.Level_INFO_800, "Worker created", null, null); + t2 = self; + t3 = type$.JSObject; + if ("RTCTransformEvent" in t3._as(t2.self)) { + t1.log$4(B.Level_INFO_800, "setup RTCTransformEvent event handler", null, null); + t3._as(t2.self).onrtctransform = A._functionToJS1(new A.main_closure0()); + } + t3._as(t2.self).onmessage = A._functionToJS1(new A.main_closure1(new A.main_closure2())); + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$main, $async$completer); + }, + getTrackCryptor_closure: 
function getTrackCryptor_closure(t0) { + this.trackId = t0; + }, + unsetCryptorParticipant_closure: function unsetCryptorParticipant_closure(t0) { + this.trackId = t0; + }, + main_closure: function main_closure() { + }, + main_closure0: function main_closure0() { + }, + main_closure2: function main_closure2() { + }, + main__closure: function main__closure(t0) { + this.trackId = t0; + }, + main__closure0: function main__closure0(t0) { + this.trackId = t0; + }, + main__closure1: function main__closure1(t0) { + this.trackId = t0; + }, + main__closure2: function main__closure2(t0) { + this.trackId = t0; + }, + main_closure1: function main_closure1(t0) { + this.handleMessage = t0; + }, + Level: function Level(t0, t1) { + this.name = t0; + this.value = t1; + }, + LogRecord: function LogRecord(t0, t1, t2) { + this.level = t0; + this.message = t1; + this.loggerName = t2; + }, + Logger_Logger($name) { + return $.Logger__loggers.putIfAbsent$2($name, new A.Logger_Logger_closure($name)); + }, + Logger: function Logger(t0, t1, t2) { + var _ = this; + _.name = t0; + _.parent = t1; + _._level = null; + _._children = t2; + _._controller = null; + }, + Logger_Logger_closure: function Logger_Logger_closure(t0) { + this.name = t0; + }, + printString(string) { + if (typeof dartPrint == "function") { + dartPrint(string); + return; + } + if (typeof console == "object" && typeof console.log != "undefined") { + console.log(string); + return; + } + if (typeof print == "function") { + print(string); + return; + } + throw "Unable to print message: " + String(string); + }, + throwLateFieldNI(fieldName) { + A.throwExpressionWithWrapper(new A.LateError("Field '" + fieldName + "' has not been initialized."), new Error()); + }, + throwLateFieldADI(fieldName) { + A.throwExpressionWithWrapper(new A.LateError("Field '" + fieldName + "' has been assigned during initialization."), new Error()); + }, + IterableExtension_firstWhereOrNull(_this, test, $T) { + var t1, _i, element; + for (t1 = 
_this.length, _i = 0; _i < _this.length; _this.length === t1 || (0, A.throwConcurrentModificationError)(_this), ++_i) { + element = _this[_i]; + if (A.boolConversionCheck(test.call$1(element))) + return element; + } + return null; + }, + getAlgoOptions(algorithmName, salt) { + switch (algorithmName) { + case "HKDF": + return A.LinkedHashMap_LinkedHashMap$_literal(["name", "HKDF", "salt", salt, "hash", "SHA-256", "info", new Uint8Array(128)], type$.String, type$.dynamic); + case "PBKDF2": + return A.LinkedHashMap_LinkedHashMap$_literal(["name", "PBKDF2", "salt", salt, "hash", "SHA-256", "iterations", 100000], type$.String, type$.dynamic); + default: + throw A.wrapException(A.Exception_Exception("algorithm " + algorithmName + " is currently unsupported")); + } + } + }, + B = {}; + var holders = [A, J, B]; + var $ = {}; + A.JS_CONST.prototype = {}; + J.Interceptor.prototype = { + $eq(receiver, other) { + return receiver === other; + }, + get$hashCode(receiver) { + return A.Primitives_objectHashCode(receiver); + }, + toString$0(receiver) { + return "Instance of '" + A.Primitives_objectTypeName(receiver) + "'"; + }, + noSuchMethod$1(receiver, invocation) { + throw A.wrapException(A.NoSuchMethodError_NoSuchMethodError$withInvocation(receiver, type$.Invocation._as(invocation))); + }, + get$runtimeType(receiver) { + return A.createRuntimeType(A._instanceTypeFromConstructor(this)); + } + }; + J.JSBool.prototype = { + toString$0(receiver) { + return String(receiver); + }, + get$hashCode(receiver) { + return receiver ? 
519018 : 218159; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(type$.bool); + }, + $isTrustedGetRuntimeType: 1, + $isbool: 1 + }; + J.JSNull.prototype = { + $eq(receiver, other) { + return null == other; + }, + toString$0(receiver) { + return "null"; + }, + get$hashCode(receiver) { + return 0; + }, + $isTrustedGetRuntimeType: 1, + $isNull: 1 + }; + J.JavaScriptObject.prototype = {$isJSObject: 1}; + J.LegacyJavaScriptObject.prototype = { + get$hashCode(receiver) { + return 0; + }, + get$runtimeType(receiver) { + return B.Type_JSObject_ttY; + }, + toString$0(receiver) { + return String(receiver); + } + }; + J.PlainJavaScriptObject.prototype = {}; + J.UnknownJavaScriptObject.prototype = {}; + J.JavaScriptFunction.prototype = { + toString$0(receiver) { + var dartClosure = receiver[$.$get$DART_CLOSURE_PROPERTY_NAME()]; + if (dartClosure == null) + return this.super$LegacyJavaScriptObject$toString(receiver); + return "JavaScript function for " + J.toString$0$(dartClosure); + }, + $isFunction: 1 + }; + J.JavaScriptBigInt.prototype = { + get$hashCode(receiver) { + return 0; + }, + toString$0(receiver) { + return String(receiver); + } + }; + J.JavaScriptSymbol.prototype = { + get$hashCode(receiver) { + return 0; + }, + toString$0(receiver) { + return String(receiver); + } + }; + J.JSArray.prototype = { + add$1(receiver, value) { + A._arrayInstanceType(receiver)._precomputed1._as(value); + receiver.$flags & 1 && A.throwUnsupportedOperation(receiver, 29); + receiver.push(value); + }, + addAll$1(receiver, collection) { + var t1; + A._arrayInstanceType(receiver)._eval$1("Iterable<1>")._as(collection); + receiver.$flags & 1 && A.throwUnsupportedOperation(receiver, "addAll", 2); + if (Array.isArray(collection)) { + this._addAllFromArray$1(receiver, collection); + return; + } + for (t1 = J.get$iterator$ax(collection); t1.moveNext$0();) + receiver.push(t1.get$current()); + }, + _addAllFromArray$1(receiver, array) { + var len, i; + type$.JSArray_dynamic._as(array); 
+ len = array.length; + if (len === 0) + return; + if (receiver === array) + throw A.wrapException(A.ConcurrentModificationError$(receiver)); + for (i = 0; i < len; ++i) + receiver.push(array[i]); + }, + map$1$1(receiver, f, $T) { + var t1 = A._arrayInstanceType(receiver); + return new A.MappedListIterable(receiver, t1._bind$1($T)._eval$1("1(2)")._as(f), t1._eval$1("@<1>")._bind$1($T)._eval$1("MappedListIterable<1,2>")); + }, + elementAt$1(receiver, index) { + if (!(index >= 0 && index < receiver.length)) + return A.ioore(receiver, index); + return receiver[index]; + }, + toString$0(receiver) { + return A.Iterable_iterableToFullString(receiver, "[", "]"); + }, + get$iterator(receiver) { + return new J.ArrayIterator(receiver, receiver.length, A._arrayInstanceType(receiver)._eval$1("ArrayIterator<1>")); + }, + get$hashCode(receiver) { + return A.Primitives_objectHashCode(receiver); + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + if (!(index >= 0 && index < receiver.length)) + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + return receiver[index]; + }, + $indexSet(receiver, index, value) { + A._arrayInstanceType(receiver)._precomputed1._as(value); + receiver.$flags & 2 && A.throwUnsupportedOperation(receiver); + if (!(index >= 0 && index < receiver.length)) + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + receiver[index] = value; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(A._arrayInstanceType(receiver)); + }, + $isEfficientLengthIterable: 1, + $isIterable: 1, + $isList: 1 + }; + J.JSUnmodifiableArray.prototype = {}; + J.ArrayIterator.prototype = { + get$current() { + var t1 = this._current; + return t1 == null ? 
this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var t2, _this = this, + t1 = _this._iterable, + $length = t1.length; + if (_this._length !== $length) { + t1 = A.throwConcurrentModificationError(t1); + throw A.wrapException(t1); + } + t2 = _this._index; + if (t2 >= $length) { + _this.set$_current(null); + return false; + } + _this.set$_current(t1[t2]); + ++_this._index; + return true; + }, + set$_current(_current) { + this._current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + J.JSNumber.prototype = { + toInt$0(receiver) { + var t1; + if (receiver >= -2147483648 && receiver <= 2147483647) + return receiver | 0; + if (isFinite(receiver)) { + t1 = receiver < 0 ? Math.ceil(receiver) : Math.floor(receiver); + return t1 + 0; + } + throw A.wrapException(A.UnsupportedError$("" + receiver + ".toInt()")); + }, + toRadixString$1(receiver, radix) { + var result, t1, t2, match, exponent; + if (radix < 2 || radix > 36) + throw A.wrapException(A.RangeError$range(radix, 2, 36, "radix", null)); + result = receiver.toString(radix); + t1 = result.length; + t2 = t1 - 1; + if (!(t2 >= 0)) + return A.ioore(result, t2); + if (result.charCodeAt(t2) !== 41) + return result; + match = /^([\da-z]+)(?:\.([\da-z]+))?\(e\+(\d+)\)$/.exec(result); + if (match == null) + A.throwExpression(A.UnsupportedError$("Unexpected toString result: " + result)); + t1 = match.length; + if (1 >= t1) + return A.ioore(match, 1); + result = match[1]; + if (3 >= t1) + return A.ioore(match, 3); + exponent = +match[3]; + t1 = match[2]; + if (t1 != null) { + result += t1; + exponent -= t1.length; + } + return result + B.JSString_methods.$mul("0", exponent); + }, + toString$0(receiver) { + if (receiver === 0 && 1 / receiver < 0) + return "-0.0"; + else + return "" + receiver; + }, + get$hashCode(receiver) { + var absolute, floorLog2, factor, scaled, + intValue = receiver | 0; + if (receiver === intValue) + return intValue & 536870911; + absolute = Math.abs(receiver); + floorLog2 = 
Math.log(absolute) / 0.6931471805599453 | 0; + factor = Math.pow(2, floorLog2); + scaled = absolute < 1 ? absolute / factor : factor / absolute; + return ((scaled * 9007199254740992 | 0) + (scaled * 3542243181176521 | 0)) * 599197 + floorLog2 * 1259 & 536870911; + }, + $mod(receiver, other) { + var result = receiver % other; + if (result === 0) + return 0; + if (result > 0) + return result; + return result + other; + }, + _tdivFast$1(receiver, other) { + return (receiver | 0) === receiver ? receiver / other | 0 : this._tdivSlow$1(receiver, other); + }, + _tdivSlow$1(receiver, other) { + var quotient = receiver / other; + if (quotient >= -2147483648 && quotient <= 2147483647) + return quotient | 0; + if (quotient > 0) { + if (quotient !== 1 / 0) + return Math.floor(quotient); + } else if (quotient > -1 / 0) + return Math.ceil(quotient); + throw A.wrapException(A.UnsupportedError$("Result of truncating division is " + A.S(quotient) + ": " + A.S(receiver) + " ~/ " + other)); + }, + _shrOtherPositive$1(receiver, other) { + var t1; + if (receiver > 0) + t1 = this._shrBothPositive$1(receiver, other); + else { + t1 = other > 31 ? 31 : other; + t1 = receiver >> t1 >>> 0; + } + return t1; + }, + _shrBothPositive$1(receiver, other) { + return other > 31 ? 
0 : receiver >>> other; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(type$.num); + }, + $isdouble: 1, + $isnum: 1 + }; + J.JSInt.prototype = { + get$runtimeType(receiver) { + return A.createRuntimeType(type$.int); + }, + $isTrustedGetRuntimeType: 1, + $isint: 1 + }; + J.JSNumNotInt.prototype = { + get$runtimeType(receiver) { + return A.createRuntimeType(type$.double); + }, + $isTrustedGetRuntimeType: 1 + }; + J.JSString.prototype = { + endsWith$1(receiver, other) { + var otherLength = other.length, + t1 = receiver.length; + if (otherLength > t1) + return false; + return other === this.substring$1(receiver, t1 - otherLength); + }, + startsWith$1(receiver, pattern) { + var otherLength = pattern.length; + if (otherLength > receiver.length) + return false; + return pattern === receiver.substring(0, otherLength); + }, + substring$2(receiver, start, end) { + return receiver.substring(start, A.RangeError_checkValidRange(start, end, receiver.length)); + }, + substring$1(receiver, start) { + return this.substring$2(receiver, start, null); + }, + $mul(receiver, times) { + var s, result; + if (0 >= times) + return ""; + if (times === 1 || receiver.length === 0) + return receiver; + if (times !== times >>> 0) + throw A.wrapException(B.C_OutOfMemoryError); + for (s = receiver, result = ""; true;) { + if ((times & 1) === 1) + result = s + result; + times = times >>> 1; + if (times === 0) + break; + s += s; + } + return result; + }, + lastIndexOf$1(receiver, pattern) { + var start = receiver.length, + t1 = pattern.length; + if (start + t1 > start) + start -= t1; + return receiver.lastIndexOf(pattern, start); + }, + toString$0(receiver) { + return receiver; + }, + get$hashCode(receiver) { + var t1, hash, i; + for (t1 = receiver.length, hash = 0, i = 0; i < t1; ++i) { + hash = hash + receiver.charCodeAt(i) & 536870911; + hash = hash + ((hash & 524287) << 10) & 536870911; + hash ^= hash >> 6; + } + hash = hash + ((hash & 67108863) << 3) & 536870911; + hash ^= hash 
>> 11; + return hash + ((hash & 16383) << 15) & 536870911; + }, + get$runtimeType(receiver) { + return A.createRuntimeType(type$.String); + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + if (!(index.$ge(0, 0) && index.$lt(0, receiver.length))) + throw A.wrapException(A.diagnoseIndexError(receiver, index)); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isPattern: 1, + $isString: 1 + }; + A._CopyingBytesBuilder.prototype = { + add$1(_, bytes) { + var byteCount, required, t1, t2, newSize, x, newBuffer, _this = this; + type$.List_int._as(bytes); + byteCount = bytes.length; + if (byteCount === 0) + return; + required = _this.__internal$_length + byteCount; + t1 = _this._buffer; + t2 = t1.length; + if (t2 < required) { + newSize = required * 2; + if (newSize < 1024) + newSize = 1024; + else { + x = newSize - 1; + x |= B.JSInt_methods._shrOtherPositive$1(x, 1); + x |= x >>> 2; + x |= x >>> 4; + x |= x >>> 8; + newSize = ((x | x >>> 16) >>> 0) + 1; + } + newBuffer = new Uint8Array(newSize); + B.NativeUint8List_methods.setRange$3(newBuffer, 0, t2, t1); + _this._buffer = newBuffer; + t1 = newBuffer; + } + B.NativeUint8List_methods.setRange$3(t1, _this.__internal$_length, required, bytes); + _this.__internal$_length = required; + }, + toBytes$0() { + var _this = this; + if (_this.__internal$_length === 0) + return $.$get$_CopyingBytesBuilder__emptyList(); + return new Uint8Array(A._ensureNativeList(J.asUint8List$2$x(B.NativeUint8List_methods.get$buffer(_this._buffer), _this._buffer.byteOffset, _this.__internal$_length))); + }, + get$length(_) { + return this.__internal$_length; + }, + $isBytesBuilder: 1 + }; + A.LateError.prototype = { + toString$0(_) { + return "LateInitializationError: " + this._message; + } + }; + A.SentinelValue.prototype = {}; + A.EfficientLengthIterable.prototype = {}; + A.ListIterable.prototype = { + get$iterator(_) { + var _this = this; + return new A.ListIterator(_this, 
_this.get$length(_this), A._instanceType(_this)._eval$1("ListIterator")); + }, + map$1$1(_, toElement, $T) { + var t1 = A._instanceType(this); + return new A.MappedListIterable(this, t1._bind$1($T)._eval$1("1(ListIterable.E)")._as(toElement), t1._eval$1("@")._bind$1($T)._eval$1("MappedListIterable<1,2>")); + } + }; + A.ListIterator.prototype = { + get$current() { + var t1 = this.__internal$_current; + return t1 == null ? this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var t3, _this = this, + t1 = _this.__internal$_iterable, + t2 = J.getInterceptor$asx(t1), + $length = t2.get$length(t1); + if (_this.__internal$_length !== $length) + throw A.wrapException(A.ConcurrentModificationError$(t1)); + t3 = _this.__internal$_index; + if (t3 >= $length) { + _this.set$__internal$_current(null); + return false; + } + _this.set$__internal$_current(t2.elementAt$1(t1, t3)); + ++_this.__internal$_index; + return true; + }, + set$__internal$_current(_current) { + this.__internal$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.MappedIterable.prototype = { + get$iterator(_) { + var t1 = this.__internal$_iterable; + return new A.MappedIterator(t1.get$iterator(t1), this._f, A._instanceType(this)._eval$1("MappedIterator<1,2>")); + }, + get$length(_) { + var t1 = this.__internal$_iterable; + return t1.get$length(t1); + } + }; + A.EfficientLengthMappedIterable.prototype = {$isEfficientLengthIterable: 1}; + A.MappedIterator.prototype = { + moveNext$0() { + var _this = this, + t1 = _this._iterator; + if (t1.moveNext$0()) { + _this.set$__internal$_current(_this._f.call$1(t1.get$current())); + return true; + } + _this.set$__internal$_current(null); + return false; + }, + get$current() { + var t1 = this.__internal$_current; + return t1 == null ? 
this.$ti._rest[1]._as(t1) : t1; + }, + set$__internal$_current(_current) { + this.__internal$_current = this.$ti._eval$1("2?")._as(_current); + }, + $isIterator: 1 + }; + A.MappedListIterable.prototype = { + get$length(_) { + return J.get$length$asx(this._source); + }, + elementAt$1(_, index) { + return this._f.call$1(J.elementAt$1$ax(this._source, index)); + } + }; + A.WhereIterable.prototype = { + get$iterator(_) { + return new A.WhereIterator(J.get$iterator$ax(this.__internal$_iterable), this._f, this.$ti._eval$1("WhereIterator<1>")); + }, + map$1$1(_, toElement, $T) { + var t1 = this.$ti; + return new A.MappedIterable(this, t1._bind$1($T)._eval$1("1(2)")._as(toElement), t1._eval$1("@<1>")._bind$1($T)._eval$1("MappedIterable<1,2>")); + } + }; + A.WhereIterator.prototype = { + moveNext$0() { + var t1, t2; + for (t1 = this._iterator, t2 = this._f; t1.moveNext$0();) + if (A.boolConversionCheck(t2.call$1(t1.get$current()))) + return true; + return false; + }, + get$current() { + return this._iterator.get$current(); + }, + $isIterator: 1 + }; + A.FixedLengthListMixin.prototype = {}; + A.Symbol.prototype = { + get$hashCode(_) { + var hash = this._hashCode; + if (hash != null) + return hash; + hash = 664597 * B.JSString_methods.get$hashCode(this.__internal$_name) & 536870911; + this._hashCode = hash; + return hash; + }, + toString$0(_) { + return 'Symbol("' + this.__internal$_name + '")'; + }, + $eq(_, other) { + if (other == null) + return false; + return other instanceof A.Symbol && this.__internal$_name === other.__internal$_name; + }, + $isSymbol0: 1 + }; + A.ConstantMapView.prototype = {}; + A.ConstantMap.prototype = { + toString$0(_) { + return A.MapBase_mapToString(this); + }, + $isMap: 1 + }; + A.ConstantStringMap.prototype = { + get$length(_) { + return this._values.length; + }, + get$_keys() { + var keys = this.$keys; + if (keys == null) { + keys = Object.keys(this._jsIndex); + this.$keys = keys; + } + return keys; + }, + containsKey$1(key) { + if (typeof key 
!= "string") + return false; + if ("__proto__" === key) + return false; + return this._jsIndex.hasOwnProperty(key); + }, + $index(_, key) { + if (!this.containsKey$1(key)) + return null; + return this._values[this._jsIndex[key]]; + }, + forEach$1(_, f) { + var keys, values, t1, i; + this.$ti._eval$1("~(1,2)")._as(f); + keys = this.get$_keys(); + values = this._values; + for (t1 = keys.length, i = 0; i < t1; ++i) + f.call$2(keys[i], values[i]); + }, + get$keys() { + return new A._KeysOrValues(this.get$_keys(), this.$ti._eval$1("_KeysOrValues<1>")); + } + }; + A._KeysOrValues.prototype = { + get$length(_) { + return this._elements.length; + }, + get$iterator(_) { + var t1 = this._elements; + return new A._KeysOrValuesOrElementsIterator(t1, t1.length, this.$ti._eval$1("_KeysOrValuesOrElementsIterator<1>")); + } + }; + A._KeysOrValuesOrElementsIterator.prototype = { + get$current() { + var t1 = this.__js_helper$_current; + return t1 == null ? this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var _this = this, + t1 = _this.__js_helper$_index; + if (t1 >= _this.__js_helper$_length) { + _this.set$__js_helper$_current(null); + return false; + } + _this.set$__js_helper$_current(_this._elements[t1]); + ++_this.__js_helper$_index; + return true; + }, + set$__js_helper$_current(_current) { + this.__js_helper$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.JSInvocationMirror.prototype = { + get$memberName() { + var t1 = this._memberName; + if (t1 instanceof A.Symbol) + return t1; + return this._memberName = new A.Symbol(A._asString(t1)); + }, + get$positionalArguments() { + var t1, t2, argumentCount, list, index, _this = this; + if (_this.__js_helper$_kind === 1) + return B.List_empty; + t1 = _this._arguments; + t2 = J.getInterceptor$asx(t1); + argumentCount = t2.get$length(t1) - J.get$length$asx(_this._namedArgumentNames) - _this._typeArgumentCount; + if (argumentCount === 0) + return B.List_empty; + list = []; + for (index = 0; index 
< argumentCount; ++index) + list.push(t2.$index(t1, index)); + list.$flags = 3; + return list; + }, + get$namedArguments() { + var t1, t2, namedArgumentCount, t3, t4, namedArgumentsStartIndex, map, i, _this = this; + if (_this.__js_helper$_kind !== 0) + return B.Map_empty; + t1 = _this._namedArgumentNames; + t2 = J.getInterceptor$asx(t1); + namedArgumentCount = t2.get$length(t1); + t3 = _this._arguments; + t4 = J.getInterceptor$asx(t3); + namedArgumentsStartIndex = t4.get$length(t3) - namedArgumentCount - _this._typeArgumentCount; + if (namedArgumentCount === 0) + return B.Map_empty; + map = new A.JsLinkedHashMap(type$.JsLinkedHashMap_Symbol_dynamic); + for (i = 0; i < namedArgumentCount; ++i) + map.$indexSet(0, new A.Symbol(A._asString(t2.$index(t1, i))), t4.$index(t3, namedArgumentsStartIndex + i)); + return new A.ConstantMapView(map, type$.ConstantMapView_Symbol_dynamic); + }, + $isInvocation: 1 + }; + A.Primitives_functionNoSuchMethod_closure.prototype = { + call$2($name, argument) { + var t1; + A._asString($name); + t1 = this._box_0; + t1.names = t1.names + "$" + $name; + B.JSArray_methods.add$1(this.namedArgumentList, $name); + B.JSArray_methods.add$1(this.$arguments, argument); + ++t1.argumentCount; + }, + $signature: 12 + }; + A.TypeErrorDecoder.prototype = { + matchTypeError$1(message) { + var result, t1, _this = this, + match = new RegExp(_this._pattern).exec(message); + if (match == null) + return null; + result = Object.create(null); + t1 = _this._arguments; + if (t1 !== -1) + result.arguments = match[t1 + 1]; + t1 = _this._argumentsExpr; + if (t1 !== -1) + result.argumentsExpr = match[t1 + 1]; + t1 = _this._expr; + if (t1 !== -1) + result.expr = match[t1 + 1]; + t1 = _this._method; + if (t1 !== -1) + result.method = match[t1 + 1]; + t1 = _this._receiver; + if (t1 !== -1) + result.receiver = match[t1 + 1]; + return result; + } + }; + A.NullError.prototype = { + toString$0(_) { + return "Null check operator used on a null value"; + } + }; + 
A.JsNoSuchMethodError.prototype = { + toString$0(_) { + var t2, _this = this, + _s38_ = "NoSuchMethodError: method not found: '", + t1 = _this._method; + if (t1 == null) + return "NoSuchMethodError: " + _this.__js_helper$_message; + t2 = _this._receiver; + if (t2 == null) + return _s38_ + t1 + "' (" + _this.__js_helper$_message + ")"; + return _s38_ + t1 + "' on '" + t2 + "' (" + _this.__js_helper$_message + ")"; + } + }; + A.UnknownJsTypeError.prototype = { + toString$0(_) { + var t1 = this.__js_helper$_message; + return t1.length === 0 ? "Error" : "Error: " + t1; + } + }; + A.NullThrownFromJavaScriptException.prototype = { + toString$0(_) { + return "Throw of null ('" + (this._irritant === null ? "null" : "undefined") + "' from JavaScript)"; + } + }; + A.ExceptionAndStackTrace.prototype = {}; + A._StackTrace.prototype = { + toString$0(_) { + var trace, + t1 = this._trace; + if (t1 != null) + return t1; + t1 = this._exception; + trace = t1 !== null && typeof t1 === "object" ? t1.stack : null; + return this._trace = trace == null ? "" : trace; + }, + $isStackTrace: 1 + }; + A.Closure.prototype = { + toString$0(_) { + var $constructor = this.constructor, + $name = $constructor == null ? null : $constructor.name; + return "Closure '" + A.unminifyOrTag($name == null ? 
"unknown" : $name) + "'"; + }, + $isFunction: 1, + get$$call() { + return this; + }, + "call*": "call$1", + $requiredArgCount: 1, + $defaultValues: null + }; + A.Closure0Args.prototype = {"call*": "call$0", $requiredArgCount: 0}; + A.Closure2Args.prototype = {"call*": "call$2", $requiredArgCount: 2}; + A.TearOffClosure.prototype = {}; + A.StaticClosure.prototype = { + toString$0(_) { + var $name = this.$static_name; + if ($name == null) + return "Closure of unknown static method"; + return "Closure '" + A.unminifyOrTag($name) + "'"; + } + }; + A.BoundClosure.prototype = { + $eq(_, other) { + if (other == null) + return false; + if (this === other) + return true; + if (!(other instanceof A.BoundClosure)) + return false; + return this.$_target === other.$_target && this._receiver === other._receiver; + }, + get$hashCode(_) { + return (A.objectHashCode(this._receiver) ^ A.Primitives_objectHashCode(this.$_target)) >>> 0; + }, + toString$0(_) { + return "Closure '" + this.$_name + "' of " + ("Instance of '" + A.Primitives_objectTypeName(this._receiver) + "'"); + } + }; + A._CyclicInitializationError.prototype = { + toString$0(_) { + return "Reading static variable '" + this.variableName + "' during its initialization"; + } + }; + A.RuntimeError.prototype = { + toString$0(_) { + return "RuntimeError: " + this.message; + } + }; + A._AssertionError.prototype = { + toString$0(_) { + return "Assertion failed: " + A.Error_safeToString(this.message); + } + }; + A._Required.prototype = {}; + A.JsLinkedHashMap.prototype = { + get$length(_) { + return this.__js_helper$_length; + }, + get$keys() { + return new A.LinkedHashMapKeysIterable(this, A._instanceType(this)._eval$1("LinkedHashMapKeysIterable<1>")); + }, + containsKey$1(key) { + var strings = this._strings; + if (strings == null) + return false; + return strings[key] != null; + }, + $index(_, key) { + var strings, cell, t1, nums, _null = null; + if (typeof key == "string") { + strings = this._strings; + if (strings == null) 
+ return _null; + cell = strings[key]; + t1 = cell == null ? _null : cell.hashMapCellValue; + return t1; + } else if (typeof key == "number" && (key & 0x3fffffff) === key) { + nums = this._nums; + if (nums == null) + return _null; + cell = nums[key]; + t1 = cell == null ? _null : cell.hashMapCellValue; + return t1; + } else + return this.internalGet$1(key); + }, + internalGet$1(key) { + var bucket, index, + rest = this.__js_helper$_rest; + if (rest == null) + return null; + bucket = rest[this.internalComputeHashCode$1(key)]; + index = this.internalFindBucketIndex$2(bucket, key); + if (index < 0) + return null; + return bucket[index].hashMapCellValue; + }, + $indexSet(_, key, value) { + var strings, nums, rest, hash, bucket, index, _this = this, + t1 = A._instanceType(_this); + t1._precomputed1._as(key); + t1._rest[1]._as(value); + if (typeof key == "string") { + strings = _this._strings; + _this.__js_helper$_addHashTableEntry$3(strings == null ? _this._strings = _this._newHashTable$0() : strings, key, value); + } else if (typeof key == "number" && (key & 0x3fffffff) === key) { + nums = _this._nums; + _this.__js_helper$_addHashTableEntry$3(nums == null ? _this._nums = _this._newHashTable$0() : nums, key, value); + } else { + rest = _this.__js_helper$_rest; + if (rest == null) + rest = _this.__js_helper$_rest = _this._newHashTable$0(); + hash = _this.internalComputeHashCode$1(key); + bucket = rest[hash]; + if (bucket == null) + rest[hash] = [_this._newLinkedCell$2(key, value)]; + else { + index = _this.internalFindBucketIndex$2(bucket, key); + if (index >= 0) + bucket[index].hashMapCellValue = value; + else + bucket.push(_this._newLinkedCell$2(key, value)); + } + } + }, + putIfAbsent$2(key, ifAbsent) { + var t2, value, _this = this, + t1 = A._instanceType(_this); + t1._precomputed1._as(key); + t1._eval$1("2()")._as(ifAbsent); + if (_this.containsKey$1(key)) { + t2 = _this.$index(0, key); + return t2 == null ? 
t1._rest[1]._as(t2) : t2; + } + value = ifAbsent.call$0(); + _this.$indexSet(0, key, value); + return value; + }, + remove$1(_, key) { + var t1 = this._removeHashTableEntry$2(this._strings, key); + return t1; + }, + forEach$1(_, action) { + var cell, modifications, _this = this; + A._instanceType(_this)._eval$1("~(1,2)")._as(action); + cell = _this._first; + modifications = _this._modifications; + for (; cell != null;) { + action.call$2(cell.hashMapCellKey, cell.hashMapCellValue); + if (modifications !== _this._modifications) + throw A.wrapException(A.ConcurrentModificationError$(_this)); + cell = cell._next; + } + }, + __js_helper$_addHashTableEntry$3(table, key, value) { + var cell, + t1 = A._instanceType(this); + t1._precomputed1._as(key); + t1._rest[1]._as(value); + cell = table[key]; + if (cell == null) + table[key] = this._newLinkedCell$2(key, value); + else + cell.hashMapCellValue = value; + }, + _removeHashTableEntry$2(table, key) { + var cell; + if (table == null) + return null; + cell = table[key]; + if (cell == null) + return null; + this._unlinkCell$1(cell); + delete table[key]; + return cell.hashMapCellValue; + }, + _modified$0() { + this._modifications = this._modifications + 1 & 1073741823; + }, + _newLinkedCell$2(key, value) { + var _this = this, + t1 = A._instanceType(_this), + cell = new A.LinkedHashMapCell(t1._precomputed1._as(key), t1._rest[1]._as(value)); + if (_this._first == null) + _this._first = _this._last = cell; + else { + t1 = _this._last; + t1.toString; + cell._previous = t1; + _this._last = t1._next = cell; + } + ++_this.__js_helper$_length; + _this._modified$0(); + return cell; + }, + _unlinkCell$1(cell) { + var _this = this, + previous = cell._previous, + next = cell._next; + if (previous == null) + _this._first = next; + else + previous._next = next; + if (next == null) + _this._last = previous; + else + next._previous = previous; + --_this.__js_helper$_length; + _this._modified$0(); + }, + internalComputeHashCode$1(key) { + return 
J.get$hashCode$(key) & 1073741823; + }, + internalFindBucketIndex$2(bucket, key) { + var $length, i; + if (bucket == null) + return -1; + $length = bucket.length; + for (i = 0; i < $length; ++i) + if (J.$eq$(bucket[i].hashMapCellKey, key)) + return i; + return -1; + }, + toString$0(_) { + return A.MapBase_mapToString(this); + }, + _newHashTable$0() { + var table = Object.create(null); + table[""] = table; + delete table[""]; + return table; + }, + $isLinkedHashMap: 1 + }; + A.LinkedHashMapCell.prototype = {}; + A.LinkedHashMapKeysIterable.prototype = { + get$length(_) { + return this._map.__js_helper$_length; + }, + get$iterator(_) { + var t1 = this._map; + return new A.LinkedHashMapKeyIterator(t1, t1._modifications, t1._first, this.$ti._eval$1("LinkedHashMapKeyIterator<1>")); + } + }; + A.LinkedHashMapKeyIterator.prototype = { + get$current() { + return this.__js_helper$_current; + }, + moveNext$0() { + var cell, _this = this, + t1 = _this._map; + if (_this._modifications !== t1._modifications) + throw A.wrapException(A.ConcurrentModificationError$(t1)); + cell = _this._cell; + if (cell == null) { + _this.set$__js_helper$_current(null); + return false; + } else { + _this.set$__js_helper$_current(cell.hashMapCellKey); + _this._cell = cell._next; + return true; + } + }, + set$__js_helper$_current(_current) { + this.__js_helper$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.initHooks_closure.prototype = { + call$1(o) { + return this.getTag(o); + }, + $signature: 13 + }; + A.initHooks_closure0.prototype = { + call$2(o, tag) { + return this.getUnknownTag(o, tag); + }, + $signature: 14 + }; + A.initHooks_closure1.prototype = { + call$1(tag) { + return this.prototypeForTag(A._asString(tag)); + }, + $signature: 15 + }; + A.NativeByteBuffer.prototype = { + get$runtimeType(receiver) { + return B.Type_ByteBuffer_rqD; + }, + asUint8List$2(receiver, offsetInBytes, $length) { + return $length == null ? 
new Uint8Array(receiver, offsetInBytes) : new Uint8Array(receiver, offsetInBytes, $length); + }, + asUint8List$0(receiver) { + return this.asUint8List$2(receiver, 0, null); + }, + $isTrustedGetRuntimeType: 1, + $isNativeByteBuffer: 1, + $isByteBuffer: 1 + }; + A.NativeTypedData.prototype = { + get$buffer(receiver) { + if (((receiver.$flags | 0) & 2) !== 0) + return new A._UnmodifiableNativeByteBufferView(receiver.buffer); + else + return receiver.buffer; + }, + _invalidPosition$3(receiver, position, $length, $name) { + var t1 = A.RangeError$range(position, 0, $length, $name, null); + throw A.wrapException(t1); + }, + _checkPosition$3(receiver, position, $length, $name) { + if (position >>> 0 !== position || position > $length) + this._invalidPosition$3(receiver, position, $length, $name); + } + }; + A._UnmodifiableNativeByteBufferView.prototype = { + asUint8List$2(_, offsetInBytes, $length) { + var result = A.NativeUint8List_NativeUint8List$view(this._data, offsetInBytes, $length); + result.$flags = 3; + return result; + }, + asUint8List$0(_) { + return this.asUint8List$2(0, 0, null); + }, + $isByteBuffer: 1 + }; + A.NativeByteData.prototype = { + get$runtimeType(receiver) { + return B.Type_ByteData_9dB; + }, + _setInt8$2(receiver, byteOffset, value) { + return receiver.setInt8(byteOffset, value); + }, + $isTrustedGetRuntimeType: 1, + $isByteData: 1 + }; + A.NativeTypedArray.prototype = { + get$length(receiver) { + return receiver.length; + }, + $isJavaScriptIndexingBehavior: 1 + }; + A.NativeTypedArrayOfDouble.prototype = { + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isEfficientLengthIterable: 1, + $isIterable: 1, + $isList: 1 + }; + A.NativeTypedArrayOfInt.prototype = { + setRange$3(receiver, start, end, iterable) { + var targetLength, count, sourceLength, source; + type$.Iterable_int._as(iterable); + receiver.$flags & 2 && A.throwUnsupportedOperation(receiver, 5); + 
targetLength = receiver.length; + this._checkPosition$3(receiver, start, targetLength, "start"); + this._checkPosition$3(receiver, end, targetLength, "end"); + if (start > end) + A.throwExpression(A.RangeError$range(start, 0, end, null, null)); + count = end - start; + sourceLength = iterable.length; + if (sourceLength < count) + A.throwExpression(A.StateError$("Not enough elements")); + source = sourceLength !== count ? iterable.subarray(0, count) : iterable; + receiver.set(source, start); + return; + }, + $isEfficientLengthIterable: 1, + $isIterable: 1, + $isList: 1 + }; + A.NativeFloat32List.prototype = { + get$runtimeType(receiver) { + return B.Type_Float32List_9Kz; + }, + $isTrustedGetRuntimeType: 1, + $isFloat32List: 1 + }; + A.NativeFloat64List.prototype = { + get$runtimeType(receiver) { + return B.Type_Float64List_9Kz; + }, + $isTrustedGetRuntimeType: 1, + $isFloat64List: 1 + }; + A.NativeInt16List.prototype = { + get$runtimeType(receiver) { + return B.Type_Int16List_s5h; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isInt16List: 1 + }; + A.NativeInt32List.prototype = { + get$runtimeType(receiver) { + return B.Type_Int32List_O8Z; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isInt32List: 1 + }; + A.NativeInt8List.prototype = { + get$runtimeType(receiver) { + return B.Type_Int8List_rFV; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isInt8List: 1 + }; + A.NativeUint16List.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint16List_kmP; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + 
$isTrustedGetRuntimeType: 1, + $isUint16List: 1 + }; + A.NativeUint32List.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint32List_kmP; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isUint32List: 1 + }; + A.NativeUint8ClampedList.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint8ClampedList_04U; + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + $isTrustedGetRuntimeType: 1, + $isUint8ClampedList: 1 + }; + A.NativeUint8List.prototype = { + get$runtimeType(receiver) { + return B.Type_Uint8List_8Eb; + }, + get$length(receiver) { + return receiver.length; + }, + $index(receiver, index) { + A._asInt(index); + A._checkValidIndex(index, receiver, receiver.length); + return receiver[index]; + }, + sublist$2(receiver, start, end) { + return new Uint8Array(receiver.subarray(start, A._checkValidRange(start, end, receiver.length))); + }, + sublist$1(receiver, start) { + return this.sublist$2(receiver, start, null); + }, + $isTrustedGetRuntimeType: 1, + $isUint8List: 1 + }; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin.prototype = {}; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin.prototype = {}; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin.prototype = {}; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin.prototype = {}; + A.Rti.prototype = { + _eval$1(recipe) { + return A._Universe_evalInEnvironment(init.typeUniverse, this, recipe); + }, + _bind$1(typeOrTuple) { + return A._Universe_bind(init.typeUniverse, this, typeOrTuple); + } + }; + A._FunctionParameters.prototype = {}; + A._Type.prototype = { + toString$0(_) { + return A._rtiToString(this._rti, null); + } + }; + A._Error.prototype = { + toString$0(_) 
{ + return this.__rti$_message; + } + }; + A._TypeError.prototype = {$isTypeError: 1}; + A._AsyncRun__initializeScheduleImmediate_internalCallback.prototype = { + call$1(__wc0_formal) { + var t1 = this._box_0, + f = t1.storedCallback; + t1.storedCallback = null; + f.call$0(); + }, + $signature: 2 + }; + A._AsyncRun__initializeScheduleImmediate_closure.prototype = { + call$1(callback) { + var t1, t2; + this._box_0.storedCallback = type$.void_Function._as(callback); + t1 = this.div; + t2 = this.span; + t1.firstChild ? t1.removeChild(t2) : t1.appendChild(t2); + }, + $signature: 16 + }; + A._AsyncRun__scheduleImmediateJsOverride_internalCallback.prototype = { + call$0() { + this.callback.call$0(); + }, + $signature: 5 + }; + A._AsyncRun__scheduleImmediateWithSetImmediate_internalCallback.prototype = { + call$0() { + this.callback.call$0(); + }, + $signature: 5 + }; + A._TimerImpl.prototype = { + _TimerImpl$2(milliseconds, callback) { + if (self.setTimeout != null) + self.setTimeout(A.convertDartClosureToJS(new A._TimerImpl_internalCallback(this, callback), 0), milliseconds); + else + throw A.wrapException(A.UnsupportedError$("`setTimeout()` not found.")); + } + }; + A._TimerImpl_internalCallback.prototype = { + call$0() { + this.callback.call$0(); + }, + $signature: 0 + }; + A._AsyncAwaitCompleter.prototype = { + complete$1(value) { + var t2, _this = this, + t1 = _this.$ti; + t1._eval$1("1/?")._as(value); + if (value == null) + value = t1._precomputed1._as(value); + if (!_this.isSync) + _this._future._asyncComplete$1(value); + else { + t2 = _this._future; + if (t1._eval$1("Future<1>")._is(value)) + t2._chainFuture$1(value); + else + t2._completeWithValue$1(value); + } + }, + completeError$2(e, st) { + var t1 = this._future; + if (this.isSync) + t1._completeError$2(e, st); + else + t1._asyncCompleteError$2(e, st); + } + }; + A._awaitOnObject_closure.prototype = { + call$1(result) { + return this.bodyFunction.call$2(0, result); + }, + $signature: 3 + }; + 
A._awaitOnObject_closure0.prototype = { + call$2(error, stackTrace) { + this.bodyFunction.call$2(1, new A.ExceptionAndStackTrace(error, type$.StackTrace._as(stackTrace))); + }, + $signature: 17 + }; + A._wrapJsFunctionForAsync_closure.prototype = { + call$2(errorCode, result) { + this.$protected(A._asInt(errorCode), result); + }, + $signature: 18 + }; + A.AsyncError.prototype = { + toString$0(_) { + return A.S(this.error); + }, + $isError: 1, + get$stackTrace() { + return this.stackTrace; + } + }; + A._BroadcastStream.prototype = {}; + A._BroadcastSubscription.prototype = { + _onPause$0() { + }, + _onResume$0() { + }, + set$_async$_next(_next) { + this._async$_next = this.$ti._eval$1("_BroadcastSubscription<1>?")._as(_next); + }, + set$_async$_previous(_previous) { + this._async$_previous = this.$ti._eval$1("_BroadcastSubscription<1>?")._as(_previous); + } + }; + A._BroadcastStreamController.prototype = { + get$_mayAddEvent() { + return this._state < 4; + }, + _subscribe$4(onData, onError, onDone, cancelOnError) { + var t2, t3, t4, t5, subscription, oldLast, _this = this, + t1 = A._instanceType(_this); + t1._eval$1("~(1)?")._as(onData); + type$.nullable_void_Function._as(onDone); + if ((_this._state & 4) !== 0) { + t1 = new A._DoneStreamSubscription($.Zone__current, t1._eval$1("_DoneStreamSubscription<1>")); + A.scheduleMicrotask(t1.get$_onMicrotask()); + if (onDone != null) + t1.set$_onDone(type$.void_Function._as(onDone)); + return t1; + } + t2 = $.Zone__current; + t3 = cancelOnError ? 1 : 0; + t4 = onError != null ? 32 : 0; + type$.$env_1_1_void._bind$1(t1._precomputed1)._eval$1("1(2)")._as(onData); + A._BufferingStreamSubscription__registerErrorHandler(t2, onError); + t5 = onDone == null ? 
A.async___nullDoneHandler$closure() : onDone; + type$.void_Function._as(t5); + t1 = t1._eval$1("_BroadcastSubscription<1>"); + subscription = new A._BroadcastSubscription(_this, onData, t2, t3 | t4, t1); + subscription.set$_async$_previous(subscription); + subscription.set$_async$_next(subscription); + t1._as(subscription); + subscription._eventState = _this._state & 1; + oldLast = _this._lastSubscription; + _this.set$_lastSubscription(subscription); + subscription.set$_async$_next(null); + subscription.set$_async$_previous(oldLast); + if (oldLast == null) + _this.set$_firstSubscription(subscription); + else + oldLast.set$_async$_next(subscription); + if (_this._firstSubscription == _this._lastSubscription) + A._runGuarded(_this.onListen); + return subscription; + }, + _addEventError$0() { + if ((this._state & 4) !== 0) + return new A.StateError("Cannot add new events after calling close"); + return new A.StateError("Cannot add new events while doing an addStream"); + }, + _forEachListener$1(action) { + var t2, subscription, id, next, previous, _this = this, + t1 = A._instanceType(_this); + t1._eval$1("~(_BufferingStreamSubscription<1>)")._as(action); + t2 = _this._state; + if ((t2 & 2) !== 0) + throw A.wrapException(A.StateError$(string$.Cannot)); + subscription = _this._firstSubscription; + if (subscription == null) + return; + id = t2 & 1; + _this._state = t2 ^ 3; + for (t1 = t1._eval$1("_BroadcastSubscription<1>"); subscription != null;) { + t2 = subscription._eventState; + if ((t2 & 1) === id) { + subscription._eventState = t2 | 2; + action.call$1(subscription); + t2 = subscription._eventState ^= 1; + next = subscription._async$_next; + if ((t2 & 4) !== 0) { + t1._as(subscription); + previous = subscription._async$_previous; + if (previous == null) + _this.set$_firstSubscription(next); + else + previous.set$_async$_next(next); + if (next == null) + _this.set$_lastSubscription(previous); + else + next.set$_async$_previous(previous); + 
subscription.set$_async$_previous(subscription); + subscription.set$_async$_next(subscription); + } + subscription._eventState &= 4294967293; + subscription = next; + } else + subscription = subscription._async$_next; + } + _this._state &= 4294967293; + if (_this._firstSubscription == null) + _this._callOnCancel$0(); + }, + _callOnCancel$0() { + if ((this._state & 4) !== 0) + if (null.get$_mayComplete()) + null._asyncComplete$1(null); + A._runGuarded(this.onCancel); + }, + set$_firstSubscription(_firstSubscription) { + this._firstSubscription = A._instanceType(this)._eval$1("_BroadcastSubscription<1>?")._as(_firstSubscription); + }, + set$_lastSubscription(_lastSubscription) { + this._lastSubscription = A._instanceType(this)._eval$1("_BroadcastSubscription<1>?")._as(_lastSubscription); + }, + $isStreamController: 1, + $is_StreamControllerLifecycle: 1, + $is_EventDispatch: 1 + }; + A._SyncBroadcastStreamController.prototype = { + get$_mayAddEvent() { + return A._BroadcastStreamController.prototype.get$_mayAddEvent.call(this) && (this._state & 2) === 0; + }, + _addEventError$0() { + if ((this._state & 2) !== 0) + return new A.StateError(string$.Cannot); + return this.super$_BroadcastStreamController$_addEventError(); + }, + _sendData$1(data) { + var t1, _this = this; + _this.$ti._precomputed1._as(data); + t1 = _this._firstSubscription; + if (t1 == null) + return; + if (t1 === _this._lastSubscription) { + _this._state |= 2; + t1._add$1(data); + _this._state &= 4294967293; + if (_this._firstSubscription == null) + _this._callOnCancel$0(); + return; + } + _this._forEachListener$1(new A._SyncBroadcastStreamController__sendData_closure(_this, data)); + } + }; + A._SyncBroadcastStreamController__sendData_closure.prototype = { + call$1(subscription) { + this.$this.$ti._eval$1("_BufferingStreamSubscription<1>")._as(subscription)._add$1(this.data); + }, + $signature() { + return this.$this.$ti._eval$1("~(_BufferingStreamSubscription<1>)"); + } + }; + A._Completer.prototype = 
{ + completeError$2(error, stackTrace) { + var _0_0, + t1 = this.future; + if ((t1._state & 30) !== 0) + throw A.wrapException(A.StateError$("Future already completed")); + _0_0 = A._interceptUserError(error, stackTrace); + t1._asyncCompleteError$2(_0_0.error, _0_0.stackTrace); + }, + completeError$1(error) { + return this.completeError$2(error, null); + } + }; + A._AsyncCompleter.prototype = { + complete$1(value) { + var t2, + t1 = this.$ti; + t1._eval$1("1/?")._as(value); + t2 = this.future; + if ((t2._state & 30) !== 0) + throw A.wrapException(A.StateError$("Future already completed")); + t2._asyncComplete$1(t1._eval$1("1/")._as(value)); + } + }; + A._FutureListener.prototype = { + matchesErrorTest$1(asyncError) { + if ((this.state & 15) !== 6) + return true; + return this.result._zone.runUnary$2$2(type$.bool_Function_Object._as(this.callback), asyncError.error, type$.bool, type$.Object); + }, + handleError$1(asyncError) { + var exception, _this = this, + errorCallback = _this.errorCallback, + result = null, + t1 = type$.dynamic, + t2 = type$.Object, + t3 = asyncError.error, + t4 = _this.result._zone; + if (type$.dynamic_Function_Object_StackTrace._is(errorCallback)) + result = t4.runBinary$3$3(errorCallback, t3, asyncError.stackTrace, t1, t2, type$.StackTrace); + else + result = t4.runUnary$2$2(type$.dynamic_Function_Object._as(errorCallback), t3, t1, t2); + try { + t1 = _this.$ti._eval$1("2/")._as(result); + return t1; + } catch (exception) { + if (type$.TypeError._is(A.unwrapException(exception))) { + if ((_this.state & 1) !== 0) + throw A.wrapException(A.ArgumentError$("The error handler of Future.then must return a value of the returned future's type", "onError")); + throw A.wrapException(A.ArgumentError$("The error handler of Future.catchError must return a value of the future's type", "onError")); + } else + throw exception; + } + } + }; + A._Future.prototype = { + then$1$2$onError(f, onError, $R) { + var currentZone, result, + t1 = this.$ti; + 
t1._bind$1($R)._eval$1("1/(2)")._as(f); + currentZone = $.Zone__current; + if (currentZone === B.C__RootZone) { + if (!type$.dynamic_Function_Object_StackTrace._is(onError) && !type$.dynamic_Function_Object._is(onError)) + throw A.wrapException(A.ArgumentError$value(onError, "onError", string$.Error_)); + } else { + $R._eval$1("@<0/>")._bind$1(t1._precomputed1)._eval$1("1(2)")._as(f); + onError = A._registerErrorHandler(onError, currentZone); + } + result = new A._Future(currentZone, $R._eval$1("_Future<0>")); + this._addListener$1(new A._FutureListener(result, 3, f, onError, t1._eval$1("@<1>")._bind$1($R)._eval$1("_FutureListener<1,2>"))); + return result; + }, + _thenAwait$1$2(f, onError, $E) { + var result, + t1 = this.$ti; + t1._bind$1($E)._eval$1("1/(2)")._as(f); + result = new A._Future($.Zone__current, $E._eval$1("_Future<0>")); + this._addListener$1(new A._FutureListener(result, 19, f, onError, t1._eval$1("@<1>")._bind$1($E)._eval$1("_FutureListener<1,2>"))); + return result; + }, + _setErrorObject$1(error) { + this._state = this._state & 1 | 16; + this._resultOrListeners = error; + }, + _cloneResult$1(source) { + this._state = source._state & 30 | this._state & 1; + this._resultOrListeners = source._resultOrListeners; + }, + _addListener$1(listener) { + var source, _this = this, + t1 = _this._state; + if (t1 <= 3) { + listener._nextListener = type$.nullable__FutureListener_dynamic_dynamic._as(_this._resultOrListeners); + _this._resultOrListeners = listener; + } else { + if ((t1 & 4) !== 0) { + source = type$._Future_dynamic._as(_this._resultOrListeners); + if ((source._state & 24) === 0) { + source._addListener$1(listener); + return; + } + _this._cloneResult$1(source); + } + A._rootScheduleMicrotask(null, null, _this._zone, type$.void_Function._as(new A._Future__addListener_closure(_this, listener))); + } + }, + _prependListeners$1(listeners) { + var t1, existingListeners, next, cursor, next0, source, _this = this, _box_0 = {}; + _box_0.listeners = 
listeners; + if (listeners == null) + return; + t1 = _this._state; + if (t1 <= 3) { + existingListeners = type$.nullable__FutureListener_dynamic_dynamic._as(_this._resultOrListeners); + _this._resultOrListeners = listeners; + if (existingListeners != null) { + next = listeners._nextListener; + for (cursor = listeners; next != null; cursor = next, next = next0) + next0 = next._nextListener; + cursor._nextListener = existingListeners; + } + } else { + if ((t1 & 4) !== 0) { + source = type$._Future_dynamic._as(_this._resultOrListeners); + if ((source._state & 24) === 0) { + source._prependListeners$1(listeners); + return; + } + _this._cloneResult$1(source); + } + _box_0.listeners = _this._reverseListeners$1(listeners); + A._rootScheduleMicrotask(null, null, _this._zone, type$.void_Function._as(new A._Future__prependListeners_closure(_box_0, _this))); + } + }, + _removeListeners$0() { + var current = type$.nullable__FutureListener_dynamic_dynamic._as(this._resultOrListeners); + this._resultOrListeners = null; + return this._reverseListeners$1(current); + }, + _reverseListeners$1(listeners) { + var current, prev, next; + for (current = listeners, prev = null; current != null; prev = current, current = next) { + next = current._nextListener; + current._nextListener = prev; + } + return prev; + }, + _chainForeignFuture$1(source) { + var e, s, exception, _this = this; + _this._state ^= 2; + try { + source.then$1$2$onError(new A._Future__chainForeignFuture_closure(_this), new A._Future__chainForeignFuture_closure0(_this), type$.Null); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + A.scheduleMicrotask(new A._Future__chainForeignFuture_closure1(_this, e, s)); + } + }, + _completeWithValue$1(value) { + var listeners, _this = this; + _this.$ti._precomputed1._as(value); + listeners = _this._removeListeners$0(); + _this._state = 8; + _this._resultOrListeners = value; + A._Future__propagateToListeners(_this, listeners); + }, 
+ _completeWithResultOf$1(source) { + var t1, listeners, _this = this; + if ((source._state & 16) !== 0) { + t1 = _this._zone === source._zone; + t1 = !(t1 || t1); + } else + t1 = false; + if (t1) + return; + listeners = _this._removeListeners$0(); + _this._cloneResult$1(source); + A._Future__propagateToListeners(_this, listeners); + }, + _completeError$2(error, stackTrace) { + var listeners; + type$.Object._as(error); + type$.StackTrace._as(stackTrace); + listeners = this._removeListeners$0(); + this._setErrorObject$1(new A.AsyncError(error, stackTrace)); + A._Future__propagateToListeners(this, listeners); + }, + _asyncComplete$1(value) { + var t1 = this.$ti; + t1._eval$1("1/")._as(value); + if (t1._eval$1("Future<1>")._is(value)) { + this._chainFuture$1(value); + return; + } + this._asyncCompleteWithValue$1(value); + }, + _asyncCompleteWithValue$1(value) { + var _this = this; + _this.$ti._precomputed1._as(value); + _this._state ^= 2; + A._rootScheduleMicrotask(null, null, _this._zone, type$.void_Function._as(new A._Future__asyncCompleteWithValue_closure(_this, value))); + }, + _chainFuture$1(value) { + var t1 = this.$ti; + t1._eval$1("Future<1>")._as(value); + if (t1._is(value)) { + A._Future__chainCoreFuture(value, this, false); + return; + } + this._chainForeignFuture$1(value); + }, + _asyncCompleteError$2(error, stackTrace) { + this._state ^= 2; + A._rootScheduleMicrotask(null, null, this._zone, type$.void_Function._as(new A._Future__asyncCompleteError_closure(this, error, stackTrace))); + }, + $isFuture: 1 + }; + A._Future__addListener_closure.prototype = { + call$0() { + A._Future__propagateToListeners(this.$this, this.listener); + }, + $signature: 0 + }; + A._Future__prependListeners_closure.prototype = { + call$0() { + A._Future__propagateToListeners(this.$this, this._box_0.listeners); + }, + $signature: 0 + }; + A._Future__chainForeignFuture_closure.prototype = { + call$1(value) { + var error, stackTrace, exception, + t1 = this.$this; + t1._state ^= 2; + 
try { + t1._completeWithValue$1(t1.$ti._precomputed1._as(value)); + } catch (exception) { + error = A.unwrapException(exception); + stackTrace = A.getTraceFromException(exception); + t1._completeError$2(error, stackTrace); + } + }, + $signature: 2 + }; + A._Future__chainForeignFuture_closure0.prototype = { + call$2(error, stackTrace) { + this.$this._completeError$2(type$.Object._as(error), type$.StackTrace._as(stackTrace)); + }, + $signature: 7 + }; + A._Future__chainForeignFuture_closure1.prototype = { + call$0() { + this.$this._completeError$2(this.e, this.s); + }, + $signature: 0 + }; + A._Future__chainCoreFuture_closure.prototype = { + call$0() { + A._Future__chainCoreFuture(this._box_0.source, this.target, true); + }, + $signature: 0 + }; + A._Future__asyncCompleteWithValue_closure.prototype = { + call$0() { + this.$this._completeWithValue$1(this.value); + }, + $signature: 0 + }; + A._Future__asyncCompleteError_closure.prototype = { + call$0() { + this.$this._completeError$2(this.error, this.stackTrace); + }, + $signature: 0 + }; + A._Future__propagateToListeners_handleWhenCompleteCallback.prototype = { + call$0() { + var e, s, t1, exception, t2, t3, originalSource, joinedResult, _this = this, completeResult = null; + try { + t1 = _this._box_0.listener; + completeResult = t1.result._zone.run$1$1(type$.dynamic_Function._as(t1.callback), type$.dynamic); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + if (_this.hasError && type$.AsyncError._as(_this._box_1.source._resultOrListeners).error === e) { + t1 = _this._box_0; + t1.listenerValueOrError = type$.AsyncError._as(_this._box_1.source._resultOrListeners); + } else { + t1 = e; + t2 = s; + if (t2 == null) + t2 = A.AsyncError_defaultStackTrace(t1); + t3 = _this._box_0; + t3.listenerValueOrError = new A.AsyncError(t1, t2); + t1 = t3; + } + t1.listenerHasError = true; + return; + } + if (completeResult instanceof A._Future && (completeResult._state & 24) !== 0) 
{ + if ((completeResult._state & 16) !== 0) { + t1 = _this._box_0; + t1.listenerValueOrError = type$.AsyncError._as(completeResult._resultOrListeners); + t1.listenerHasError = true; + } + return; + } + if (completeResult instanceof A._Future) { + originalSource = _this._box_1.source; + joinedResult = new A._Future(originalSource._zone, originalSource.$ti); + completeResult.then$1$2$onError(new A._Future__propagateToListeners_handleWhenCompleteCallback_closure(joinedResult, originalSource), new A._Future__propagateToListeners_handleWhenCompleteCallback_closure0(joinedResult), type$.void); + t1 = _this._box_0; + t1.listenerValueOrError = joinedResult; + t1.listenerHasError = false; + } + }, + $signature: 0 + }; + A._Future__propagateToListeners_handleWhenCompleteCallback_closure.prototype = { + call$1(__wc0_formal) { + this.joinedResult._completeWithResultOf$1(this.originalSource); + }, + $signature: 2 + }; + A._Future__propagateToListeners_handleWhenCompleteCallback_closure0.prototype = { + call$2(e, s) { + this.joinedResult._completeError$2(type$.Object._as(e), type$.StackTrace._as(s)); + }, + $signature: 7 + }; + A._Future__propagateToListeners_handleValueCallback.prototype = { + call$0() { + var e, s, t1, t2, t3, t4, t5, exception; + try { + t1 = this._box_0; + t2 = t1.listener; + t3 = t2.$ti; + t4 = t3._precomputed1; + t5 = t4._as(this.sourceResult); + t1.listenerValueOrError = t2.result._zone.runUnary$2$2(t3._eval$1("2/(1)")._as(t2.callback), t5, t3._eval$1("2/"), t4); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + t1 = e; + t2 = s; + if (t2 == null) + t2 = A.AsyncError_defaultStackTrace(t1); + t3 = this._box_0; + t3.listenerValueOrError = new A.AsyncError(t1, t2); + t3.listenerHasError = true; + } + }, + $signature: 0 + }; + A._Future__propagateToListeners_handleError.prototype = { + call$0() { + var asyncError, e, s, t1, exception, t2, t3, _this = this; + try { + asyncError = 
type$.AsyncError._as(_this._box_1.source._resultOrListeners); + t1 = _this._box_0; + if (t1.listener.matchesErrorTest$1(asyncError) && t1.listener.errorCallback != null) { + t1.listenerValueOrError = t1.listener.handleError$1(asyncError); + t1.listenerHasError = false; + } + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + t1 = type$.AsyncError._as(_this._box_1.source._resultOrListeners); + if (t1.error === e) { + t2 = _this._box_0; + t2.listenerValueOrError = t1; + t1 = t2; + } else { + t1 = e; + t2 = s; + if (t2 == null) + t2 = A.AsyncError_defaultStackTrace(t1); + t3 = _this._box_0; + t3.listenerValueOrError = new A.AsyncError(t1, t2); + t1 = t3; + } + t1.listenerHasError = true; + } + }, + $signature: 0 + }; + A._AsyncCallbackEntry.prototype = {}; + A.Stream.prototype = { + get$length(_) { + var t1 = {}, + future = new A._Future($.Zone__current, type$._Future_int); + t1.count = 0; + this.listen$4$cancelOnError$onDone$onError(new A.Stream_length_closure(t1, this), true, new A.Stream_length_closure0(t1, future), future.get$_completeError()); + return future; + } + }; + A.Stream_length_closure.prototype = { + call$1(__wc0_formal) { + this.$this.$ti._precomputed1._as(__wc0_formal); + ++this._box_0.count; + }, + $signature() { + return this.$this.$ti._eval$1("~(1)"); + } + }; + A.Stream_length_closure0.prototype = { + call$0() { + var t1 = this.future, + t2 = t1.$ti, + t3 = t2._eval$1("1/")._as(this._box_0.count), + listeners = t1._removeListeners$0(); + t2._precomputed1._as(t3); + t1._state = 8; + t1._resultOrListeners = t3; + A._Future__propagateToListeners(t1, listeners); + }, + $signature: 0 + }; + A._ControllerStream.prototype = { + get$hashCode(_) { + return (A.Primitives_objectHashCode(this._async$_controller) ^ 892482866) >>> 0; + }, + $eq(_, other) { + if (other == null) + return false; + if (this === other) + return true; + return other instanceof A._BroadcastStream && other._async$_controller === 
this._async$_controller; + } + }; + A._ControllerSubscription.prototype = { + _onPause$0() { + A._instanceType(this._async$_controller)._eval$1("StreamSubscription<1>")._as(this); + }, + _onResume$0() { + A._instanceType(this._async$_controller)._eval$1("StreamSubscription<1>")._as(this); + } + }; + A._BufferingStreamSubscription.prototype = { + _add$1(data) { + var t2, _this = this, + t1 = A._instanceType(_this); + t1._precomputed1._as(data); + t2 = _this._state; + if ((t2 & 8) !== 0) + return; + if (t2 < 64) + _this._sendData$1(data); + else + _this._addPending$1(new A._DelayedData(data, t1._eval$1("_DelayedData<1>"))); + }, + _onPause$0() { + }, + _onResume$0() { + }, + _addPending$1($event) { + var lastEvent, t1, _this = this, + pending = _this._pending; + if (pending == null) { + pending = new A._PendingEvents(A._instanceType(_this)._eval$1("_PendingEvents<1>")); + _this.set$_pending(pending); + } + lastEvent = pending.lastPendingEvent; + if (lastEvent == null) + pending.firstPendingEvent = pending.lastPendingEvent = $event; + else + pending.lastPendingEvent = lastEvent.next = $event; + t1 = _this._state; + if ((t1 & 128) === 0) { + t1 |= 128; + _this._state = t1; + if (t1 < 256) + pending.schedule$1(_this); + } + }, + _sendData$1(data) { + var t2, _this = this, + t1 = A._instanceType(_this)._precomputed1; + t1._as(data); + t2 = _this._state; + _this._state = t2 | 64; + _this._zone.runUnaryGuarded$1$2(_this._onData, data, t1); + _this._state &= 4294967231; + _this._checkState$1((t2 & 4) !== 0); + }, + _checkState$1(wasInputPaused) { + var t2, isInputPaused, _this = this, + t1 = _this._state; + if ((t1 & 128) !== 0 && _this._pending.lastPendingEvent == null) { + t1 = _this._state = t1 & 4294967167; + t2 = false; + if ((t1 & 4) !== 0) + if (t1 < 256) { + t2 = _this._pending; + t2 = t2 == null ? 
null : t2.lastPendingEvent == null; + t2 = t2 !== false; + } + if (t2) { + t1 &= 4294967291; + _this._state = t1; + } + } + for (; true; wasInputPaused = isInputPaused) { + if ((t1 & 8) !== 0) { + _this.set$_pending(null); + return; + } + isInputPaused = (t1 & 4) !== 0; + if (wasInputPaused === isInputPaused) + break; + _this._state = t1 ^ 64; + if (isInputPaused) + _this._onPause$0(); + else + _this._onResume$0(); + t1 = _this._state &= 4294967231; + } + if ((t1 & 128) !== 0 && t1 < 256) + _this._pending.schedule$1(_this); + }, + set$_pending(_pending) { + this._pending = A._instanceType(this)._eval$1("_PendingEvents<1>?")._as(_pending); + }, + $isStreamSubscription: 1, + $is_EventDispatch: 1 + }; + A._StreamImpl.prototype = { + listen$4$cancelOnError$onDone$onError(onData, cancelOnError, onDone, onError) { + var t1 = this.$ti; + t1._eval$1("~(1)?")._as(onData); + type$.nullable_void_Function._as(onDone); + return this._async$_controller._subscribe$4(t1._eval$1("~(1)?")._as(onData), onError, onDone, cancelOnError === true); + }, + listen$1(onData) { + return this.listen$4$cancelOnError$onDone$onError(onData, null, null, null); + } + }; + A._DelayedEvent.prototype = {}; + A._DelayedData.prototype = {}; + A._PendingEvents.prototype = { + schedule$1(dispatch) { + var t1, _this = this; + _this.$ti._eval$1("_EventDispatch<1>")._as(dispatch); + t1 = _this._state; + if (t1 === 1) + return; + if (t1 >= 1) { + _this._state = 1; + return; + } + A.scheduleMicrotask(new A._PendingEvents_schedule_closure(_this, dispatch)); + _this._state = 1; + } + }; + A._PendingEvents_schedule_closure.prototype = { + call$0() { + var t2, $event, nextEvent, + t1 = this.$this, + oldState = t1._state; + t1._state = 0; + if (oldState === 3) + return; + t2 = t1.$ti._eval$1("_EventDispatch<1>")._as(this.dispatch); + $event = t1.firstPendingEvent; + nextEvent = $event.next; + t1.firstPendingEvent = nextEvent; + if (nextEvent == null) + t1.lastPendingEvent = null; + 
A._instanceType($event)._eval$1("_EventDispatch<1>")._as(t2)._sendData$1($event.value); + }, + $signature: 0 + }; + A._DoneStreamSubscription.prototype = { + _onMicrotask$0() { + var _0_0, _this = this, + unscheduledState = _this._state - 1; + if (unscheduledState === 0) { + _this._state = -1; + _0_0 = _this._onDone; + if (_0_0 != null) { + _this.set$_onDone(null); + _this._zone.runGuarded$1(_0_0); + } + } else + _this._state = unscheduledState; + }, + set$_onDone(_onDone) { + this._onDone = type$.nullable_void_Function._as(_onDone); + }, + $isStreamSubscription: 1 + }; + A._StreamIterator.prototype = {}; + A._Zone.prototype = {$isZone: 1}; + A._rootHandleError_closure.prototype = { + call$0() { + A.Error_throwWithStackTrace(this.error, this.stackTrace); + }, + $signature: 0 + }; + A._RootZone.prototype = { + runGuarded$1(f) { + var e, s, exception; + type$.void_Function._as(f); + try { + if (B.C__RootZone === $.Zone__current) { + f.call$0(); + return; + } + A._rootRun(null, null, this, f, type$.void); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + A._rootHandleError(type$.Object._as(e), type$.StackTrace._as(s)); + } + }, + runUnaryGuarded$1$2(f, arg, $T) { + var e, s, exception; + $T._eval$1("~(0)")._as(f); + $T._as(arg); + try { + if (B.C__RootZone === $.Zone__current) { + f.call$1(arg); + return; + } + A._rootRunUnary(null, null, this, f, arg, type$.void, $T); + } catch (exception) { + e = A.unwrapException(exception); + s = A.getTraceFromException(exception); + A._rootHandleError(type$.Object._as(e), type$.StackTrace._as(s)); + } + }, + bindCallbackGuarded$1(f) { + return new A._RootZone_bindCallbackGuarded_closure(this, type$.void_Function._as(f)); + }, + $index(_, key) { + return null; + }, + run$1$1(f, $R) { + $R._eval$1("0()")._as(f); + if ($.Zone__current === B.C__RootZone) + return f.call$0(); + return A._rootRun(null, null, this, f, $R); + }, + runUnary$2$2(f, arg, $R, $T) { + 
$R._eval$1("@<0>")._bind$1($T)._eval$1("1(2)")._as(f); + $T._as(arg); + if ($.Zone__current === B.C__RootZone) + return f.call$1(arg); + return A._rootRunUnary(null, null, this, f, arg, $R, $T); + }, + runBinary$3$3(f, arg1, arg2, $R, T1, T2) { + $R._eval$1("@<0>")._bind$1(T1)._bind$1(T2)._eval$1("1(2,3)")._as(f); + T1._as(arg1); + T2._as(arg2); + if ($.Zone__current === B.C__RootZone) + return f.call$2(arg1, arg2); + return A._rootRunBinary(null, null, this, f, arg1, arg2, $R, T1, T2); + }, + registerBinaryCallback$3$1(f, $R, T1, T2) { + return $R._eval$1("@<0>")._bind$1(T1)._bind$1(T2)._eval$1("1(2,3)")._as(f); + } + }; + A._RootZone_bindCallbackGuarded_closure.prototype = { + call$0() { + return this.$this.runGuarded$1(this.f); + }, + $signature: 0 + }; + A._HashMap.prototype = { + get$length(_) { + return this._collection$_length; + }, + get$keys() { + return new A._HashMapKeyIterable(this, this.$ti._eval$1("_HashMapKeyIterable<1>")); + }, + containsKey$1(key) { + var strings, nums; + if (typeof key == "string" && key !== "__proto__") { + strings = this._collection$_strings; + return strings == null ? false : strings[key] != null; + } else if (typeof key == "number" && (key & 1073741823) === key) { + nums = this._collection$_nums; + return nums == null ? false : nums[key] != null; + } else + return this._containsKey$1(key); + }, + _containsKey$1(key) { + var rest = this._collection$_rest; + if (rest == null) + return false; + return this._findBucketIndex$2(this._getBucket$2(rest, key), key) >= 0; + }, + $index(_, key) { + var strings, t1, nums; + if (typeof key == "string" && key !== "__proto__") { + strings = this._collection$_strings; + t1 = strings == null ? null : A._HashMap__getTableEntry(strings, key); + return t1; + } else if (typeof key == "number" && (key & 1073741823) === key) { + nums = this._collection$_nums; + t1 = nums == null ? 
null : A._HashMap__getTableEntry(nums, key); + return t1; + } else + return this._get$1(key); + }, + _get$1(key) { + var bucket, index, + rest = this._collection$_rest; + if (rest == null) + return null; + bucket = this._getBucket$2(rest, key); + index = this._findBucketIndex$2(bucket, key); + return index < 0 ? null : bucket[index + 1]; + }, + $indexSet(_, key, value) { + var strings, nums, rest, hash, bucket, index, _this = this, + t1 = _this.$ti; + t1._precomputed1._as(key); + t1._rest[1]._as(value); + if (typeof key == "string" && key !== "__proto__") { + strings = _this._collection$_strings; + _this._addHashTableEntry$3(strings == null ? _this._collection$_strings = A._HashMap__newHashTable() : strings, key, value); + } else if (typeof key == "number" && (key & 1073741823) === key) { + nums = _this._collection$_nums; + _this._addHashTableEntry$3(nums == null ? _this._collection$_nums = A._HashMap__newHashTable() : nums, key, value); + } else { + rest = _this._collection$_rest; + if (rest == null) + rest = _this._collection$_rest = A._HashMap__newHashTable(); + hash = A.objectHashCode(key) & 1073741823; + bucket = rest[hash]; + if (bucket == null) { + A._HashMap__setTableEntry(rest, hash, [key, value]); + ++_this._collection$_length; + _this._collection$_keys = null; + } else { + index = _this._findBucketIndex$2(bucket, key); + if (index >= 0) + bucket[index + 1] = value; + else { + bucket.push(key, value); + ++_this._collection$_length; + _this._collection$_keys = null; + } + } + } + }, + forEach$1(_, action) { + var keys, $length, t2, i, key, t3, _this = this, + t1 = _this.$ti; + t1._eval$1("~(1,2)")._as(action); + keys = _this._computeKeys$0(); + for ($length = keys.length, t2 = t1._precomputed1, t1 = t1._rest[1], i = 0; i < $length; ++i) { + key = keys[i]; + t2._as(key); + t3 = _this.$index(0, key); + action.call$2(key, t3 == null ? 
t1._as(t3) : t3); + if (keys !== _this._collection$_keys) + throw A.wrapException(A.ConcurrentModificationError$(_this)); + } + }, + _computeKeys$0() { + var strings, index, names, entries, i, nums, rest, bucket, $length, i0, _this = this, + result = _this._collection$_keys; + if (result != null) + return result; + result = A.List_List$filled(_this._collection$_length, null, false, type$.dynamic); + strings = _this._collection$_strings; + index = 0; + if (strings != null) { + names = Object.getOwnPropertyNames(strings); + entries = names.length; + for (i = 0; i < entries; ++i) { + result[index] = names[i]; + ++index; + } + } + nums = _this._collection$_nums; + if (nums != null) { + names = Object.getOwnPropertyNames(nums); + entries = names.length; + for (i = 0; i < entries; ++i) { + result[index] = +names[i]; + ++index; + } + } + rest = _this._collection$_rest; + if (rest != null) { + names = Object.getOwnPropertyNames(rest); + entries = names.length; + for (i = 0; i < entries; ++i) { + bucket = rest[names[i]]; + $length = bucket.length; + for (i0 = 0; i0 < $length; i0 += 2) { + result[index] = bucket[i0]; + ++index; + } + } + } + return _this._collection$_keys = result; + }, + _addHashTableEntry$3(table, key, value) { + var t1 = this.$ti; + t1._precomputed1._as(key); + t1._rest[1]._as(value); + if (table[key] == null) { + ++this._collection$_length; + this._collection$_keys = null; + } + A._HashMap__setTableEntry(table, key, value); + }, + _getBucket$2(table, key) { + return table[A.objectHashCode(key) & 1073741823]; + } + }; + A._IdentityHashMap.prototype = { + _findBucketIndex$2(bucket, key) { + var $length, i, t1; + if (bucket == null) + return -1; + $length = bucket.length; + for (i = 0; i < $length; i += 2) { + t1 = bucket[i]; + if (t1 == null ? 
key == null : t1 === key) + return i; + } + return -1; + } + }; + A._HashMapKeyIterable.prototype = { + get$length(_) { + return this._collection$_map._collection$_length; + }, + get$iterator(_) { + var t1 = this._collection$_map; + return new A._HashMapKeyIterator(t1, t1._computeKeys$0(), this.$ti._eval$1("_HashMapKeyIterator<1>")); + } + }; + A._HashMapKeyIterator.prototype = { + get$current() { + var t1 = this._collection$_current; + return t1 == null ? this.$ti._precomputed1._as(t1) : t1; + }, + moveNext$0() { + var _this = this, + keys = _this._collection$_keys, + offset = _this._offset, + t1 = _this._collection$_map; + if (keys !== t1._collection$_keys) + throw A.wrapException(A.ConcurrentModificationError$(t1)); + else if (offset >= keys.length) { + _this.set$_collection$_current(null); + return false; + } else { + _this.set$_collection$_current(keys[offset]); + _this._offset = offset + 1; + return true; + } + }, + set$_collection$_current(_current) { + this._collection$_current = this.$ti._eval$1("1?")._as(_current); + }, + $isIterator: 1 + }; + A.ListBase.prototype = { + get$iterator(receiver) { + return new A.ListIterator(receiver, this.get$length(receiver), A.instanceType(receiver)._eval$1("ListIterator")); + }, + elementAt$1(receiver, index) { + return this.$index(receiver, index); + }, + map$1$1(receiver, f, $T) { + var t1 = A.instanceType(receiver); + return new A.MappedListIterable(receiver, t1._bind$1($T)._eval$1("1(ListBase.E)")._as(f), t1._eval$1("@")._bind$1($T)._eval$1("MappedListIterable<1,2>")); + }, + toString$0(receiver) { + return A.Iterable_iterableToFullString(receiver, "[", "]"); + } + }; + A.MapBase.prototype = { + forEach$1(_, action) { + var t2, key, t3, + t1 = A._instanceType(this); + t1._eval$1("~(1,2)")._as(action); + for (t2 = this.get$keys(), t2 = t2.get$iterator(t2), t1 = t1._rest[1]; t2.moveNext$0();) { + key = t2.get$current(); + t3 = this.$index(0, key); + action.call$2(key, t3 == null ? 
t1._as(t3) : t3); + } + }, + get$length(_) { + var t1 = this.get$keys(); + return t1.get$length(t1); + }, + toString$0(_) { + return A.MapBase_mapToString(this); + }, + $isMap: 1 + }; + A.MapBase_mapToString_closure.prototype = { + call$2(k, v) { + var t2, + t1 = this._box_0; + if (!t1.first) + this.result._contents += ", "; + t1.first = false; + t1 = this.result; + t2 = A.S(k); + t2 = t1._contents += t2; + t1._contents = t2 + ": "; + t2 = A.S(v); + t1._contents += t2; + }, + $signature: 19 + }; + A._UnmodifiableMapMixin.prototype = {}; + A.MapView.prototype = { + $index(_, key) { + return this._collection$_map.$index(0, key); + }, + forEach$1(_, action) { + this._collection$_map.forEach$1(0, A._instanceType(this)._eval$1("~(1,2)")._as(action)); + }, + get$length(_) { + return this._collection$_map.__js_helper$_length; + }, + get$keys() { + var t1 = this._collection$_map; + return new A.LinkedHashMapKeysIterable(t1, A._instanceType(t1)._eval$1("LinkedHashMapKeysIterable<1>")); + }, + toString$0(_) { + return A.MapBase_mapToString(this._collection$_map); + }, + $isMap: 1 + }; + A.UnmodifiableMapView.prototype = {}; + A._UnmodifiableMapView_MapView__UnmodifiableMapMixin.prototype = {}; + A.Base64Codec.prototype = {}; + A.Base64Encoder.prototype = { + convert$1(input) { + var t1; + type$.List_int._as(input); + t1 = input.length; + if (t1 === 0) + return ""; + t1 = new A._Base64Encoder("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/").encode$4(input, 0, t1, true); + t1.toString; + return A.String_String$fromCharCodes(t1); + } + }; + A._Base64Encoder.prototype = { + encode$4(bytes, start, end, isLast) { + var t1, byteCount, fullChunks, bufferLength, output; + type$.List_int._as(bytes); + t1 = this._convert$_state; + byteCount = (t1 & 3) + (end - start); + fullChunks = B.JSInt_methods._tdivFast$1(byteCount, 3); + bufferLength = fullChunks * 4; + if (byteCount - fullChunks * 3 > 0) + bufferLength += 4; + output = new Uint8Array(bufferLength); + 
this._convert$_state = A._Base64Encoder_encodeChunk(this._alphabet, bytes, start, end, true, output, 0, t1); + if (bufferLength > 0) + return output; + return null; + } + }; + A.Base64Decoder.prototype = { + convert$1(input) { + var decoder, t1, t2, + end = A.RangeError_checkValidRange(0, null, input.length); + if (0 === end) + return new Uint8Array(0); + decoder = new A._Base64Decoder(); + t1 = decoder.decode$3(input, 0, end); + t1.toString; + t2 = decoder._convert$_state; + if (t2 < -1) + A.throwExpression(A.FormatException$("Missing padding character", input, end)); + if (t2 > 0) + A.throwExpression(A.FormatException$("Invalid length, must be multiple of four", input, end)); + decoder._convert$_state = -1; + return t1; + } + }; + A._Base64Decoder.prototype = { + decode$3(input, start, end) { + var buffer, _this = this, + t1 = _this._convert$_state; + if (t1 < 0) { + _this._convert$_state = A._Base64Decoder__checkPadding(input, start, end, t1); + return null; + } + if (start === end) + return new Uint8Array(0); + buffer = A._Base64Decoder__allocateBuffer(input, start, end, t1); + _this._convert$_state = A._Base64Decoder_decodeChunk(input, start, end, buffer, 0, _this._convert$_state); + return buffer; + } + }; + A.Codec.prototype = {}; + A.Converter.prototype = {}; + A.NoSuchMethodError_toString_closure.prototype = { + call$2(key, value) { + var t1, t2, t3; + type$.Symbol._as(key); + t1 = this.sb; + t2 = this._box_0; + t3 = t1._contents += t2.comma; + t3 += key.__internal$_name; + t1._contents = t3; + t1._contents = t3 + ": "; + t3 = A.Error_safeToString(value); + t1._contents += t3; + t2.comma = ", "; + }, + $signature: 20 + }; + A.DateTime.prototype = { + $eq(_, other) { + if (other == null) + return false; + return other instanceof A.DateTime && this._value === other._value && this._microsecond === other._microsecond && this.isUtc === other.isUtc; + }, + get$hashCode(_) { + return A.Object_hash(this._value, this._microsecond); + }, + toString$0(_) { + var 
_this = this, + y = A.DateTime__fourDigits(A.Primitives_getYear(_this)), + m = A.DateTime__twoDigits(A.Primitives_getMonth(_this)), + d = A.DateTime__twoDigits(A.Primitives_getDay(_this)), + h = A.DateTime__twoDigits(A.Primitives_getHours(_this)), + min = A.DateTime__twoDigits(A.Primitives_getMinutes(_this)), + sec = A.DateTime__twoDigits(A.Primitives_getSeconds(_this)), + ms = A.DateTime__threeDigits(A.Primitives_getMilliseconds(_this)), + t1 = _this._microsecond, + us = t1 === 0 ? "" : A.DateTime__threeDigits(t1); + t1 = y + "-" + m; + if (_this.isUtc) + return t1 + "-" + d + " " + h + ":" + min + ":" + sec + "." + ms + us + "Z"; + else + return t1 + "-" + d + " " + h + ":" + min + ":" + sec + "." + ms + us; + } + }; + A._Enum.prototype = { + toString$0(_) { + return this._enumToString$0(); + } + }; + A.Error.prototype = { + get$stackTrace() { + return A.Primitives_extractStackTrace(this); + } + }; + A.AssertionError.prototype = { + toString$0(_) { + var t1 = this.message; + if (t1 != null) + return "Assertion failed: " + A.Error_safeToString(t1); + return "Assertion failed"; + } + }; + A.TypeError.prototype = {}; + A.ArgumentError.prototype = { + get$_errorName() { + return "Invalid argument" + (!this._hasValue ? "(s)" : ""); + }, + get$_errorExplanation() { + return ""; + }, + toString$0(_) { + var _this = this, + $name = _this.name, + nameString = $name == null ? "" : " (" + $name + ")", + message = _this.message, + messageString = message == null ? 
"" : ": " + A.S(message), + prefix = _this.get$_errorName() + nameString + messageString; + if (!_this._hasValue) + return prefix; + return prefix + _this.get$_errorExplanation() + ": " + A.Error_safeToString(_this.get$invalidValue()); + }, + get$invalidValue() { + return this.invalidValue; + } + }; + A.RangeError.prototype = { + get$invalidValue() { + return A._asNumQ(this.invalidValue); + }, + get$_errorName() { + return "RangeError"; + }, + get$_errorExplanation() { + var explanation, + start = this.start, + end = this.end; + if (start == null) + explanation = end != null ? ": Not less than or equal to " + A.S(end) : ""; + else if (end == null) + explanation = ": Not greater than or equal to " + A.S(start); + else if (end > start) + explanation = ": Not in inclusive range " + A.S(start) + ".." + A.S(end); + else + explanation = end < start ? ": Valid value range is empty" : ": Only valid value is " + A.S(start); + return explanation; + } + }; + A.IndexError.prototype = { + get$invalidValue() { + return A._asInt(this.invalidValue); + }, + get$_errorName() { + return "RangeError"; + }, + get$_errorExplanation() { + if (A._asInt(this.invalidValue) < 0) + return ": index must not be negative"; + var t1 = this.length; + if (t1 === 0) + return ": no indices are valid"; + return ": index should be less than " + t1; + }, + get$length(receiver) { + return this.length; + } + }; + A.NoSuchMethodError.prototype = { + toString$0(_) { + var $arguments, t1, _i, t2, t3, argument, receiverText, actualParameters, _this = this, _box_0 = {}, + sb = new A.StringBuffer(""); + _box_0.comma = ""; + $arguments = _this._core$_arguments; + for (t1 = $arguments.length, _i = 0, t2 = "", t3 = ""; _i < t1; ++_i, t3 = ", ") { + argument = $arguments[_i]; + sb._contents = t2 + t3; + t2 = A.Error_safeToString(argument); + t2 = sb._contents += t2; + _box_0.comma = ", "; + } + _this._namedArguments.forEach$1(0, new A.NoSuchMethodError_toString_closure(_box_0, sb)); + receiverText = 
A.Error_safeToString(_this._core$_receiver); + actualParameters = sb.toString$0(0); + return "NoSuchMethodError: method not found: '" + _this._core$_memberName.__internal$_name + "'\nReceiver: " + receiverText + "\nArguments: [" + actualParameters + "]"; + } + }; + A.UnsupportedError.prototype = { + toString$0(_) { + return "Unsupported operation: " + this.message; + } + }; + A.UnimplementedError.prototype = { + toString$0(_) { + return "UnimplementedError: " + this.message; + } + }; + A.StateError.prototype = { + toString$0(_) { + return "Bad state: " + this.message; + } + }; + A.ConcurrentModificationError.prototype = { + toString$0(_) { + var t1 = this.modifiedObject; + if (t1 == null) + return "Concurrent modification during iteration."; + return "Concurrent modification during iteration: " + A.Error_safeToString(t1) + "."; + } + }; + A.OutOfMemoryError.prototype = { + toString$0(_) { + return "Out of Memory"; + }, + get$stackTrace() { + return null; + }, + $isError: 1 + }; + A.StackOverflowError.prototype = { + toString$0(_) { + return "Stack Overflow"; + }, + get$stackTrace() { + return null; + }, + $isError: 1 + }; + A._Exception.prototype = { + toString$0(_) { + return "Exception: " + this.message; + } + }; + A.FormatException.prototype = { + toString$0(_) { + var lineEnd, lineNum, lineStart, previousCharWasCR, i, char, prefix, postfix, end, start, + message = this.message, + report = "" !== message ? 
"FormatException: " + message : "FormatException", + offset = this.offset, + source = this.source, + t1 = offset < 0 || offset > source.length; + if (t1) + offset = null; + if (offset == null) { + if (source.length > 78) + source = B.JSString_methods.substring$2(source, 0, 75) + "..."; + return report + "\n" + source; + } + for (lineEnd = source.length, lineNum = 1, lineStart = 0, previousCharWasCR = false, i = 0; i < offset; ++i) { + if (!(i < lineEnd)) + return A.ioore(source, i); + char = source.charCodeAt(i); + if (char === 10) { + if (lineStart !== i || !previousCharWasCR) + ++lineNum; + lineStart = i + 1; + previousCharWasCR = false; + } else if (char === 13) { + ++lineNum; + lineStart = i + 1; + previousCharWasCR = true; + } + } + report = lineNum > 1 ? report + (" (at line " + lineNum + ", character " + (offset - lineStart + 1) + ")\n") : report + (" (at character " + (offset + 1) + ")\n"); + for (i = offset; i < lineEnd; ++i) { + if (!(i >= 0)) + return A.ioore(source, i); + char = source.charCodeAt(i); + if (char === 10 || char === 13) { + lineEnd = i; + break; + } + } + prefix = ""; + if (lineEnd - lineStart > 78) { + postfix = "..."; + if (offset - lineStart < 75) { + end = lineStart + 75; + start = lineStart; + } else { + if (lineEnd - offset < 75) { + start = lineEnd - 75; + end = lineEnd; + postfix = ""; + } else { + start = offset - 36; + end = offset + 36; + } + prefix = "..."; + } + } else { + end = lineEnd; + start = lineStart; + postfix = ""; + } + return report + prefix + B.JSString_methods.substring$2(source, start, end) + postfix + "\n" + B.JSString_methods.$mul(" ", offset - start + prefix.length) + "^\n"; + } + }; + A.Iterable.prototype = { + map$1$1(_, toElement, $T) { + var t1 = A._instanceType(this); + return A.MappedIterable_MappedIterable(this, t1._bind$1($T)._eval$1("1(Iterable.E)")._as(toElement), t1._eval$1("Iterable.E"), $T); + }, + get$length(_) { + var count, + it = this.get$iterator(this); + for (count = 0; it.moveNext$0();) + 
++count; + return count; + }, + elementAt$1(_, index) { + var iterator, skipCount; + A.RangeError_checkNotNegative(index, "index"); + iterator = this.get$iterator(this); + for (skipCount = index; iterator.moveNext$0();) { + if (skipCount === 0) + return iterator.get$current(); + --skipCount; + } + throw A.wrapException(A.IndexError$withLength(index, index - skipCount, this, "index")); + }, + toString$0(_) { + return A.Iterable_iterableToShortString(this, "(", ")"); + } + }; + A.Null.prototype = { + get$hashCode(_) { + return A.Object.prototype.get$hashCode.call(this, 0); + }, + toString$0(_) { + return "null"; + } + }; + A.Object.prototype = {$isObject: 1, + $eq(_, other) { + return this === other; + }, + get$hashCode(_) { + return A.Primitives_objectHashCode(this); + }, + toString$0(_) { + return "Instance of '" + A.Primitives_objectTypeName(this) + "'"; + }, + noSuchMethod$1(_, invocation) { + throw A.wrapException(A.NoSuchMethodError_NoSuchMethodError$withInvocation(this, type$.Invocation._as(invocation))); + }, + get$runtimeType(_) { + return A.getRuntimeTypeOfDartObject(this); + }, + toString() { + return this.toString$0(this); + } + }; + A._StringStackTrace.prototype = { + toString$0(_) { + return ""; + }, + $isStackTrace: 1 + }; + A.StringBuffer.prototype = { + get$length(_) { + return this._contents.length; + }, + toString$0(_) { + var t1 = this._contents; + return t1.charCodeAt(0) == 0 ? 
t1 : t1; + } + }; + A.jsify__convert.prototype = { + call$1(o) { + var t1, convertedMap, key, convertedList; + if (A._noJsifyRequired(o)) + return o; + t1 = this._convertedObjects; + if (t1.containsKey$1(o)) + return t1.$index(0, o); + if (type$.Map_of_nullable_Object_and_nullable_Object._is(o)) { + convertedMap = {}; + t1.$indexSet(0, o, convertedMap); + for (t1 = o.get$keys(), t1 = t1.get$iterator(t1); t1.moveNext$0();) { + key = t1.get$current(); + convertedMap[key] = this.call$1(o.$index(0, key)); + } + return convertedMap; + } else if (type$.Iterable_nullable_Object._is(o)) { + convertedList = []; + t1.$indexSet(0, o, convertedList); + B.JSArray_methods.addAll$1(convertedList, J.map$1$1$ax(o, this, type$.dynamic)); + return convertedList; + } else + return o; + }, + $signature: 8 + }; + A.promiseToFuture_closure.prototype = { + call$1(r) { + return this.completer.complete$1(this.T._eval$1("0/?")._as(r)); + }, + $signature: 3 + }; + A.promiseToFuture_closure0.prototype = { + call$1(e) { + if (e == null) + return this.completer.completeError$1(new A.NullRejectionException(e === undefined)); + return this.completer.completeError$1(e); + }, + $signature: 3 + }; + A.dartify_convert.prototype = { + call$1(o) { + var t1, millisSinceEpoch, proto, t2, dartObject, originalKeys, dartKeys, i, jsKey, dartKey, l, $length; + if (A._noDartifyRequired(o)) + return o; + t1 = this._convertedObjects; + o.toString; + if (t1.containsKey$1(o)) + return t1.$index(0, o); + if (o instanceof Date) { + millisSinceEpoch = o.getTime(); + if (millisSinceEpoch < -864e13 || millisSinceEpoch > 864e13) + A.throwExpression(A.RangeError$range(millisSinceEpoch, -864e13, 864e13, "millisecondsSinceEpoch", null)); + A.checkNotNullable(true, "isUtc", type$.bool); + return new A.DateTime(millisSinceEpoch, 0, true); + } + if (o instanceof RegExp) + throw A.wrapException(A.ArgumentError$("structured clone of RegExp", null)); + if (typeof Promise != "undefined" && o instanceof Promise) + return 
A.promiseToFuture(o, type$.nullable_Object); + proto = Object.getPrototypeOf(o); + if (proto === Object.prototype || proto === null) { + t2 = type$.nullable_Object; + dartObject = A.LinkedHashMap_LinkedHashMap$_empty(t2, t2); + t1.$indexSet(0, o, dartObject); + originalKeys = Object.keys(o); + dartKeys = []; + for (t1 = J.getInterceptor$ax(originalKeys), t2 = t1.get$iterator(originalKeys); t2.moveNext$0();) + dartKeys.push(A.dartify(t2.get$current())); + for (i = 0; i < t1.get$length(originalKeys); ++i) { + jsKey = t1.$index(originalKeys, i); + if (!(i < dartKeys.length)) + return A.ioore(dartKeys, i); + dartKey = dartKeys[i]; + if (jsKey != null) + dartObject.$indexSet(0, dartKey, this.call$1(o[jsKey])); + } + return dartObject; + } + if (o instanceof Array) { + l = o; + dartObject = []; + t1.$indexSet(0, o, dartObject); + $length = A._asInt(o.length); + for (t1 = J.getInterceptor$asx(l), i = 0; i < $length; ++i) + dartObject.push(this.call$1(t1.$index(l, i))); + return dartObject; + } + return o; + }, + $signature: 8 + }; + A.NullRejectionException.prototype = { + toString$0(_) { + return "Promise was rejected with a value of `" + (this.isUndefined ? "undefined" : "null") + "`."; + } + }; + A._JSSecureRandom.prototype = { + _JSSecureRandom$0() { + var $crypto = self.crypto; + if ($crypto != null) + if ($crypto.getRandomValues != null) + return; + throw A.wrapException(A.UnsupportedError$("No source of cryptographically secure random numbers available.")); + }, + nextInt$1(max) { + var byteCount, t1, start, randomLimit, t2, t3, random, result, _null = null; + if (max <= 0 || max > 4294967296) + throw A.wrapException(new A.RangeError(_null, _null, false, _null, _null, "max must be in range 0 < max \u2264 2^32, was " + max)); + if (max > 255) + if (max > 65535) + byteCount = max > 16777215 ? 
4 : 3; + else + byteCount = 2; + else + byteCount = 1; + t1 = this._math$_buffer; + t1.$flags & 2 && A.throwUnsupportedOperation(t1, 11); + t1.setUint32(0, 0, false); + start = 4 - byteCount; + randomLimit = A._asInt(Math.pow(256, byteCount)); + for (t2 = max - 1, t3 = (max & t2) === 0; true;) { + crypto.getRandomValues(J.asUint8List$2$x(B.NativeByteData_methods.get$buffer(t1), start, byteCount)); + random = t1.getUint32(0, false); + if (t3) + return (random & t2) >>> 0; + result = random % max; + if (random - result + max < randomLimit) + return result; + } + } + }; + A.CryptorError.prototype = { + _enumToString$0() { + return "CryptorError." + this._name; + } + }; + A.FrameInfo.prototype = {}; + A.FrameCryptor.prototype = { + get$enabled() { + if (this.participantIdentity == null) + return false; + return this._enabled; + }, + setupTransform$6$codec$kind$operation$readable$trackId$writable(codec, kind, operation, readable, trackId, writable) { + return this.setupTransform$body$FrameCryptor(codec, kind, operation, readable, trackId, writable); + }, + setupTransform$5$kind$operation$readable$trackId$writable(kind, operation, readable, trackId, writable) { + return this.setupTransform$6$codec$kind$operation$readable$trackId$writable(null, kind, operation, readable, trackId, writable); + }, + setupTransform$body$FrameCryptor(codec, kind, operation, readable, trackId, writable) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, transformer, e, t2, t3, t4, t5, exception, t1; + var $async$setupTransform$6$codec$kind$operation$readable$trackId$writable = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $.$get$logger(); + t1.log$4(B.Level_INFO_800, "setupTransform " + operation + " kind " + kind, null, null); + 
$async$self.__FrameCryptor_kind_A = kind; + if (codec != null) { + t1.log$4(B.Level_INFO_800, "setting codec on cryptor to " + codec, null, null); + $async$self.codec = codec; + } + t1 = self.TransformStream; + t2 = operation === "encode" ? $async$self.get$encodeFunction() : $async$self.get$decodeFunction(); + t3 = type$.Future_void_Function_JSObject_JSObject; + t4 = type$.String; + t5 = type$.JSObject; + transformer = t5._as(new t1(t5._as(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["transform", A.allowInterop(t2, t3)], t4, t3))))); + try { + t5._as(t5._as(readable.pipeThrough(transformer)).pipeTo(writable)); + } catch (exception) { + e = A.unwrapException(exception); + $.$get$logger().log$4(B.Level_WARNING_900, "e " + J.toString$0$(e), null, null); + if ($async$self.lastError !== B.CryptorError_7) { + $async$self.lastError = B.CryptorError_7; + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", $async$self.participantIdentity, "state", "internalError", "error", "Internal error: " + J.toString$0$(e)], t4, type$.nullable_String))); + } + } + $async$self.trackId = trackId; + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$setupTransform$6$codec$kind$operation$readable$trackId$writable, $async$completer); + }, + getUnencryptedBytes$2(obj, codec) { + var naluIndices, t1, t2, _i, index, type, _null = null, frameType = "", + data = A.NativeUint8List_NativeUint8List$view(type$.NativeByteBuffer._as(obj.data), 0, _null); + if ("type" in obj) { + frameType = A._asString(obj.type); + $.$get$logger().log$4(B.Level_FINER_400, "frameType: " + frameType, _null, _null); + } + if (codec != null && codec.toLowerCase() === "h264") { + type$.Uint8List._as(data); + naluIndices = A.findNALUIndices(data); + for (t1 = naluIndices.length, t2 = data.length, _i = 0; _i < naluIndices.length; naluIndices.length === t1 || (0, 
A.throwConcurrentModificationError)(naluIndices), ++_i) { + index = naluIndices[_i]; + if (!(index < t2)) + return A.ioore(data, index); + type = data[index] & 31; + switch (type) { + case 5: + case 1: + t1 = index + 2; + $.$get$logger().log$4(B.Level_FINER_400, "unEncryptedBytes NALU of type " + type + ", offset " + t1, _null, _null); + return t1; + default: + $.$get$logger().log$4(B.Level_FINER_400, "skipping NALU of type " + type, _null, _null); + break; + } + } + throw A.wrapException(A.Exception_Exception("Could not find NALU")); + } + switch (frameType) { + case "key": + return 10; + case "delta": + return 3; + case "audio": + return 1; + default: + return 0; + } + }, + readFrameInfo$1(frameObj) { + var buffer, frameType, t1, synchronizationSource, timestamp; + new Uint8Array(0); + buffer = A.NativeUint8List_NativeUint8List$view(type$.NativeByteBuffer._as(frameObj.data), 0, null); + if ("type" in frameObj) { + frameType = A._asString(frameObj.type); + $.$get$logger().log$4(B.Level_FINER_400, "frameType: " + frameType, null, null); + } else + frameType = ""; + t1 = type$.JSObject; + synchronizationSource = A._asInt(t1._as(frameObj.getMetadata()).synchronizationSource); + if ("rtpTimestamp" in t1._as(frameObj.getMetadata())) + timestamp = B.JSInt_methods.toInt$0(A._asInt(t1._as(frameObj.getMetadata()).rtpTimestamp)); + else + timestamp = "timestamp" in frameObj ? 
A._asInt(A._asDouble(frameObj.timestamp)) : 0; + return new A.FrameInfo(frameType, synchronizationSource, timestamp, buffer); + }, + enqueueFrame$3(frameObj, controller, buffer) { + var t1 = type$.NativeByteBuffer._as(B.NativeUint8List_methods.get$buffer(buffer.toBytes$0())); + frameObj.data = t1; + controller.enqueue(frameObj); + }, + encodeFunction$2(frameObj, controller) { + var t1 = type$.JSObject; + return this.encodeFunction$body$FrameCryptor(t1._as(frameObj), t1._as(controller)); + }, + encodeFunction$body$FrameCryptor(frameObj, controller) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$returnValue, $async$handler = 2, $async$errorStack = [], $async$self = this, srcFrame, secretKey, keyIndex, headerLength, iv, frameTrailer, cipherText, finalBuffer, e, t1, t2, t3, t4, iv0, sendCount, t5, t6, t7, exception, $async$exception, $async$temp1; + var $async$encodeFunction$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) { + $async$errorStack.push($async$result); + $async$goto = $async$handler; + } + while (true) + switch ($async$goto) { + case 0: + // Function start + $async$handler = 4; + t1 = true; + if ($async$self.get$enabled()) { + t2 = type$.NativeByteBuffer; + if (!(t2._as(frameObj.data).byteLength === 0)) + t1 = t2._as(frameObj.data).byteLength === 0; + } + if (t1) { + if ($async$self.keyHandler.keyOptions.discardFrameWhenCryptorNotReady) { + // goto return + $async$goto = 1; + break; + } + controller.enqueue(frameObj); + // goto return + $async$goto = 1; + break; + } + srcFrame = $async$self.readFrameInfo$1(frameObj); + t1 = $.$get$logger(); + t1.log$4(B.Level_FINE_500, "encodeFunction: buffer " + srcFrame.buffer.length + ", synchronizationSource " + srcFrame.ssrc + " frameType " + srcFrame.frameType, null, null); + t2 = $async$self.keyHandler.getKeySet$1($async$self.currentKeyIndex); + secretKey = t2 == null ? 
null : t2.encryptionKey; + keyIndex = $async$self.currentKeyIndex; + if (secretKey == null) { + if ($async$self.lastError !== B.CryptorError_5) { + $async$self.lastError = B.CryptorError_5; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + t3 = $async$self.__FrameCryptor_kind_A; + t3 === $ && A.throwLateFieldNI("kind"); + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", t3, "state", "missingKey", "error", "Missing key for track " + t2], type$.String, type$.nullable_String))); + } + // goto return + $async$goto = 1; + break; + } + t2 = $async$self.__FrameCryptor_kind_A; + t2 === $ && A.throwLateFieldNI("kind"); + headerLength = t2 === "video" ? $async$self.getUnencryptedBytes$2(frameObj, $async$self.codec) : 1; + t3 = srcFrame.ssrc; + t4 = srcFrame.timestamp; + iv0 = new DataView(new ArrayBuffer(12)); + t2 = $async$self.sendCounts; + if (t2.$index(0, t3) == null) + t2.$indexSet(0, t3, $.$get$Random__secureRandom().nextInt$1(65535)); + sendCount = t2.$index(0, t3); + if (sendCount == null) + sendCount = 0; + iv0.setUint32(0, t3, false); + iv0.setUint32(4, t4, false); + iv0.setUint32(8, t4 - B.JSInt_methods.$mod(sendCount, 65535), false); + t2.$indexSet(0, t3, sendCount + 1); + iv = J.asUint8List$0$x(B.NativeByteData_methods.get$buffer(iv0)); + frameTrailer = new DataView(new ArrayBuffer(2)); + t2 = frameTrailer; + t2.$flags & 2 && A.throwUnsupportedOperation(t2, 6); + J._setInt8$2$x(t2, 0, 12); + t2 = frameTrailer; + t3 = A._asInt(keyIndex); + t2.$flags & 2 && A.throwUnsupportedOperation(t2, 6); + J._setInt8$2$x(t2, 1, t3); + t3 = $async$self.worker; + t2 = type$.JSObject; + t4 = t2._as(t2._as(t3.crypto).subtle); + t5 = type$.String; + t6 = type$.Object; + t7 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "AES-GCM", "iv", iv, "additionalData", B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, headerLength)], t5, 
t6)); + t6 = t7 == null ? t6._as(t7) : t7; + $async$temp1 = type$.NativeByteBuffer; + $async$goto = 7; + return A._asyncAwait(A.promiseToFuture(t2._as(t4.encrypt(t6, secretKey, B.NativeUint8List_methods.sublist$2(srcFrame.buffer, headerLength, srcFrame.buffer.length))), type$.nullable_Object), $async$encodeFunction$2); + case 7: + // returning from await. + cipherText = $async$temp1._as($async$result); + t1.log$4(B.Level_FINER_400, "encodeFunction: encrypted buffer: " + srcFrame.buffer.length + ", cipherText: " + A.NativeUint8List_NativeUint8List$view(cipherText, 0, null).length, null, null); + t2 = $.$get$_CopyingBytesBuilder__emptyList(); + finalBuffer = new A._CopyingBytesBuilder(t2); + J.add$1$ax(finalBuffer, new Uint8Array(A._ensureNativeList(B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, headerLength)))); + J.add$1$ax(finalBuffer, A.NativeUint8List_NativeUint8List$view(cipherText, 0, null)); + J.add$1$ax(finalBuffer, iv); + J.add$1$ax(finalBuffer, J.asUint8List$0$x(J.get$buffer$x(frameTrailer))); + $async$self.enqueueFrame$3(frameObj, controller, finalBuffer); + if ($async$self.lastError !== B.CryptorError_1) { + $async$self.lastError = B.CryptorError_1; + t3.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", $async$self.participantIdentity, "trackId", $async$self.trackId, "kind", $async$self.__FrameCryptor_kind_A, "state", "ok", "error", "encryption ok"], t5, type$.nullable_String))); + } + t1.log$4(B.Level_FINER_400, "encodeFunction[CryptorError.kOk]: frame enqueued kind " + $async$self.__FrameCryptor_kind_A + ",codec " + A.S($async$self.codec) + " headerLength: " + A.S(headerLength) + ", timestamp: " + srcFrame.timestamp + ", ssrc: " + srcFrame.ssrc + ", data length: " + srcFrame.buffer.length + ", encrypted length: " + finalBuffer.toBytes$0().length + ", iv " + A.S(iv), null, null); + $async$handler = 2; + // goto after finally + $async$goto = 6; + break; + case 4: + // catch + 
$async$handler = 3; + $async$exception = $async$errorStack.pop(); + e = A.unwrapException($async$exception); + $.$get$logger().log$4(B.Level_WARNING_900, "encodeFunction encrypt: e " + J.toString$0$(e), null, null); + if ($async$self.lastError !== B.CryptorError_3) { + $async$self.lastError = B.CryptorError_3; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + t3 = $async$self.__FrameCryptor_kind_A; + t3 === $ && A.throwLateFieldNI("kind"); + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", t3, "state", "encryptError", "error", J.toString$0$(e)], type$.String, type$.nullable_String))); + } + // goto after finally + $async$goto = 6; + break; + case 3: + // uncaught + // goto rethrow + $async$goto = 2; + break; + case 6: + // after finally + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + case 2: + // rethrow + return A._asyncRethrow($async$errorStack.at(-1), $async$completer); + } + }); + return A._asyncStartSync($async$encodeFunction$2, $async$completer); + }, + decodeFunction$2(frameObj, controller) { + var t1 = type$.JSObject; + return this.decodeFunction$body$FrameCryptor(t1._as(frameObj), t1._as(controller)); + }, + decodeFunction$body$FrameCryptor(frameObj, controller) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$returnValue, $async$handler = 2, $async$errorStack = [], $async$self = this, headerLength, frameTrailer, ivLength, keyIndex, iv, decryptFrameInternal, ratchedKeyInternal, e, finalBuffer, e0, t2, t3, t4, t5, t6, magicBytesBuffer, t7, initialKeySet, exception, t1, srcFrame, $async$exception, $async$exception1; + var $async$decodeFunction$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) { + $async$errorStack.push($async$result); + $async$goto = $async$handler; + } + while (true) + 
switch ($async$goto) { + case 0: + // Function start + t1 = {}; + srcFrame = $async$self.readFrameInfo$1(frameObj); + t1.ratchetCount = 0; + t2 = $.$get$logger(); + t2.log$4(B.Level_FINE_500, "decodeFunction: frame lenght " + srcFrame.buffer.length, null, null); + t1.initialKeySet = t1.decrypted = null; + t1.initialKeyIndex = $async$self.currentKeyIndex; + if (!$async$self.get$enabled() || srcFrame.buffer.length === 0) { + $async$self.sifGuard.recordUserFrame$0(); + if ($async$self.keyHandler.keyOptions.discardFrameWhenCryptorNotReady) { + // goto return + $async$goto = 1; + break; + } + t2.log$4(B.Level_FINE_500, "enqueing empty frame", null, null); + controller.enqueue(frameObj); + t2.log$4(B.Level_FINER_400, "enqueing silent frame", null, null); + // goto return + $async$goto = 1; + break; + } + t3 = $async$self.keyHandler.keyOptions.uncryptedMagicBytes; + if (t3 != null) { + t4 = srcFrame.buffer; + t5 = t3.length; + t6 = t5 + 1; + if (t4.length > t6) { + magicBytesBuffer = B.NativeUint8List_methods.sublist$2(srcFrame.buffer, srcFrame.buffer.length - t5 - 1, srcFrame.buffer.length - 1); + t2.log$4(B.Level_FINER_400, "magicBytesBuffer " + A.S(magicBytesBuffer) + ", magicBytes " + A.S(t3), null, null); + t4 = $async$self.sifGuard; + if (A.Iterable_iterableToFullString(magicBytesBuffer, "[", "]") === A.Iterable_iterableToFullString(t3, "[", "]")) { + ++t4.consecutiveSifCount; + if (t4.sifSequenceStartedAt == null) + t4.sifSequenceStartedAt = Date.now(); + t4.lastSifReceivedAt = Date.now(); + if (t4.consecutiveSifCount < 100) + if (t4.sifSequenceStartedAt != null) { + t1 = Date.now(); + t4 = t4.sifSequenceStartedAt; + t4.toString; + t4 = t1 - t4 < 2000; + t1 = t4; + } else + t1 = true; + else + t1 = false; + if (t1) { + t1 = B.NativeUint8List_methods.sublist$1(srcFrame.buffer, srcFrame.buffer.length - 1); + if (0 >= t1.length) { + $async$returnValue = A.ioore(t1, 0); + // goto return + $async$goto = 1; + break; + } + t2.log$4(B.Level_FINER_400, "ecodeFunction: skip 
uncrypted frame, type " + t1[0], null, null); + finalBuffer = new A._CopyingBytesBuilder($.$get$_CopyingBytesBuilder__emptyList()); + finalBuffer.add$1(0, new Uint8Array(A._ensureNativeList(B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, srcFrame.buffer.length - t6)))); + $async$self.enqueueFrame$3(frameObj, controller, finalBuffer); + t2.log$4(B.Level_FINE_500, "ecodeFunction: enqueing silent frame", null, null); + controller.enqueue(frameObj); + } else + t2.log$4(B.Level_FINER_400, "ecodeFunction: SIF limit reached, dropping frame", null, null); + t2.log$4(B.Level_FINER_400, "ecodeFunction: enqueing silent frame", null, null); + controller.enqueue(frameObj); + // goto return + $async$goto = 1; + break; + } else + t4.recordUserFrame$0(); + } + } + $async$handler = 4; + t3 = {}; + t4 = $async$self.__FrameCryptor_kind_A; + t4 === $ && A.throwLateFieldNI("kind"); + headerLength = t4 === "video" ? $async$self.getUnencryptedBytes$2(frameObj, $async$self.codec) : 1; + frameTrailer = B.NativeUint8List_methods.sublist$1(srcFrame.buffer, srcFrame.buffer.length - 2); + ivLength = J.$index$asx(frameTrailer, 0); + keyIndex = J.$index$asx(frameTrailer, 1); + t5 = srcFrame.buffer; + t6 = srcFrame.buffer; + t7 = ivLength; + if (typeof t7 !== "number") { + $async$returnValue = A.iae(t7); + // goto return + $async$goto = 1; + break; + } + iv = B.NativeUint8List_methods.sublist$2(t5, t6.length - t7 - 2, srcFrame.buffer.length - 2); + initialKeySet = t1.initialKeySet = $async$self.keyHandler.getKeySet$1(keyIndex); + t1.initialKeyIndex = keyIndex; + t2.log$4(B.Level_FINER_400, "decodeFunction: start decrypting frame headerLength " + A.S(headerLength) + " " + srcFrame.buffer.length + " frameTrailer " + A.S(frameTrailer) + ", ivLength " + A.S(ivLength) + ", keyIndex " + A.S(keyIndex) + ", iv " + A.S(iv), null, null); + if (initialKeySet == null || !$async$self.keyHandler._hasValidKey) { + if ($async$self.lastError !== B.CryptorError_5) { + $async$self.lastError = 
B.CryptorError_5; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", $async$self.__FrameCryptor_kind_A, "state", "missingKey", "error", "Missing key for track " + t2], type$.String, type$.nullable_String))); + } + // goto return + $async$goto = 1; + break; + } + t3.currentkeySet = initialKeySet; + decryptFrameInternal = new A.FrameCryptor_decodeFunction_decryptFrameInternal(t1, t3, $async$self, iv, srcFrame, headerLength, ivLength); + ratchedKeyInternal = new A.FrameCryptor_decodeFunction_ratchedKeyInternal(t1, t3, $async$self, decryptFrameInternal); + $async$handler = 8; + $async$goto = 11; + return A._asyncAwait(decryptFrameInternal.call$0(), $async$decodeFunction$2); + case 11: + // returning from await. + $async$handler = 4; + // goto after finally + $async$goto = 10; + break; + case 8: + // catch + $async$handler = 7; + $async$exception = $async$errorStack.pop(); + e = A.unwrapException($async$exception); + $async$self.lastError = B.CryptorError_7; + t2 = $.$get$logger(); + t2.log$4(B.Level_FINER_400, "decodeFunction: kInternalError catch " + A.S(e), null, null); + $async$goto = 12; + return A._asyncAwait(ratchedKeyInternal.call$0(), $async$decodeFunction$2); + case 12: + // returning from await. 
+ // goto after finally + $async$goto = 10; + break; + case 7: + // uncaught + // goto catch + $async$goto = 4; + break; + case 10: + // after finally + t3 = t1.decrypted; + if (t3 == null) { + t1 = A.Exception_Exception("[decodeFunction] decryption failed even after ratchting"); + throw A.wrapException(t1); + } + t4 = $async$self.keyHandler; + t4._decryptionFailureCount = 0; + t4._hasValidKey = true; + t2.log$4(B.Level_FINER_400, "decodeFunction: decryption success, buffer length " + srcFrame.buffer.length + ", decrypted: " + A.NativeUint8List_NativeUint8List$view(t3, 0, null).length, null, null); + t3 = $.$get$_CopyingBytesBuilder__emptyList(); + finalBuffer = new A._CopyingBytesBuilder(t3); + J.add$1$ax(finalBuffer, new Uint8Array(A._ensureNativeList(B.NativeUint8List_methods.sublist$2(srcFrame.buffer, 0, headerLength)))); + t1 = t1.decrypted; + t1.toString; + J.add$1$ax(finalBuffer, A.NativeUint8List_NativeUint8List$view(t1, 0, null)); + $async$self.enqueueFrame$3(frameObj, controller, finalBuffer); + if ($async$self.lastError !== B.CryptorError_1) { + $async$self.lastError = B.CryptorError_1; + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", $async$self.participantIdentity, "trackId", $async$self.trackId, "kind", $async$self.__FrameCryptor_kind_A, "state", "ok", "error", "decryption ok"], type$.String, type$.nullable_String))); + } + t2.log$4(B.Level_FINE_500, "decodeFunction[CryptorError.kOk]: decryption success kind " + $async$self.__FrameCryptor_kind_A + ", headerLength: " + A.S(headerLength) + ", timestamp: " + srcFrame.timestamp + ", ssrc: " + srcFrame.ssrc + ", data length: " + srcFrame.buffer.length + ", decrypted length: " + finalBuffer.toBytes$0().length + ", keyindex " + A.S(keyIndex) + " iv " + A.S(iv), null, null); + $async$handler = 2; + // goto after finally + $async$goto = 6; + break; + case 4: + // catch + $async$handler = 3; + $async$exception1 = 
$async$errorStack.pop(); + e0 = A.unwrapException($async$exception1); + if ($async$self.lastError !== B.CryptorError_2) { + $async$self.lastError = B.CryptorError_2; + t1 = $async$self.participantIdentity; + t2 = $async$self.trackId; + t3 = $async$self.__FrameCryptor_kind_A; + t3 === $ && A.throwLateFieldNI("kind"); + $async$self.worker.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t1, "trackId", t2, "kind", t3, "state", "decryptError", "error", J.toString$0$(e0)], type$.String, type$.nullable_String))); + } + $async$self.keyHandler.decryptionFailure$0(); + // goto after finally + $async$goto = 6; + break; + case 3: + // uncaught + // goto rethrow + $async$goto = 2; + break; + case 6: + // after finally + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + case 2: + // rethrow + return A._asyncRethrow($async$errorStack.at(-1), $async$completer); + } + }); + return A._asyncStartSync($async$decodeFunction$2, $async$completer); + } + }; + A.FrameCryptor_decodeFunction_decryptFrameInternal.prototype = { + call$0() { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, decrypted, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, $async$temp1; + var $async$call$0 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self.$this; + t2 = t1.worker; + t3 = type$.JSObject; + t4 = t3._as(t3._as(t2.crypto).subtle); + t5 = $async$self.srcFrame; + t6 = t5.buffer; + t7 = $async$self.headerLength; + t8 = type$.String; + t9 = type$.Object; + t10 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "AES-GCM", "iv", $async$self.iv, "additionalData", B.NativeUint8List_methods.sublist$2(t6, 0, t7)], t8, t9)); + t9 = t10 == null ? 
t9._as(t10) : t10; + t10 = $async$self._box_0; + $async$temp1 = type$.NativeByteBuffer; + $async$goto = 2; + return A._asyncAwait(A.promiseToFuture(t3._as(t4.decrypt(t9, t10.currentkeySet.encryptionKey, B.NativeUint8List_methods.sublist$2(t6, t7, t6.length - $async$self.ivLength - 2))), type$.nullable_Object), $async$call$0); + case 2: + // returning from await. + decrypted = $async$temp1._as($async$result); + t6 = $async$self._box_1; + t6.decrypted = decrypted; + t7 = $.$get$logger(); + t7.log$4(B.Level_FINER_400, string$.decode + A.NativeUint8List_NativeUint8List$view(decrypted, 0, null).length, null, null); + t3 = t6.decrypted; + if (t3 == null) + throw A.wrapException(A.Exception_Exception("[decryptFrameInternal] could not decrypt")); + t7.log$4(B.Level_FINER_400, string$.decode + A.NativeUint8List_NativeUint8List$view(t3, 0, null).length, null, null); + $async$goto = t10.currentkeySet !== t6.initialKeySet ? 3 : 4; + break; + case 3: + // then + t7.log$4(B.Level_FINE_500, "decodeFunction::decryptFrameInternal: ratchetKey: decryption ok, newState: kKeyRatcheted", null, null); + $async$goto = 5; + return A._asyncAwait(t1.keyHandler.setKeySetFromMaterial$2(t10.currentkeySet, t6.initialKeyIndex), $async$call$0); + case 5: + // returning from await. 
+ case 4: + // join + t3 = t1.lastError; + if (t3 !== B.CryptorError_1 && t3 !== B.CryptorError_6 && t6.ratchetCount > 0) { + t7.log$4(B.Level_FINER_400, "decodeFunction::decryptFrameInternal: KeyRatcheted: ssrc " + t5.ssrc + " timestamp " + t5.timestamp + " ratchetCount " + t6.ratchetCount + " participantId: " + A.S(t1.participantIdentity), null, null); + t7.log$4(B.Level_FINER_400, "decodeFunction::decryptFrameInternal: ratchetKey: lastError != CryptorError.kKeyRatcheted, reset state to kKeyRatcheted", null, null); + t1.lastError = B.CryptorError_6; + t3 = t1.participantIdentity; + t4 = t1.trackId; + t1 = t1.__FrameCryptor_kind_A; + t1 === $ && A.throwLateFieldNI("kind"); + t2.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorState", "msgType", "event", "participantId", t3, "trackId", t4, "kind", t1, "state", "keyRatcheted", "error", "Key ratcheted ok"], t8, type$.nullable_String))); + } + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$call$0, $async$completer); + }, + $signature: 10 + }; + A.FrameCryptor_decodeFunction_ratchedKeyInternal.prototype = { + call$0() { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, newKeyBuffer, newMaterial, t1, t2, t3, t4, t5, t6, $async$temp1; + var $async$call$0 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self._box_1; + t2 = t1.ratchetCount; + t3 = $async$self.$this; + t4 = t3.keyHandler; + t5 = t4.keyOptions; + t6 = t5.ratchetWindowSize; + if (t2 >= t6 || t6 <= 0) + throw A.wrapException(A.Exception_Exception("[ratchedKeyInternal] cannot ratchet anymore")); + t2 = $async$self._box_0; + $async$goto = 2; + return A._asyncAwait(t4.ratchet$2(t2.currentkeySet.material, t5.ratchetSalt), 
$async$call$0); + case 2: + // returning from await. + newKeyBuffer = $async$result; + $async$goto = 3; + return A._asyncAwait(t3.keyHandler.ratchetMaterial$2(t2.currentkeySet.material, J.get$buffer$x(newKeyBuffer)), $async$call$0); + case 3: + // returning from await. + newMaterial = $async$result; + t3 = t3.keyHandler; + $async$temp1 = t2; + $async$goto = 4; + return A._asyncAwait(t3.deriveKeys$2(newMaterial, t3.keyOptions.ratchetSalt), $async$call$0); + case 4: + // returning from await. + $async$temp1.currentkeySet = $async$result; + ++t1.ratchetCount; + $async$goto = 5; + return A._asyncAwait($async$self.decryptFrameInternal.call$0(), $async$call$0); + case 5: + // returning from await. + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$call$0, $async$completer); + }, + $signature: 10 + }; + A.KeyOptions.prototype = { + toString$0(_) { + var _this = this; + return "KeyOptions{sharedKey: " + _this.sharedKey + ", ratchetWindowSize: " + _this.ratchetWindowSize + ", failureTolerance: " + _this.failureTolerance + ", uncryptedMagicBytes: " + A.S(_this.uncryptedMagicBytes) + ", ratchetSalt: " + A.S(_this.ratchetSalt) + "}"; + } + }; + A.KeyProvider.prototype = { + getParticipantKeyHandler$1(participantIdentity) { + var t2, keys, _this = this, + t1 = _this.keyProviderOptions; + if (t1.sharedKey) + return _this.getSharedKeyHandler$0(); + t2 = _this.participantKeys; + keys = t2.$index(0, participantIdentity); + if (keys == null) { + keys = A.ParticipantKeyHandler$(t1, participantIdentity, _this.worker); + t1 = _this.sharedKey; + if (t1.length !== 0) + keys.setKey$1(t1); + t2.$indexSet(0, participantIdentity, keys); + } + return keys; + }, + getSharedKeyHandler$0() { + var _this = this, + t1 = _this.sharedKeyHandler; + return t1 == null ? 
_this.sharedKeyHandler = A.ParticipantKeyHandler$(_this.keyProviderOptions, "shared-key", _this.worker) : t1; + } + }; + A.KeySet.prototype = {}; + A.ParticipantKeyHandler.prototype = { + decryptionFailure$0() { + var _this = this, + t1 = _this.keyOptions.failureTolerance; + if (t1 < 0) + return; + if (++_this._decryptionFailureCount > t1) { + $.$get$logger().log$4(B.Level_WARNING_900, "key for " + _this.participantIdentity + " is being marked as invalid", null, null); + _this._hasValidKey = false; + } + }, + exportKey$1(keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.nullable_Uint8List), + $async$returnValue, $async$handler = 2, $async$errorStack = [], $async$self = this, key, e, exception, t1, currentMaterial, $async$exception, $async$temp1; + var $async$exportKey$1 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) { + $async$errorStack.push($async$result); + $async$goto = $async$handler; + } + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self.getKeySet$1(keyIndex); + currentMaterial = t1 == null ? null : t1.material; + if (currentMaterial == null) { + $async$returnValue = null; + // goto return + $async$goto = 1; + break; + } + $async$handler = 4; + t1 = type$.JSObject; + $async$temp1 = type$.NativeByteBuffer; + $async$goto = 7; + return A._asyncAwait(A.promiseToFuture(t1._as(t1._as(t1._as($async$self.worker.crypto).subtle).exportKey("raw", currentMaterial)), type$.nullable_Object), $async$exportKey$1); + case 7: + // returning from await. 
+ key = $async$temp1._as($async$result); + t1 = A.NativeUint8List_NativeUint8List$view(key, 0, null); + $async$returnValue = t1; + // goto return + $async$goto = 1; + break; + $async$handler = 2; + // goto after finally + $async$goto = 6; + break; + case 4: + // catch + $async$handler = 3; + $async$exception = $async$errorStack.pop(); + e = A.unwrapException($async$exception); + $.$get$logger().log$4(B.Level_WARNING_900, "exportKey: " + A.S(e), null, null); + $async$returnValue = null; + // goto return + $async$goto = 1; + break; + // goto after finally + $async$goto = 6; + break; + case 3: + // uncaught + // goto rethrow + $async$goto = 2; + break; + case 6: + // after finally + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + case 2: + // rethrow + return A._asyncRethrow($async$errorStack.at(-1), $async$completer); + } + }); + return A._asyncStartSync($async$exportKey$1, $async$completer); + }, + ratchetKey$1(keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.nullable_Uint8List), + $async$returnValue, $async$self = this, newKey, newKeySet, t1, currentMaterial; + var $async$ratchetKey$1 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = $async$self.getKeySet$1(keyIndex); + currentMaterial = t1 == null ? null : t1.material; + if (currentMaterial == null) { + $async$returnValue = null; + // goto return + $async$goto = 1; + break; + } + t1 = $async$self.keyOptions.ratchetSalt; + $async$goto = 3; + return A._asyncAwait($async$self.ratchet$2(currentMaterial, t1), $async$ratchetKey$1); + case 3: + // returning from await. 
+ newKey = $async$result; + $async$goto = 5; + return A._asyncAwait($async$self.ratchetMaterial$2(currentMaterial, B.NativeUint8List_methods.get$buffer(newKey)), $async$ratchetKey$1); + case 5: + // returning from await. + $async$goto = 4; + return A._asyncAwait($async$self.deriveKeys$2($async$result, t1), $async$ratchetKey$1); + case 4: + // returning from await. + newKeySet = $async$result; + $async$goto = 6; + return A._asyncAwait($async$self.setKeySetFromMaterial$2(newKeySet, keyIndex == null ? $async$self.currentKeyIndex : keyIndex), $async$ratchetKey$1); + case 6: + // returning from await. + $async$returnValue = newKey; + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$ratchetKey$1, $async$completer); + }, + ratchetMaterial$2(currentMaterial, newKeyBuffer) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.JSObject), + $async$returnValue, $async$self = this, t1; + var $async$ratchetMaterial$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = type$.JSObject; + $async$goto = 3; + return A._asyncAwait(A.promiseToFuture(A.callMethod(t1._as(t1._as($async$self.worker.crypto).subtle), "importKey", ["raw", type$.NativeByteBuffer._as(newKeyBuffer), type$.Object._as(t1._as(currentMaterial.algorithm).name), false, type$.JSArray_nullable_Object._as(A.jsify(A._setArrayType(["deriveBits", "deriveKey"], type$.JSArray_String)))], t1), t1), $async$ratchetMaterial$2); + case 3: + // returning from await. 
+ $async$returnValue = $async$result; + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$ratchetMaterial$2, $async$completer); + }, + getKeySet$1(keyIndex) { + var t2, + t1 = this.__ParticipantKeyHandler_cryptoKeyRing_A; + t1 === $ && A.throwLateFieldNI("cryptoKeyRing"); + t2 = keyIndex == null ? this.currentKeyIndex : keyIndex; + if (!(t2 >= 0 && t2 < t1.length)) + return A.ioore(t1, t2); + return t1[t2]; + }, + setKey$2$keyIndex(key, keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, t1, t2, t3; + var $async$setKey$2$keyIndex = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = type$.JSObject; + t2 = t1._as(t1._as($async$self.worker.crypto).subtle); + t3 = type$.String; + t3 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "PBKDF2"], t3, t3)); + if (t3 == null) + t3 = type$.Object._as(t3); + $async$goto = 4; + return A._asyncAwait(A.promiseToFuture(A.callMethod(t2, "importKey", ["raw", key, t3, false, type$.JSArray_nullable_Object._as(A.jsify(A._setArrayType(["deriveBits", "deriveKey"], type$.JSArray_String)))], t1), t1), $async$setKey$2$keyIndex); + case 4: + // returning from await. + $async$goto = 3; + return A._asyncAwait($async$self.deriveKeys$2($async$result, $async$self.keyOptions.ratchetSalt), $async$setKey$2$keyIndex); + case 3: + // returning from await. + $async$goto = 2; + return A._asyncAwait($async$self.setKeySetFromMaterial$2($async$result, keyIndex), $async$setKey$2$keyIndex); + case 2: + // returning from await. 
+ $async$self._decryptionFailureCount = 0; + $async$self._hasValidKey = true; + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$setKey$2$keyIndex, $async$completer); + }, + setKey$1(key) { + return this.setKey$2$keyIndex(key, 0); + }, + setKeySetFromMaterial$2(keySet, keyIndex) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.void), + $async$self = this, t1; + var $async$setKeySetFromMaterial$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + $.$get$logger().log$4(B.Level_CONFIG_700, "setKeySetFromMaterial: set new key, index: " + keyIndex, null, null); + if (keyIndex >= 0) { + t1 = $async$self.__ParticipantKeyHandler_cryptoKeyRing_A; + t1 === $ && A.throwLateFieldNI("cryptoKeyRing"); + $async$self.currentKeyIndex = B.JSInt_methods.$mod(keyIndex, t1.length); + } + t1 = $async$self.__ParticipantKeyHandler_cryptoKeyRing_A; + t1 === $ && A.throwLateFieldNI("cryptoKeyRing"); + B.JSArray_methods.$indexSet(t1, $async$self.currentKeyIndex, keySet); + // implicit return + return A._asyncReturn(null, $async$completer); + } + }); + return A._asyncStartSync($async$setKeySetFromMaterial$2, $async$completer); + }, + deriveKeys$2(material, salt) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.KeySet), + $async$returnValue, $async$self = this, t4, t5, t1, algorithmOptions, t2, t3, $async$temp1, $async$temp2, $async$temp3; + var $async$deriveKeys$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + t1 = type$.JSObject; + algorithmOptions = 
A.getAlgoOptions(A._asString(t1._as(material.algorithm).name), salt); + t2 = t1._as(t1._as($async$self.worker.crypto).subtle); + t3 = A.jsify(algorithmOptions); + if (t3 == null) + t3 = type$.Object._as(t3); + t4 = type$.Object; + t5 = A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["name", "AES-GCM", "length", 128], type$.String, t4)); + t4 = t5 == null ? t4._as(t5) : t5; + $async$temp1 = A; + $async$temp2 = material; + $async$temp3 = t1; + $async$goto = 3; + return A._asyncAwait(A.promiseToFuture(A.callMethod(t2, "deriveKey", [t3, material, t4, false, type$.JSArray_nullable_Object._as(A.jsify(A._setArrayType(["encrypt", "decrypt"], type$.JSArray_String)))], t1), type$.nullable_Object), $async$deriveKeys$2); + case 3: + // returning from await. + $async$returnValue = new $async$temp1.KeySet($async$temp2, $async$temp3._as($async$result)); + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$deriveKeys$2, $async$completer); + }, + ratchet$2(material, salt) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.Uint8List), + $async$returnValue, $async$self = this, algorithmOptions, t1, t2, t3, $async$temp1; + var $async$ratchet$2 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + algorithmOptions = A.getAlgoOptions("PBKDF2", salt); + t1 = type$.JSObject; + t2 = t1._as(t1._as($async$self.worker.crypto).subtle); + t3 = A.jsify(algorithmOptions); + if (t3 == null) + t3 = type$.Object._as(t3); + $async$temp1 = A; + $async$goto = 3; + return A._asyncAwait(A.promiseToFuture(t1._as(t2.deriveBits(t3, material, 256)), type$.NativeByteBuffer), $async$ratchet$2); + case 3: + // returning from await. 
+ $async$returnValue = $async$temp1.NativeUint8List_NativeUint8List$view($async$result, 0, null); + // goto return + $async$goto = 1; + break; + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$ratchet$2, $async$completer); + }, + set$__ParticipantKeyHandler_cryptoKeyRing_A(__ParticipantKeyHandler_cryptoKeyRing_A) { + this.__ParticipantKeyHandler_cryptoKeyRing_A = type$.List_nullable_KeySet._as(__ParticipantKeyHandler_cryptoKeyRing_A); + } + }; + A.SifGuard.prototype = { + recordUserFrame$0() { + var _this = this; + if (_this.sifSequenceStartedAt == null) + return; + if (++_this.userFramesSinceSif > _this.consecutiveSifCount || Date.now() - _this.lastSifReceivedAt > 2000) + _this.reset$0(); + }, + reset$0() { + this.consecutiveSifCount = this.userFramesSinceSif = 0; + this.sifSequenceStartedAt = null; + } + }; + A.getTrackCryptor_closure.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.unsetCryptorParticipant_closure.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main_closure.prototype = { + call$1(record) { + type$.LogRecord._as(record); + A.printString("[" + record.loggerName + "] " + record.level.name + ": " + record.message); + }, + $signature: 21 + }; + A.main_closure0.prototype = { + call$1($event) { + var t2, transformer, options, kind, participantId, trackId, codec, msgType, keyProviderId, keyProvider, cryptor, _null = null, + t1 = type$.JSObject; + t1._as($event); + t2 = $.$get$logger(); + t2.log$4(B.Level_INFO_800, "Got onrtctransform event", _null, _null); + transformer = t1._as($event.transformer); + transformer.handled = true; + options = t1._as(transformer.options); + kind = A._asString(options.kind); + participantId = A._asString(options.participantId); + trackId = A._asString(options.trackId); + codec = A._asStringQ(options.codec); + 
msgType = A._asString(options.msgType); + keyProviderId = A._asString(options.keyProviderId); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t2.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, _null, _null); + return; + } + cryptor = A.getTrackCryptor(participantId, trackId, keyProvider); + t2 = t1._as(transformer.readable); + t1 = t1._as(transformer.writable); + cryptor.setupTransform$6$codec$kind$operation$readable$trackId$writable(codec == null ? _null : codec, kind, msgType, t2, trackId, t1); + }, + $signature: 11 + }; + A.main_closure2.prototype = { + call$1(e) { + var $async$goto = 0, + $async$completer = A._makeAsyncAwaitCompleter(type$.Null), + $async$returnValue, options, keyProviderId, t2, t3, t4, t5, t6, t7, t8, keyProviderOptions, enabled, trackId, cryptors, _i, cryptor, kind, exist, participantId, readable, writable, keyProvider, key, keyIndex, newKey, c, sifTrailer, codec, msg, msgType, msgId, t1; + var $async$call$1 = A._wrapJsFunctionForAsync(function($async$errorCode, $async$result) { + if ($async$errorCode === 1) + return A._asyncRethrow($async$result, $async$completer); + while (true) + switch ($async$goto) { + case 0: + // Function start + msg = type$.Map_dynamic_dynamic._as(A.dartify(e.data)); + msgType = msg.$index(0, "msgType"); + msgId = A._asStringQ(msg.$index(0, "msgId")); + t1 = $.$get$logger(); + t1.log$4(B.Level_CONFIG_700, "Got message " + A.S(msgType) + ", msgId " + A.S(msgId), null, null); + case 3: + // switch + switch (msgType) { + case "keyProviderInit": + // goto case + $async$goto = 5; + break; + case "keyProviderDispose": + // goto case + $async$goto = 6; + break; + case "enable": + // goto case + $async$goto = 7; + break; + case "decode": + // goto case + $async$goto = 8; + break; + case "encode": + // goto case + $async$goto = 9; + break; + case "removeTransform": + // goto case + $async$goto = 10; + break; + case "setKey": + // goto case + $async$goto = 11; + 
break; + case "setSharedKey": + // goto case + $async$goto = 12; + break; + case "ratchetKey": + // goto case + $async$goto = 13; + break; + case "ratchetSharedKey": + // goto case + $async$goto = 14; + break; + case "setKeyIndex": + // goto case + $async$goto = 15; + break; + case "exportKey": + // goto case + $async$goto = 16; + break; + case "exportSharedKey": + // goto case + $async$goto = 17; + break; + case "setSifTrailer": + // goto case + $async$goto = 18; + break; + case "updateCodec": + // goto case + $async$goto = 19; + break; + case "dispose": + // goto case + $async$goto = 20; + break; + default: + // goto default + $async$goto = 21; + break; + } + break; + case 5: + // case + options = msg.$index(0, "keyOptions"); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + t2 = J.getInterceptor$asx(options); + t3 = A._asBool(t2.$index(options, "sharedKey")); + t4 = new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(t2.$index(options, "ratchetSalt"))))); + t5 = A._asInt(t2.$index(options, "ratchetWindowSize")); + t6 = t2.$index(options, "failureTolerance"); + t6 = A._asInt(t6 == null ? -1 : t6); + t7 = t2.$index(options, "uncryptedMagicBytes") != null ? new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(t2.$index(options, "uncryptedMagicBytes"))))) : null; + t8 = t2.$index(options, "keyRingSize"); + t8 = A._asInt(t8 == null ? 16 : t8); + t2 = t2.$index(options, "discardFrameWhenCryptorNotReady"); + keyProviderOptions = new A.KeyOptions(t3, t4, t5, t6, t7, t8, A._asBool(t2 == null ? 
false : t2)); + t1.log$4(B.Level_CONFIG_700, "Init with keyProviderOptions:\n " + keyProviderOptions.toString$0(0), null, null); + t1 = self; + t2 = type$.JSObject; + t3 = t2._as(t1.self); + t4 = type$.String; + t5 = new Uint8Array(0); + $.keyProviders.$indexSet(0, keyProviderId, new A.KeyProvider(t3, keyProviderOptions, A.LinkedHashMap_LinkedHashMap$_empty(t4, type$.ParticipantKeyHandler), t5)); + t2._as(t1.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "init", "msgId", msgId, "msgType", "response"], t4, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 6: + // case + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + t1.log$4(B.Level_CONFIG_700, "Dispose keyProvider " + keyProviderId, null, null); + $.keyProviders.remove$1(0, keyProviderId); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "dispose", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 7: + // case + enabled = A._asBool(msg.$index(0, "enabled")); + trackId = A._asString(msg.$index(0, "trackId")); + t2 = $.participantCryptors; + t3 = A._arrayInstanceType(t2); + t4 = t3._eval$1("WhereIterable<1>"); + cryptors = A.List_List$of(new A.WhereIterable(t2, t3._eval$1("bool(1)")._as(new A.main__closure(trackId)), t4), true, t4._eval$1("Iterable.E")); + for (t2 = cryptors.length, t3 = "" + enabled, t4 = "Set enable " + t3 + " for trackId ", t5 = "setEnabled[" + t3 + string$.___las, _i = 0; _i < t2; ++_i) { + cryptor = cryptors[_i]; + t1.log$4(B.Level_CONFIG_700, t4 + cryptor.trackId, null, null); + if (cryptor.lastError !== B.CryptorError_1) { + t1.log$4(B.Level_INFO_800, t5, null, null); + cryptor.lastError = B.CryptorError_0; + } + t1.log$4(B.Level_CONFIG_700, "setEnabled for " + A.S(cryptor.participantIdentity) + ", enabled: " + t3, null, null); + cryptor._enabled = enabled; + } + 
type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorEnabled", "enable", enabled, "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_Object))); + // goto after switch + $async$goto = 4; + break; + case 8: + // case + case 9: + // case + kind = msg.$index(0, "kind"); + exist = A._asBool(msg.$index(0, "exist")); + participantId = A._asString(msg.$index(0, "participantId")); + trackId = msg.$index(0, "trackId"); + t2 = type$.JSObject; + readable = t2._as(msg.$index(0, "readableStream")); + writable = t2._as(msg.$index(0, "writableStream")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + t1.log$4(B.Level_CONFIG_700, "SetupTransform for kind " + A.S(kind) + ", trackId " + A.S(trackId) + ", participantId " + participantId + ", " + J.get$runtimeType$(readable).toString$0(0) + " " + J.get$runtimeType$(writable).toString$0(0) + "}", null, null); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + t2._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorSetup", "participantId", participantId, "trackId", trackId, "exist", exist, "operation", msgType, "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto return + $async$goto = 1; + break; + } + A._asString(trackId); + cryptor = A.getTrackCryptor(participantId, trackId, keyProvider); + A._asString(msgType); + $async$goto = 22; + return A._asyncAwait(cryptor.setupTransform$5$kind$operation$readable$trackId$writable(A._asString(kind), msgType, readable, trackId, writable), $async$call$1); + case 22: + // returning from await. 
+ t2._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorSetup", "participantId", participantId, "trackId", trackId, "exist", exist, "operation", msgType, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + cryptor.lastError = B.CryptorError_0; + // goto after switch + $async$goto = 4; + break; + case 10: + // case + trackId = A._asString(msg.$index(0, "trackId")); + t1.log$4(B.Level_CONFIG_700, "Removing trackId " + trackId, null, null); + A.unsetCryptorParticipant(trackId); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorRemoved", "trackId", trackId, "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 11: + // case + case 12: + // case + key = new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(msg.$index(0, "key"))))); + keyIndex = A._asInt(msg.$index(0, "keyIndex")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + t2 = keyProvider.keyProviderOptions.sharedKey; + t3 = "" + keyIndex; + $async$goto = t2 ? 
23 : 25; + break; + case 23: + // then + t1.log$4(B.Level_CONFIG_700, "Set SharedKey keyIndex " + t3, null, null); + t1.log$4(B.Level_INFO_800, "setting shared key", null, null); + keyProvider.sharedKey = key; + keyProvider.getSharedKeyHandler$0().setKey$2$keyIndex(key, keyIndex); + // goto join + $async$goto = 24; + break; + case 25: + // else + participantId = A._asString(msg.$index(0, "participantId")); + t1.log$4(B.Level_CONFIG_700, "Set key for participant " + participantId + ", keyIndex " + t3, null, null); + $async$goto = 26; + return A._asyncAwait(keyProvider.getParticipantKeyHandler$1(participantId).setKey$2$keyIndex(key, keyIndex), $async$call$1); + case 26: + // returning from await. + case 24: + // join + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "participantId", msg.$index(0, "participantId"), "sharedKey", t2, "keyIndex", keyIndex, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto after switch + $async$goto = 4; + break; + case 13: + // case + case 14: + // case + keyIndex = msg.$index(0, "keyIndex"); + participantId = A._asString(msg.$index(0, "participantId")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + t2 = keyProvider.keyProviderOptions.sharedKey; + $async$goto = t2 ? 
27 : 29; + break; + case 27: + // then + t1.log$4(B.Level_CONFIG_700, "RatchetKey for SharedKey, keyIndex " + A.S(keyIndex), null, null); + $async$goto = 30; + return A._asyncAwait(keyProvider.getSharedKeyHandler$0().ratchetKey$1(A._asIntQ(keyIndex)), $async$call$1); + case 30: + // returning from await. + newKey = $async$result; + // goto join + $async$goto = 28; + break; + case 29: + // else + t1.log$4(B.Level_CONFIG_700, "RatchetKey for participant " + participantId + ", keyIndex " + A.S(keyIndex), null, null); + $async$goto = 31; + return A._asyncAwait(keyProvider.getParticipantKeyHandler$1(participantId).ratchetKey$1(A._asIntQ(keyIndex)), $async$call$1); + case 31: + // returning from await. + newKey = $async$result; + case 28: + // join + t1 = type$.JSObject._as(self.self); + t1.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "ratchetKey", "sharedKey", t2, "participantId", participantId, "newKey", newKey != null ? B.C_Base64Encoder.convert$1(type$.Base64Codec._eval$1("Codec.S")._as(newKey)) : "", "keyIndex", keyIndex, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto after switch + $async$goto = 4; + break; + case 15: + // case + keyIndex = msg.$index(0, "index"); + trackId = A._asString(msg.$index(0, "trackId")); + t1.log$4(B.Level_CONFIG_700, "Setup key index for track " + trackId, null, null); + t2 = $.participantCryptors; + t3 = A._arrayInstanceType(t2); + t4 = t3._eval$1("WhereIterable<1>"); + cryptors = A.List_List$of(new A.WhereIterable(t2, t3._eval$1("bool(1)")._as(new A.main__closure0(trackId)), t4), true, t4._eval$1("Iterable.E")); + for (t2 = cryptors.length, _i = 0; _i < t2; ++_i) { + c = cryptors[_i]; + t1.log$4(B.Level_CONFIG_700, "Set keyIndex for trackId " + c.trackId, null, null); + A._asInt(keyIndex); + if (c.lastError !== B.CryptorError_1) { + t1.log$4(B.Level_INFO_800, "setKeyIndex: lastError != CryptorError.kOk, reset state to kNew", null, null); + c.lastError = B.CryptorError_0; + } + 
t1.log$4(B.Level_CONFIG_700, "setKeyIndex for " + A.S(c.participantIdentity) + ", newIndex: " + keyIndex, null, null); + c.currentKeyIndex = keyIndex; + } + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKeyIndex", "keyIndex", keyIndex, "msgId", msgId, "msgType", "response"], type$.String, type$.dynamic))); + // goto after switch + $async$goto = 4; + break; + case 16: + // case + case 17: + // case + keyIndex = A._asInt(msg.$index(0, "keyIndex")); + participantId = A._asString(msg.$index(0, "participantId")); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + t2 = "" + keyIndex; + $async$goto = keyProvider.keyProviderOptions.sharedKey ? 32 : 34; + break; + case 32: + // then + t1.log$4(B.Level_CONFIG_700, "Export SharedKey keyIndex " + t2, null, null); + $async$goto = 35; + return A._asyncAwait(keyProvider.getSharedKeyHandler$0().exportKey$1(keyIndex), $async$call$1); + case 35: + // returning from await. + key = $async$result; + // goto join + $async$goto = 33; + break; + case 34: + // else + t1.log$4(B.Level_CONFIG_700, "Export key for participant " + participantId + ", keyIndex " + t2, null, null); + $async$goto = 36; + return A._asyncAwait(keyProvider.getParticipantKeyHandler$1(participantId).exportKey$1(keyIndex), $async$call$1); + case 36: + // returning from await. 
+ key = $async$result; + case 33: + // join + t1 = type$.JSObject._as(self.self); + t1.postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "exportKey", "participantId", participantId, "keyIndex", keyIndex, "exportedKey", key != null ? B.C_Base64Encoder.convert$1(type$.Base64Codec._eval$1("Codec.S")._as(key)) : "", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_Object))); + // goto after switch + $async$goto = 4; + break; + case 18: + // case + sifTrailer = new Uint8Array(A._ensureNativeList(B.C_Base64Decoder.convert$1(A._asString(msg.$index(0, "sifTrailer"))))); + keyProviderId = A._asString(msg.$index(0, "keyProviderId")); + keyProvider = $.keyProviders.$index(0, keyProviderId); + if (keyProvider == null) { + t1.log$4(B.Level_WARNING_900, "KeyProvider not found for " + keyProviderId, null, null); + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setKey", "error", "KeyProvider not found", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto return + $async$goto = 1; + break; + } + keyProvider.keyProviderOptions.uncryptedMagicBytes = sifTrailer; + t1.log$4(B.Level_CONFIG_700, "SetSifTrailer = " + A.S(sifTrailer), null, null); + for (t2 = $.participantCryptors, t3 = t2.length, _i = 0; _i < t2.length; t2.length === t3 || (0, A.throwConcurrentModificationError)(t2), ++_i) { + c = t2[_i]; + t1.log$4(B.Level_CONFIG_700, "setSifTrailer for " + A.S(c.participantIdentity) + ", magicBytes: " + A.S(sifTrailer), null, null); + c.keyHandler.keyOptions.uncryptedMagicBytes = sifTrailer; + } + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "setSifTrailer", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 19: + // case + codec = A._asString(msg.$index(0, "codec")); + trackId = A._asString(msg.$index(0, "trackId")); + 
t1.log$4(B.Level_CONFIG_700, "Update codec for trackId " + trackId + ", codec " + codec, null, null); + cryptor = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.main__closure1(trackId), type$.FrameCryptor); + if (cryptor != null) { + if (cryptor.lastError !== B.CryptorError_1) { + t1.log$4(B.Level_INFO_800, "updateCodec[" + codec + string$.___las, null, null); + cryptor.lastError = B.CryptorError_0; + } + t1.log$4(B.Level_CONFIG_700, "updateCodec for " + A.S(cryptor.participantIdentity) + ", codec: " + codec, null, null); + cryptor.codec = codec; + } + type$.JSObject._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "updateCodec", "msgId", msgId, "msgType", "response"], type$.String, type$.nullable_String))); + // goto after switch + $async$goto = 4; + break; + case 20: + // case + trackId = A._asString(msg.$index(0, "trackId")); + t1.log$4(B.Level_CONFIG_700, "Dispose for trackId " + trackId, null, null); + cryptor = A.IterableExtension_firstWhereOrNull($.participantCryptors, new A.main__closure2(trackId), type$.FrameCryptor); + t1 = type$.JSObject; + t2 = type$.String; + t3 = type$.nullable_String; + if (cryptor != null) { + cryptor.lastError = B.CryptorError_8; + t1._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorDispose", "participantId", cryptor.participantIdentity, "trackId", trackId, "msgId", msgId, "msgType", "response"], t2, t3))); + } else + t1._as(self.self).postMessage(A.jsify(A.LinkedHashMap_LinkedHashMap$_literal(["type", "cryptorDispose", "error", "cryptor not found", "msgId", msgId, "msgType", "response"], t2, t3))); + // goto after switch + $async$goto = 4; + break; + case 21: + // default + t1.log$4(B.Level_WARNING_900, "Unknown message kind " + msg.toString$0(0), null, null); + case 4: + // after switch + case 1: + // return + return A._asyncReturn($async$returnValue, $async$completer); + } + }); + return A._asyncStartSync($async$call$1, 
$async$completer); + }, + $signature: 22 + }; + A.main__closure.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main__closure0.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main__closure1.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main__closure2.prototype = { + call$1(c) { + return type$.FrameCryptor._as(c).trackId === this.trackId; + }, + $signature: 1 + }; + A.main_closure1.prototype = { + call$1(e) { + this.handleMessage.call$1(type$.JSObject._as(e)); + }, + $signature: 11 + }; + A.Level.prototype = { + $eq(_, other) { + if (other == null) + return false; + return other instanceof A.Level && this.value === other.value; + }, + get$hashCode(_) { + return this.value; + }, + toString$0(_) { + return this.name; + } + }; + A.LogRecord.prototype = { + toString$0(_) { + return "[" + this.level.name + "] " + this.loggerName + ": " + this.message; + } + }; + A.Logger.prototype = { + get$fullName() { + var t1 = this.parent, + t2 = t1 == null ? null : t1.name.length !== 0, + t3 = this.name; + return t2 === true ? t1.get$fullName() + "." 
+ t3 : t3; + }, + get$level() { + var t1, effectiveLevel; + if (this.parent == null) { + t1 = this._level; + t1.toString; + effectiveLevel = t1; + } else { + t1 = $.$get$Logger_root()._level; + t1.toString; + effectiveLevel = t1; + } + return effectiveLevel; + }, + log$4(logLevel, message, error, stackTrace) { + var record, _this = this, + t1 = logLevel.value; + if (t1 >= _this.get$level().value) { + if (t1 >= 2000) { + A.StackTrace_current(); + logLevel.toString$0(0); + } + t1 = _this.get$fullName(); + Date.now(); + $.LogRecord__nextNumber = $.LogRecord__nextNumber + 1; + record = new A.LogRecord(logLevel, message, t1); + if (_this.parent == null) + _this._publish$1(record); + else + $.$get$Logger_root()._publish$1(record); + } + }, + _getStream$0() { + if (this.parent == null) { + var t1 = this._controller; + if (t1 == null) { + t1 = new A._SyncBroadcastStreamController(null, null, type$._SyncBroadcastStreamController_LogRecord); + this.set$_controller(t1); + } + return new A._BroadcastStream(t1, A._instanceType(t1)._eval$1("_BroadcastStream<1>")); + } else + return $.$get$Logger_root()._getStream$0(); + }, + _publish$1(record) { + var t1 = this._controller; + if (t1 != null) { + A._instanceType(t1)._precomputed1._as(record); + if (!t1.get$_mayAddEvent()) + A.throwExpression(t1._addEventError$0()); + t1._sendData$1(record); + } + return null; + }, + set$_controller(_controller) { + this._controller = type$.nullable_StreamController_LogRecord._as(_controller); + } + }; + A.Logger_Logger_closure.prototype = { + call$0() { + var dot, $parent, t1, + thisName = this.name; + if (B.JSString_methods.startsWith$1(thisName, ".")) + A.throwExpression(A.ArgumentError$("name shouldn't start with a '.'", null)); + if (B.JSString_methods.endsWith$1(thisName, ".")) + A.throwExpression(A.ArgumentError$("name shouldn't end with a '.'", null)); + dot = B.JSString_methods.lastIndexOf$1(thisName, "."); + if (dot === -1) + $parent = thisName !== "" ? 
A.Logger_Logger("") : null; + else { + $parent = A.Logger_Logger(B.JSString_methods.substring$2(thisName, 0, dot)); + thisName = B.JSString_methods.substring$1(thisName, dot + 1); + } + t1 = new A.Logger(thisName, $parent, A.LinkedHashMap_LinkedHashMap$_empty(type$.String, type$.Logger)); + if ($parent == null) + t1._level = B.Level_INFO_800; + else + $parent._children.$indexSet(0, thisName, t1); + return t1; + }, + $signature: 23 + }; + (function aliases() { + var _ = J.LegacyJavaScriptObject.prototype; + _.super$LegacyJavaScriptObject$toString = _.toString$0; + _ = A._BroadcastStreamController.prototype; + _.super$_BroadcastStreamController$_addEventError = _._addEventError$0; + })(); + (function installTearOffs() { + var _static_1 = hunkHelpers._static_1, + _static_0 = hunkHelpers._static_0, + _static_2 = hunkHelpers._static_2, + _instance_2_u = hunkHelpers._instance_2u, + _instance_0_u = hunkHelpers._instance_0u; + _static_1(A, "async__AsyncRun__scheduleImmediateJsOverride$closure", "_AsyncRun__scheduleImmediateJsOverride", 4); + _static_1(A, "async__AsyncRun__scheduleImmediateWithSetImmediate$closure", "_AsyncRun__scheduleImmediateWithSetImmediate", 4); + _static_1(A, "async__AsyncRun__scheduleImmediateWithTimer$closure", "_AsyncRun__scheduleImmediateWithTimer", 4); + _static_0(A, "async___startMicrotaskLoop$closure", "_startMicrotaskLoop", 0); + _static_2(A, "async___nullErrorHandler$closure", "_nullErrorHandler", 6); + _static_0(A, "async___nullDoneHandler$closure", "_nullDoneHandler", 0); + _instance_2_u(A._Future.prototype, "get$_completeError", "_completeError$2", 6); + _instance_0_u(A._DoneStreamSubscription.prototype, "get$_onMicrotask", "_onMicrotask$0", 0); + var _; + _instance_2_u(_ = A.FrameCryptor.prototype, "get$encodeFunction", "encodeFunction$2", 9); + _instance_2_u(_, "get$decodeFunction", "decodeFunction$2", 9); + })(); + (function inheritance() { + var _mixin = hunkHelpers.mixin, + _inherit = hunkHelpers.inherit, + _inheritMany = 
hunkHelpers.inheritMany; + _inherit(A.Object, null); + _inheritMany(A.Object, [A.JS_CONST, J.Interceptor, J.ArrayIterator, A._CopyingBytesBuilder, A.Error, A.SentinelValue, A.Iterable, A.ListIterator, A.MappedIterator, A.WhereIterator, A.FixedLengthListMixin, A.Symbol, A.MapView, A.ConstantMap, A._KeysOrValuesOrElementsIterator, A.JSInvocationMirror, A.Closure, A.TypeErrorDecoder, A.NullThrownFromJavaScriptException, A.ExceptionAndStackTrace, A._StackTrace, A._Required, A.MapBase, A.LinkedHashMapCell, A.LinkedHashMapKeyIterator, A._UnmodifiableNativeByteBufferView, A.Rti, A._FunctionParameters, A._Type, A._TimerImpl, A._AsyncAwaitCompleter, A.AsyncError, A.Stream, A._BufferingStreamSubscription, A._BroadcastStreamController, A._Completer, A._FutureListener, A._Future, A._AsyncCallbackEntry, A._DelayedEvent, A._PendingEvents, A._DoneStreamSubscription, A._StreamIterator, A._Zone, A._HashMapKeyIterator, A.ListBase, A._UnmodifiableMapMixin, A.Codec, A.Converter, A._Base64Encoder, A._Base64Decoder, A.DateTime, A._Enum, A.OutOfMemoryError, A.StackOverflowError, A._Exception, A.FormatException, A.Null, A._StringStackTrace, A.StringBuffer, A.NullRejectionException, A._JSSecureRandom, A.FrameInfo, A.FrameCryptor, A.KeyOptions, A.KeyProvider, A.KeySet, A.ParticipantKeyHandler, A.SifGuard, A.Level, A.LogRecord, A.Logger]); + _inheritMany(J.Interceptor, [J.JSBool, J.JSNull, J.JavaScriptObject, J.JavaScriptBigInt, J.JavaScriptSymbol, J.JSNumber, J.JSString]); + _inheritMany(J.JavaScriptObject, [J.LegacyJavaScriptObject, J.JSArray, A.NativeByteBuffer, A.NativeTypedData]); + _inheritMany(J.LegacyJavaScriptObject, [J.PlainJavaScriptObject, J.UnknownJavaScriptObject, J.JavaScriptFunction]); + _inherit(J.JSUnmodifiableArray, J.JSArray); + _inheritMany(J.JSNumber, [J.JSInt, J.JSNumNotInt]); + _inheritMany(A.Error, [A.LateError, A.TypeError, A.JsNoSuchMethodError, A.UnknownJsTypeError, A._CyclicInitializationError, A.RuntimeError, A.AssertionError, A._Error, A.ArgumentError, 
A.NoSuchMethodError, A.UnsupportedError, A.UnimplementedError, A.StateError, A.ConcurrentModificationError]); + _inheritMany(A.Iterable, [A.EfficientLengthIterable, A.MappedIterable, A.WhereIterable, A._KeysOrValues]); + _inheritMany(A.EfficientLengthIterable, [A.ListIterable, A.LinkedHashMapKeysIterable, A._HashMapKeyIterable]); + _inherit(A.EfficientLengthMappedIterable, A.MappedIterable); + _inherit(A.MappedListIterable, A.ListIterable); + _inherit(A._UnmodifiableMapView_MapView__UnmodifiableMapMixin, A.MapView); + _inherit(A.UnmodifiableMapView, A._UnmodifiableMapView_MapView__UnmodifiableMapMixin); + _inherit(A.ConstantMapView, A.UnmodifiableMapView); + _inherit(A.ConstantStringMap, A.ConstantMap); + _inheritMany(A.Closure, [A.Closure2Args, A.Closure0Args, A.TearOffClosure, A.initHooks_closure, A.initHooks_closure1, A._AsyncRun__initializeScheduleImmediate_internalCallback, A._AsyncRun__initializeScheduleImmediate_closure, A._awaitOnObject_closure, A._SyncBroadcastStreamController__sendData_closure, A._Future__chainForeignFuture_closure, A._Future__propagateToListeners_handleWhenCompleteCallback_closure, A.Stream_length_closure, A.jsify__convert, A.promiseToFuture_closure, A.promiseToFuture_closure0, A.dartify_convert, A.getTrackCryptor_closure, A.unsetCryptorParticipant_closure, A.main_closure, A.main_closure0, A.main_closure2, A.main__closure, A.main__closure0, A.main__closure1, A.main__closure2, A.main_closure1]); + _inheritMany(A.Closure2Args, [A.Primitives_functionNoSuchMethod_closure, A.initHooks_closure0, A._awaitOnObject_closure0, A._wrapJsFunctionForAsync_closure, A._Future__chainForeignFuture_closure0, A._Future__propagateToListeners_handleWhenCompleteCallback_closure0, A.MapBase_mapToString_closure, A.NoSuchMethodError_toString_closure]); + _inherit(A.NullError, A.TypeError); + _inheritMany(A.TearOffClosure, [A.StaticClosure, A.BoundClosure]); + _inherit(A._AssertionError, A.AssertionError); + _inheritMany(A.MapBase, [A.JsLinkedHashMap, 
A._HashMap]); + _inheritMany(A.NativeTypedData, [A.NativeByteData, A.NativeTypedArray]); + _inheritMany(A.NativeTypedArray, [A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin, A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin]); + _inherit(A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin, A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin); + _inherit(A.NativeTypedArrayOfDouble, A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin); + _inherit(A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin, A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin); + _inherit(A.NativeTypedArrayOfInt, A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin); + _inheritMany(A.NativeTypedArrayOfDouble, [A.NativeFloat32List, A.NativeFloat64List]); + _inheritMany(A.NativeTypedArrayOfInt, [A.NativeInt16List, A.NativeInt32List, A.NativeInt8List, A.NativeUint16List, A.NativeUint32List, A.NativeUint8ClampedList, A.NativeUint8List]); + _inherit(A._TypeError, A._Error); + _inheritMany(A.Closure0Args, [A._AsyncRun__scheduleImmediateJsOverride_internalCallback, A._AsyncRun__scheduleImmediateWithSetImmediate_internalCallback, A._TimerImpl_internalCallback, A._Future__addListener_closure, A._Future__prependListeners_closure, A._Future__chainForeignFuture_closure1, A._Future__chainCoreFuture_closure, A._Future__asyncCompleteWithValue_closure, A._Future__asyncCompleteError_closure, A._Future__propagateToListeners_handleWhenCompleteCallback, A._Future__propagateToListeners_handleValueCallback, A._Future__propagateToListeners_handleError, A.Stream_length_closure0, A._PendingEvents_schedule_closure, A._rootHandleError_closure, A._RootZone_bindCallbackGuarded_closure, A.FrameCryptor_decodeFunction_decryptFrameInternal, A.FrameCryptor_decodeFunction_ratchedKeyInternal, A.Logger_Logger_closure]); + _inherit(A._StreamImpl, A.Stream); + _inherit(A._ControllerStream, A._StreamImpl); + 
_inherit(A._BroadcastStream, A._ControllerStream); + _inherit(A._ControllerSubscription, A._BufferingStreamSubscription); + _inherit(A._BroadcastSubscription, A._ControllerSubscription); + _inherit(A._SyncBroadcastStreamController, A._BroadcastStreamController); + _inherit(A._AsyncCompleter, A._Completer); + _inherit(A._DelayedData, A._DelayedEvent); + _inherit(A._RootZone, A._Zone); + _inherit(A._IdentityHashMap, A._HashMap); + _inherit(A.Base64Codec, A.Codec); + _inheritMany(A.Converter, [A.Base64Encoder, A.Base64Decoder]); + _inheritMany(A.ArgumentError, [A.RangeError, A.IndexError]); + _inherit(A.CryptorError, A._Enum); + _mixin(A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin, A.ListBase); + _mixin(A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin, A.FixedLengthListMixin); + _mixin(A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin, A.ListBase); + _mixin(A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin, A.FixedLengthListMixin); + _mixin(A._UnmodifiableMapView_MapView__UnmodifiableMapMixin, A._UnmodifiableMapMixin); + })(); + var init = { + typeUniverse: {eC: new Map(), tR: {}, eT: {}, tPV: {}, sEA: []}, + mangledGlobalNames: {int: "int", double: "double", num: "num", String: "String", bool: "bool", Null: "Null", List: "List", Object: "Object", Map: "Map"}, + mangledNames: {}, + types: ["~()", "bool(FrameCryptor)", "Null(@)", "~(@)", "~(~())", "Null()", "~(Object,StackTrace)", "Null(Object,StackTrace)", "Object?(Object?)", "Future<~>(JSObject,JSObject)", "Future<~>()", "Null(JSObject)", "~(String,@)", "@(@)", "@(@,String)", "@(String)", "Null(~())", "Null(@,StackTrace)", "~(int,@)", "~(Object?,Object?)", "~(Symbol0,@)", "~(LogRecord)", "Future(JSObject)", "Logger()"], + interceptorsByTag: null, + leafTags: null, + arrayRti: Symbol("$ti") + }; + A._Universe_addRules(init.typeUniverse, 
JSON.parse('{"JavaScriptFunction":"LegacyJavaScriptObject","PlainJavaScriptObject":"LegacyJavaScriptObject","UnknownJavaScriptObject":"LegacyJavaScriptObject","JSBool":{"bool":[],"TrustedGetRuntimeType":[]},"JSNull":{"Null":[],"TrustedGetRuntimeType":[]},"JavaScriptObject":{"JSObject":[]},"LegacyJavaScriptObject":{"JSObject":[]},"JSArray":{"List":["1"],"EfficientLengthIterable":["1"],"JSObject":[],"Iterable":["1"]},"JSUnmodifiableArray":{"JSArray":["1"],"List":["1"],"EfficientLengthIterable":["1"],"JSObject":[],"Iterable":["1"]},"ArrayIterator":{"Iterator":["1"]},"JSNumber":{"double":[],"num":[]},"JSInt":{"double":[],"int":[],"num":[],"TrustedGetRuntimeType":[]},"JSNumNotInt":{"double":[],"num":[],"TrustedGetRuntimeType":[]},"JSString":{"String":[],"Pattern":[],"TrustedGetRuntimeType":[]},"_CopyingBytesBuilder":{"BytesBuilder":[]},"LateError":{"Error":[]},"EfficientLengthIterable":{"Iterable":["1"]},"ListIterable":{"EfficientLengthIterable":["1"],"Iterable":["1"]},"ListIterator":{"Iterator":["1"]},"MappedIterable":{"Iterable":["2"],"Iterable.E":"2"},"EfficientLengthMappedIterable":{"MappedIterable":["1","2"],"EfficientLengthIterable":["2"],"Iterable":["2"],"Iterable.E":"2"},"MappedIterator":{"Iterator":["2"]},"MappedListIterable":{"ListIterable":["2"],"EfficientLengthIterable":["2"],"Iterable":["2"],"Iterable.E":"2","ListIterable.E":"2"},"WhereIterable":{"Iterable":["1"],"Iterable.E":"1"},"WhereIterator":{"Iterator":["1"]},"Symbol":{"Symbol0":[]},"ConstantMapView":{"UnmodifiableMapView":["1","2"],"_UnmodifiableMapView_MapView__UnmodifiableMapMixin":["1","2"],"MapView":["1","2"],"_UnmodifiableMapMixin":["1","2"],"Map":["1","2"]},"ConstantMap":{"Map":["1","2"]},"ConstantStringMap":{"ConstantMap":["1","2"],"Map":["1","2"]},"_KeysOrValues":{"Iterable":["1"],"Iterable.E":"1"},"_KeysOrValuesOrElementsIterator":{"Iterator":["1"]},"JSInvocationMirror":{"Invocation":[]},"NullError":{"TypeError":[],"Error":[]},"JsNoSuchMethodError":{"Error":[]},"UnknownJsTypeError":{"Error":[
]},"_StackTrace":{"StackTrace":[]},"Closure":{"Function":[]},"Closure0Args":{"Function":[]},"Closure2Args":{"Function":[]},"TearOffClosure":{"Function":[]},"StaticClosure":{"Function":[]},"BoundClosure":{"Function":[]},"_CyclicInitializationError":{"Error":[]},"RuntimeError":{"Error":[]},"_AssertionError":{"Error":[]},"JsLinkedHashMap":{"MapBase":["1","2"],"LinkedHashMap":["1","2"],"Map":["1","2"]},"LinkedHashMapKeysIterable":{"EfficientLengthIterable":["1"],"Iterable":["1"],"Iterable.E":"1"},"LinkedHashMapKeyIterator":{"Iterator":["1"]},"NativeByteBuffer":{"JSObject":[],"ByteBuffer":[],"TrustedGetRuntimeType":[]},"NativeTypedData":{"JSObject":[]},"_UnmodifiableNativeByteBufferView":{"ByteBuffer":[]},"NativeByteData":{"ByteData":[],"JSObject":[],"TrustedGetRuntimeType":[]},"NativeTypedArray":{"JavaScriptIndexingBehavior":["1"],"JSObject":[]},"NativeTypedArrayOfDouble":{"ListBase":["double"],"NativeTypedArray":["double"],"List":["double"],"JavaScriptIndexingBehavior":["double"],"EfficientLengthIterable":["double"],"JSObject":[],"Iterable":["double"],"FixedLengthListMixin":["double"]},"NativeTypedArrayOfInt":{"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"]},"NativeFloat32List":{"Float32List":[],"ListBase":["double"],"NativeTypedArray":["double"],"List":["double"],"JavaScriptIndexingBehavior":["double"],"EfficientLengthIterable":["double"],"JSObject":[],"Iterable":["double"],"FixedLengthListMixin":["double"],"TrustedGetRuntimeType":[],"ListBase.E":"double"},"NativeFloat64List":{"Float64List":[],"ListBase":["double"],"NativeTypedArray":["double"],"List":["double"],"JavaScriptIndexingBehavior":["double"],"EfficientLengthIterable":["double"],"JSObject":[],"Iterable":["double"],"FixedLengthListMixin":["double"],"TrustedGetRuntimeType":[],"ListBase.E":"double"},"NativeInt16List":{"Int16List":[],"ListBase":["int"],"NativeTypedArr
ay":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeInt32List":{"Int32List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeInt8List":{"Int8List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint16List":{"Uint16List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint32List":{"Uint32List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint8ClampedList":{"Uint8ClampedList":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"NativeUint8List":{"Uint8List":[],"ListBase":["int"],"NativeTypedArray":["int"],"List":["int"],"JavaScriptIndexingBehavior":["int"],"EfficientLengthIterable":["int"],"JSObject":[],"Iterable":["int"],"FixedLengthListMixin":["int"],"TrustedGetRuntimeType":[],"ListBase.E":"int"},"_Error":{"Error":[]},"_TypeError":{"TypeError":[],"Error":[]},"_BufferingStreamSubscription":{"StreamSubscriptio
n":["1"],"_EventDispatch":["1"]},"AsyncError":{"Error":[]},"_BroadcastStream":{"_ControllerStream":["1"],"_StreamImpl":["1"],"Stream":["1"]},"_BroadcastSubscription":{"_ControllerSubscription":["1"],"_BufferingStreamSubscription":["1"],"StreamSubscription":["1"],"_EventDispatch":["1"]},"_BroadcastStreamController":{"StreamController":["1"],"_StreamControllerLifecycle":["1"],"_EventDispatch":["1"]},"_SyncBroadcastStreamController":{"_BroadcastStreamController":["1"],"StreamController":["1"],"_StreamControllerLifecycle":["1"],"_EventDispatch":["1"]},"_AsyncCompleter":{"_Completer":["1"]},"_Future":{"Future":["1"]},"_ControllerStream":{"_StreamImpl":["1"],"Stream":["1"]},"_ControllerSubscription":{"_BufferingStreamSubscription":["1"],"StreamSubscription":["1"],"_EventDispatch":["1"]},"_StreamImpl":{"Stream":["1"]},"_DelayedData":{"_DelayedEvent":["1"]},"_DoneStreamSubscription":{"StreamSubscription":["1"]},"_Zone":{"Zone":[]},"_RootZone":{"_Zone":[],"Zone":[]},"_HashMap":{"MapBase":["1","2"],"Map":["1","2"]},"_IdentityHashMap":{"_HashMap":["1","2"],"MapBase":["1","2"],"Map":["1","2"]},"_HashMapKeyIterable":{"EfficientLengthIterable":["1"],"Iterable":["1"],"Iterable.E":"1"},"_HashMapKeyIterator":{"Iterator":["1"]},"MapBase":{"Map":["1","2"]},"MapView":{"Map":["1","2"]},"UnmodifiableMapView":{"_UnmodifiableMapView_MapView__UnmodifiableMapMixin":["1","2"],"MapView":["1","2"],"_UnmodifiableMapMixin":["1","2"],"Map":["1","2"]},"Base64Codec":{"Codec":["List","String"],"Codec.S":"List"},"double":{"num":[]},"int":{"num":[]},"List":{"EfficientLengthIterable":["1"],"Iterable":["1"]},"String":{"Pattern":[]},"AssertionError":{"Error":[]},"TypeError":{"Error":[]},"ArgumentError":{"Error":[]},"RangeError":{"Error":[]},"IndexError":{"Error":[]},"NoSuchMethodError":{"Error":[]},"UnsupportedError":{"Error":[]},"UnimplementedError":{"Error":[]},"StateError":{"Error":[]},"ConcurrentModificationError":{"Error":[]},"OutOfMemoryError":{"Error":[]},"StackOverflowError":{"Error":[]},"_StringS
tackTrace":{"StackTrace":[]},"Int8List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint8List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint8ClampedList":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Int16List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint16List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Int32List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Uint32List":{"List":["int"],"EfficientLengthIterable":["int"],"Iterable":["int"]},"Float32List":{"List":["double"],"EfficientLengthIterable":["double"],"Iterable":["double"]},"Float64List":{"List":["double"],"EfficientLengthIterable":["double"],"Iterable":["double"]}}')); + A._Universe_addErasedTypes(init.typeUniverse, JSON.parse('{"EfficientLengthIterable":1,"NativeTypedArray":1,"_DelayedEvent":1,"Converter":2}')); + var string$ = { + Cannot: "Cannot fire new event. Controller is already firing an event", + Error_: "Error handler must accept one Object or one Object and a StackTrace as arguments, and return a value of the returned future's type", + ___las: "]: lastError != CryptorError.kOk, reset state to kNew", + decode: "decodeFunction::decryptFrameInternal: decrypted: " + }; + var type$ = (function rtii() { + var findType = A.findType; + return { + $env_1_1_void: findType("@<~>"), + AsyncError: findType("AsyncError"), + Base64Codec: findType("Base64Codec"), + ByteBuffer: findType("ByteBuffer"), + ByteData: findType("ByteData"), + ConstantMapView_Symbol_dynamic: findType("ConstantMapView"), + EfficientLengthIterable_dynamic: findType("EfficientLengthIterable<@>"), + Error: findType("Error"), + Float32List: findType("Float32List"), + Float64List: findType("Float64List"), + FrameCryptor: findType("FrameCryptor"), + Function: findType("Function"), + Future_dynamic: findType("Future<@>"), + Future_void_Function_JSObject_JSObject: 
findType("Future<~>(JSObject,JSObject)"), + Int16List: findType("Int16List"), + Int32List: findType("Int32List"), + Int8List: findType("Int8List"), + Invocation: findType("Invocation"), + Iterable_dynamic: findType("Iterable<@>"), + Iterable_int: findType("Iterable"), + Iterable_nullable_Object: findType("Iterable"), + JSArray_String: findType("JSArray"), + JSArray_dynamic: findType("JSArray<@>"), + JSArray_int: findType("JSArray"), + JSArray_nullable_Object: findType("JSArray"), + JSNull: findType("JSNull"), + JSObject: findType("JSObject"), + JavaScriptFunction: findType("JavaScriptFunction"), + JavaScriptIndexingBehavior_dynamic: findType("JavaScriptIndexingBehavior<@>"), + JsLinkedHashMap_Symbol_dynamic: findType("JsLinkedHashMap"), + KeySet: findType("KeySet"), + List_dynamic: findType("List<@>"), + List_int: findType("List"), + List_nullable_KeySet: findType("List"), + LogRecord: findType("LogRecord"), + Logger: findType("Logger"), + Map_dynamic_dynamic: findType("Map<@,@>"), + Map_of_nullable_Object_and_nullable_Object: findType("Map"), + NativeByteBuffer: findType("NativeByteBuffer"), + Null: findType("Null"), + Object: findType("Object"), + ParticipantKeyHandler: findType("ParticipantKeyHandler"), + Record: findType("Record"), + StackTrace: findType("StackTrace"), + String: findType("String"), + Symbol: findType("Symbol0"), + TrustedGetRuntimeType: findType("TrustedGetRuntimeType"), + TypeError: findType("TypeError"), + Uint16List: findType("Uint16List"), + Uint32List: findType("Uint32List"), + Uint8ClampedList: findType("Uint8ClampedList"), + Uint8List: findType("Uint8List"), + UnknownJavaScriptObject: findType("UnknownJavaScriptObject"), + _Future_dynamic: findType("_Future<@>"), + _Future_int: findType("_Future"), + _IdentityHashMap_of_nullable_Object_and_nullable_Object: findType("_IdentityHashMap"), + _SyncBroadcastStreamController_LogRecord: findType("_SyncBroadcastStreamController"), + bool: findType("bool"), + bool_Function_Object: 
findType("bool(Object)"), + double: findType("double"), + dynamic: findType("@"), + dynamic_Function: findType("@()"), + dynamic_Function_Object: findType("@(Object)"), + dynamic_Function_Object_StackTrace: findType("@(Object,StackTrace)"), + int: findType("int"), + legacy_Never: findType("0&*"), + legacy_Object: findType("Object*"), + nullable_Future_Null: findType("Future?"), + nullable_KeySet: findType("KeySet?"), + nullable_Object: findType("Object?"), + nullable_StreamController_LogRecord: findType("StreamController?"), + nullable_String: findType("String?"), + nullable_Uint8List: findType("Uint8List?"), + nullable__FutureListener_dynamic_dynamic: findType("_FutureListener<@,@>?"), + nullable_void_Function: findType("~()?"), + num: findType("num"), + void: findType("~"), + void_Function: findType("~()"), + void_Function_Object: findType("~(Object)"), + void_Function_Object_StackTrace: findType("~(Object,StackTrace)") + }; + })(); + (function constants() { + var makeConstList = hunkHelpers.makeConstList; + B.Interceptor_methods = J.Interceptor.prototype; + B.JSArray_methods = J.JSArray.prototype; + B.JSInt_methods = J.JSInt.prototype; + B.JSString_methods = J.JSString.prototype; + B.JavaScriptFunction_methods = J.JavaScriptFunction.prototype; + B.JavaScriptObject_methods = J.JavaScriptObject.prototype; + B.NativeByteData_methods = A.NativeByteData.prototype; + B.NativeUint8List_methods = A.NativeUint8List.prototype; + B.PlainJavaScriptObject_methods = J.PlainJavaScriptObject.prototype; + B.UnknownJavaScriptObject_methods = J.UnknownJavaScriptObject.prototype; + B.C_Base64Decoder = new A.Base64Decoder(); + B.C_Base64Encoder = new A.Base64Encoder(); + B.C_JS_CONST = function getTagFallback(o) { + var s = Object.prototype.toString.call(o); + return s.substring(8, s.length - 1); +}; + B.C_JS_CONST0 = function() { + var toStringFunction = Object.prototype.toString; + function getTag(o) { + var s = toStringFunction.call(o); + return s.substring(8, s.length - 1); + } 
+ function getUnknownTag(object, tag) { + if (/^HTML[A-Z].*Element$/.test(tag)) { + var name = toStringFunction.call(object); + if (name == "[object Object]") return null; + return "HTMLElement"; + } + } + function getUnknownTagGenericBrowser(object, tag) { + if (object instanceof HTMLElement) return "HTMLElement"; + return getUnknownTag(object, tag); + } + function prototypeForTag(tag) { + if (typeof window == "undefined") return null; + if (typeof window[tag] == "undefined") return null; + var constructor = window[tag]; + if (typeof constructor != "function") return null; + return constructor.prototype; + } + function discriminator(tag) { return null; } + var isBrowser = typeof HTMLElement == "function"; + return { + getTag: getTag, + getUnknownTag: isBrowser ? getUnknownTagGenericBrowser : getUnknownTag, + prototypeForTag: prototypeForTag, + discriminator: discriminator }; +}; + B.C_JS_CONST6 = function(getTagFallback) { + return function(hooks) { + if (typeof navigator != "object") return hooks; + var userAgent = navigator.userAgent; + if (typeof userAgent != "string") return hooks; + if (userAgent.indexOf("DumpRenderTree") >= 0) return hooks; + if (userAgent.indexOf("Chrome") >= 0) { + function confirm(p) { + return typeof window == "object" && window[p] && window[p].name == p; + } + if (confirm("Window") && confirm("HTMLElement")) return hooks; + } + hooks.getTag = getTagFallback; + }; +}; + B.C_JS_CONST1 = function(hooks) { + if (typeof dartExperimentalFixupGetTag != "function") return hooks; + hooks.getTag = dartExperimentalFixupGetTag(hooks.getTag); +}; + B.C_JS_CONST5 = function(hooks) { + if (typeof navigator != "object") return hooks; + var userAgent = navigator.userAgent; + if (typeof userAgent != "string") return hooks; + if (userAgent.indexOf("Firefox") == -1) return hooks; + var getTag = hooks.getTag; + var quickMap = { + "BeforeUnloadEvent": "Event", + "DataTransfer": "Clipboard", + "GeoGeolocation": "Geolocation", + "Location": "!Location", + 
"WorkerMessageEvent": "MessageEvent", + "XMLDocument": "!Document"}; + function getTagFirefox(o) { + var tag = getTag(o); + return quickMap[tag] || tag; + } + hooks.getTag = getTagFirefox; +}; + B.C_JS_CONST4 = function(hooks) { + if (typeof navigator != "object") return hooks; + var userAgent = navigator.userAgent; + if (typeof userAgent != "string") return hooks; + if (userAgent.indexOf("Trident/") == -1) return hooks; + var getTag = hooks.getTag; + var quickMap = { + "BeforeUnloadEvent": "Event", + "DataTransfer": "Clipboard", + "HTMLDDElement": "HTMLElement", + "HTMLDTElement": "HTMLElement", + "HTMLPhraseElement": "HTMLElement", + "Position": "Geoposition" + }; + function getTagIE(o) { + var tag = getTag(o); + var newTag = quickMap[tag]; + if (newTag) return newTag; + if (tag == "Object") { + if (window.DataView && (o instanceof window.DataView)) return "DataView"; + } + return tag; + } + function prototypeForTagIE(tag) { + var constructor = window[tag]; + if (constructor == null) return null; + return constructor.prototype; + } + hooks.getTag = getTagIE; + hooks.prototypeForTag = prototypeForTagIE; +}; + B.C_JS_CONST2 = function(hooks) { + var getTag = hooks.getTag; + var prototypeForTag = hooks.prototypeForTag; + function getTagFixed(o) { + var tag = getTag(o); + if (tag == "Document") { + if (!!o.xmlVersion) return "!Document"; + return "!HTMLDocument"; + } + return tag; + } + function prototypeForTagFixed(tag) { + if (tag == "Document") return null; + return prototypeForTag(tag); + } + hooks.getTag = getTagFixed; + hooks.prototypeForTag = prototypeForTagFixed; +}; + B.C_JS_CONST3 = function(hooks) { return hooks; } +; + B.C_OutOfMemoryError = new A.OutOfMemoryError(); + B.C_SentinelValue = new A.SentinelValue(); + B.C__Required = new A._Required(); + B.C__RootZone = new A._RootZone(); + B.C__StringStackTrace = new A._StringStackTrace(); + B.CryptorError_0 = new A.CryptorError("kNew"); + B.CryptorError_1 = new A.CryptorError("kOk"); + B.CryptorError_2 = new 
A.CryptorError("kDecryptError"); + B.CryptorError_3 = new A.CryptorError("kEncryptError"); + B.CryptorError_5 = new A.CryptorError("kMissingKey"); + B.CryptorError_6 = new A.CryptorError("kKeyRatcheted"); + B.CryptorError_7 = new A.CryptorError("kInternalError"); + B.CryptorError_8 = new A.CryptorError("kDisposed"); + B.Level_CONFIG_700 = new A.Level("CONFIG", 700); + B.Level_FINER_400 = new A.Level("FINER", 400); + B.Level_FINE_500 = new A.Level("FINE", 500); + B.Level_INFO_800 = new A.Level("INFO", 800); + B.Level_WARNING_900 = new A.Level("WARNING", 900); + B.List_empty = A._setArrayType(makeConstList([]), type$.JSArray_dynamic); + B.Object_empty = {}; + B.Map_empty = new A.ConstantStringMap(B.Object_empty, [], A.findType("ConstantStringMap")); + B.Symbol_call = new A.Symbol("call"); + B.Type_ByteBuffer_rqD = A.typeLiteral("ByteBuffer"); + B.Type_ByteData_9dB = A.typeLiteral("ByteData"); + B.Type_Float32List_9Kz = A.typeLiteral("Float32List"); + B.Type_Float64List_9Kz = A.typeLiteral("Float64List"); + B.Type_Int16List_s5h = A.typeLiteral("Int16List"); + B.Type_Int32List_O8Z = A.typeLiteral("Int32List"); + B.Type_Int8List_rFV = A.typeLiteral("Int8List"); + B.Type_JSObject_ttY = A.typeLiteral("JSObject"); + B.Type_Object_A4p = A.typeLiteral("Object"); + B.Type_Uint16List_kmP = A.typeLiteral("Uint16List"); + B.Type_Uint32List_kmP = A.typeLiteral("Uint32List"); + B.Type_Uint8ClampedList_04U = A.typeLiteral("Uint8ClampedList"); + B.Type_Uint8List_8Eb = A.typeLiteral("Uint8List"); + })(); + (function staticFields() { + $._JS_INTEROP_INTERCEPTOR_TAG = null; + $.toStringVisiting = A._setArrayType([], A.findType("JSArray")); + $.Primitives__identityHashCodeProperty = null; + $.BoundClosure__receiverFieldNameCache = null; + $.BoundClosure__interceptorFieldNameCache = null; + $.getTagFunction = null; + $.alternateTagFunction = null; + $.prototypeForTagFunction = null; + $.dispatchRecordsForInstanceTags = null; + $.interceptorsForUncacheableTags = null; + 
$.initNativeDispatchFlag = null; + $._nextCallback = null; + $._lastCallback = null; + $._lastPriorityCallback = null; + $._isInCallbackLoop = false; + $.Zone__current = B.C__RootZone; + $.participantCryptors = A._setArrayType([], A.findType("JSArray")); + $.keyProviders = A.LinkedHashMap_LinkedHashMap$_empty(type$.String, A.findType("KeyProvider")); + $.LogRecord__nextNumber = 0; + $.Logger__loggers = A.LinkedHashMap_LinkedHashMap$_empty(type$.String, type$.Logger); + })(); + (function lazyInitializers() { + var _lazyFinal = hunkHelpers.lazyFinal, + _lazy = hunkHelpers.lazy; + _lazyFinal($, "DART_CLOSURE_PROPERTY_NAME", "$get$DART_CLOSURE_PROPERTY_NAME", () => A.getIsolateAffinityTag("_$dart_dartClosure")); + _lazyFinal($, "_CopyingBytesBuilder__emptyList", "$get$_CopyingBytesBuilder__emptyList", () => A.NativeUint8List_NativeUint8List(0)); + _lazyFinal($, "TypeErrorDecoder_noSuchMethodPattern", "$get$TypeErrorDecoder_noSuchMethodPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn({ + toString: function() { + return "$receiver$"; + } + }))); + _lazyFinal($, "TypeErrorDecoder_notClosurePattern", "$get$TypeErrorDecoder_notClosurePattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn({$method$: null, + toString: function() { + return "$receiver$"; + } + }))); + _lazyFinal($, "TypeErrorDecoder_nullCallPattern", "$get$TypeErrorDecoder_nullCallPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn(null))); + _lazyFinal($, "TypeErrorDecoder_nullLiteralCallPattern", "$get$TypeErrorDecoder_nullLiteralCallPattern", () => A.TypeErrorDecoder_extractPattern(function() { + var $argumentsExpr$ = "$arguments$"; + try { + null.$method$($argumentsExpr$); + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "TypeErrorDecoder_undefinedCallPattern", "$get$TypeErrorDecoder_undefinedCallPattern", () => 
A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokeCallErrorOn(void 0))); + _lazyFinal($, "TypeErrorDecoder_undefinedLiteralCallPattern", "$get$TypeErrorDecoder_undefinedLiteralCallPattern", () => A.TypeErrorDecoder_extractPattern(function() { + var $argumentsExpr$ = "$arguments$"; + try { + (void 0).$method$($argumentsExpr$); + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "TypeErrorDecoder_nullPropertyPattern", "$get$TypeErrorDecoder_nullPropertyPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokePropertyErrorOn(null))); + _lazyFinal($, "TypeErrorDecoder_nullLiteralPropertyPattern", "$get$TypeErrorDecoder_nullLiteralPropertyPattern", () => A.TypeErrorDecoder_extractPattern(function() { + try { + null.$method$; + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "TypeErrorDecoder_undefinedPropertyPattern", "$get$TypeErrorDecoder_undefinedPropertyPattern", () => A.TypeErrorDecoder_extractPattern(A.TypeErrorDecoder_provokePropertyErrorOn(void 0))); + _lazyFinal($, "TypeErrorDecoder_undefinedLiteralPropertyPattern", "$get$TypeErrorDecoder_undefinedLiteralPropertyPattern", () => A.TypeErrorDecoder_extractPattern(function() { + try { + (void 0).$method$; + } catch (e) { + return e.message; + } + }())); + _lazyFinal($, "_AsyncRun__scheduleImmediateClosure", "$get$_AsyncRun__scheduleImmediateClosure", () => A._AsyncRun__initializeScheduleImmediate()); + _lazyFinal($, "_Base64Decoder__inverseAlphabet", "$get$_Base64Decoder__inverseAlphabet", () => new Int8Array(A._ensureNativeList(A._setArrayType([-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -1, -2, -2, -2, -2, -2, 62, -2, 62, -2, 63, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -2, -2, -2, -1, -2, -2, -2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -2, -2, -2, -2, 63, -2, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -2, -2, -2, -2, -2], type$.JSArray_int)))); + _lazy($, "_Base64Decoder__emptyBuffer", "$get$_Base64Decoder__emptyBuffer", () => A.NativeUint8List_NativeUint8List(0)); + _lazyFinal($, "_hashSeed", "$get$_hashSeed", () => A.objectHashCode(B.Type_Object_A4p)); + _lazyFinal($, "Random__secureRandom", "$get$Random__secureRandom", () => { + var t1 = new A._JSSecureRandom(A.NativeByteData_NativeByteData(8)); + t1._JSSecureRandom$0(); + return t1; + }); + _lazyFinal($, "logger", "$get$logger", () => A.Logger_Logger("E2EE.Worker")); + _lazyFinal($, "Logger_root", "$get$Logger_root", () => A.Logger_Logger("")); + })(); + (function nativeSupport() { + !function() { + var intern = function(s) { + var o = {}; + o[s] = 1; + return Object.keys(hunkHelpers.convertToFastObject(o))[0]; + }; + init.getIsolateTag = function(name) { + return intern("___dart_" + name + init.isolateTag); + }; + var tableProperty = "___dart_isolate_tags_"; + var usedProperties = Object[tableProperty] || (Object[tableProperty] = Object.create(null)); + var rootProperty = "_ZxYxX"; + for (var i = 0;; i++) { + var property = intern(rootProperty + "_" + i + "_"); + if (!(property in usedProperties)) { + usedProperties[property] = 1; + init.isolateTag = property; + break; + } + } + init.dispatchPropertyName = init.getIsolateTag("dispatch_record"); + }(); + hunkHelpers.setOrUpdateInterceptorsByTag({ArrayBuffer: A.NativeByteBuffer, ArrayBufferView: A.NativeTypedData, DataView: A.NativeByteData, Float32Array: A.NativeFloat32List, Float64Array: A.NativeFloat64List, Int16Array: A.NativeInt16List, Int32Array: A.NativeInt32List, Int8Array: A.NativeInt8List, Uint16Array: A.NativeUint16List, Uint32Array: A.NativeUint32List, Uint8ClampedArray: A.NativeUint8ClampedList, CanvasPixelArray: A.NativeUint8ClampedList, Uint8Array: A.NativeUint8List}); + hunkHelpers.setOrUpdateLeafTags({ArrayBuffer: true, ArrayBufferView: false, DataView: true, Float32Array: true, 
Float64Array: true, Int16Array: true, Int32Array: true, Int8Array: true, Uint16Array: true, Uint32Array: true, Uint8ClampedArray: true, CanvasPixelArray: true, Uint8Array: false}); + A.NativeTypedArray.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A.NativeTypedArrayOfDouble.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A._NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin.$nativeSuperclassTag = "ArrayBufferView"; + A.NativeTypedArrayOfInt.$nativeSuperclassTag = "ArrayBufferView"; + })(); + Function.prototype.call$1 = function(a) { + return this(a); + }; + Function.prototype.call$0 = function() { + return this(); + }; + Function.prototype.call$2 = function(a, b) { + return this(a, b); + }; + Function.prototype.call$3 = function(a, b, c) { + return this(a, b, c); + }; + Function.prototype.call$4 = function(a, b, c, d) { + return this(a, b, c, d); + }; + Function.prototype.call$1$1 = function(a) { + return this(a); + }; + convertAllToFastObject(holders); + convertToFastObject($); + (function(callback) { + if (typeof document === "undefined") { + callback(null); + return; + } + if (typeof document.currentScript != "undefined") { + callback(document.currentScript); + return; + } + var scripts = document.scripts; + function onLoad(event) { + for (var i = 0; i < scripts.length; ++i) { + scripts[i].removeEventListener("load", onLoad, false); + } + callback(event.target); + } + for (var i = 0; i < scripts.length; ++i) { + scripts[i].addEventListener("load", onLoad, false); + } + })(function(currentScript) { + init.currentScript = currentScript; + var callMain = A.main; + if (typeof dartMainRunner === "function") { + dartMainRunner(callMain, []); + } 
else { + callMain([]); + } + }); +})(); + +//# sourceMappingURL=e2ee.worker.dart.js.map diff --git a/example/web/e2ee.worker.dart.js.deps b/example/web/e2ee.worker.dart.js.deps new file mode 100644 index 0000000000..45a7692f9d --- /dev/null +++ b/example/web/e2ee.worker.dart.js.deps @@ -0,0 +1,439 @@ +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/collection.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/algorithms.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/boollist.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/canonicalized_map.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_iterable.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_iterator.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_list.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/combined_wrappers/combined_map.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/comparators.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/empty_unmodifiable_set.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/equality.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/equality_map.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/equality_set.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/functions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/iterable_extensions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/iterable_zip.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/list_extensions.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/priority_queue.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/queue_list.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/union_set.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/union_set_controller.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/unmodifiable_wrappers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/utils.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/wrappers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/js-0.7.1/lib/js.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/js-0.7.1/lib/js_util.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/logging.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/level.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/log_record.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/logger.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/accelerometer.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/angle_instanced_arrays.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/attribution_reporting_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/background_sync.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/battery_status.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/clipboard_apis.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/compression.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/console.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/cookie_store.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/credential_management.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/csp.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_animations.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_animations_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_cascade.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_cascade_6.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_conditional.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_conditional_5.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_contain.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_counter_styles.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_font_loading.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_fonts.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_highlight_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_masking.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_paint_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_properties_values_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_transitions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_transitions_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_typed_om.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_view_transitions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/css_view_transitions_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/cssom.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/cssom_view.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/digital_identities.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/dom.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/dom_parsing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/encoding.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/encrypted_media.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/entries_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/event_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_blend_minmax.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_color_buffer_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_color_buffer_half_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_disjoint_timer_query.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_disjoint_timer_query_webgl2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_float_blend.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_frag_depth.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_shader_texture_lod.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_srgb.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_compression_bptc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_compression_rgtc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_filter_anisotropic.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ext_texture_norm16.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fedcm.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fetch.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fido.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fileapi.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/filter_effects.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fs.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/fullscreen.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/gamepad.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/generic_sensor.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/geolocation.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/geometry.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/gyroscope.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/hr_time.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/html.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/image_capture.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/indexeddb.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/intersection_observer.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/khr_parallel_shader_compile.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/largest_contentful_paint.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mathml_core.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/media_capabilities.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/media_playback_quality.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/media_source.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediacapture_fromelement.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediacapture_streams.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediacapture_transform.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediasession.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mediastream_recording.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/mst_content_hint.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/navigation_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/netinfo.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/notifications.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_draw_buffers_indexed.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_element_index_uint.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_fbo_render_mipmap.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_standard_derivatives.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_float_linear.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_half_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_texture_half_float_linear.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/oes_vertex_array_object.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/orientation_event.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/orientation_sensor.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/ovr_multiview2.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/paint_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/payment_request.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/performance_timeline.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/permissions.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/picture_in_picture.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/pointerevents.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/pointerlock.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/private_network_access.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/push_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/referrer_policy.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/remote_playback.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/reporting.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/requestidlecallback.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/resize_observer.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/resource_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/saa_non_cookie_storage.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/sanitizer_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/scheduling_apis.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/screen_capture.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/screen_orientation.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/screen_wake_lock.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/secure_payment_confirmation.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/selection_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/server_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/service_workers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/speech_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/storage.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/streams.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/svg.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/svg_animations.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/touch_events.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/trust_token_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/trusted_types.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/uievents.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/url.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/user_timing.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/vibration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/video_rvfc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/wasm_js_api.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_animations.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_animations_2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_bluetooth.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_locks.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_otp.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/web_share.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webaudio.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webauthn.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_av1_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_avc_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_hevc_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcodecs_vp9_codec_registration.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webcryptoapi.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl1.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl2.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_color_buffer_float.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_astc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_etc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_etc1.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_pvrtc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_s3tc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_compressed_texture_s3tc_srgb.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_debug_renderer_info.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_debug_shaders.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_depth_texture.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_draw_buffers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_lose_context.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgl_multi_draw.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webgpu.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webidl.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webmidi.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc_encoded_transform.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc_identity.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webrtc_priority.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/websockets.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webtransport.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webvtt.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webxr.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/webxr_hand_input.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/dom/xhr.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/cross_origin.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/enums.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/events/events.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/events/providers.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/events/streams.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/extensions.dart 
+file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/http.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/lists.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/src/helpers/renames.dart +file:///Users/duan/.pub-cache/hosted/pub.dev/web-1.1.0/lib/web.dart +file:///Users/duan/Desktop/dart-webrtc/.dart_tool/package_config.json +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.cryptor.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.keyhandler.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.logger.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.sfi_guard.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.utils.dart +file:///Users/duan/Desktop/dart-webrtc/lib/src/e2ee.worker/e2ee.worker.dart +file:///Users/duan/bin/flutter/bin/cache/dart-sdk/lib/_internal/dart2js_platform.dill +file:///Users/duan/bin/flutter/bin/cache/dart-sdk/lib/libraries.json +org-dartlang-sdk:///lib/_http/crypto.dart +org-dartlang-sdk:///lib/_http/embedder_config.dart +org-dartlang-sdk:///lib/_http/http.dart +org-dartlang-sdk:///lib/_http/http_date.dart +org-dartlang-sdk:///lib/_http/http_headers.dart +org-dartlang-sdk:///lib/_http/http_impl.dart +org-dartlang-sdk:///lib/_http/http_parser.dart +org-dartlang-sdk:///lib/_http/http_session.dart +org-dartlang-sdk:///lib/_http/http_testing.dart +org-dartlang-sdk:///lib/_http/overrides.dart +org-dartlang-sdk:///lib/_http/websocket.dart +org-dartlang-sdk:///lib/_http/websocket_impl.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/annotations.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/async_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/bigint_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/collection_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/constant_map.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/convert_patch.dart 
+org-dartlang-sdk:///lib/_internal/js_runtime/lib/core_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/dart2js_only.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/dart2js_runtime_metrics.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/developer_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/foreign_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/instantiation.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/interceptors.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/internal_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/io_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/isolate_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_allow_interop_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_array.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_names.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_number.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_primitives.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_string.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/late_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/linked_hash_map.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/math_patch.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_typed_data.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/records.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/regexp_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/string_helper.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/synced/array_flags.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/synced/embedded_names.dart 
+org-dartlang-sdk:///lib/_internal/js_runtime/lib/synced/invocation_mirror_constants.dart +org-dartlang-sdk:///lib/_internal/js_runtime/lib/typed_data_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/convert_utf_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/date_time_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_unsafe_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_types.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/js_util_patch.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/rti.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/async_status_codes.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/embedded_names.dart +org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/recipe_syntax.dart +org-dartlang-sdk:///lib/async/async.dart +org-dartlang-sdk:///lib/async/async_error.dart +org-dartlang-sdk:///lib/async/broadcast_stream_controller.dart +org-dartlang-sdk:///lib/async/deferred_load.dart +org-dartlang-sdk:///lib/async/future.dart +org-dartlang-sdk:///lib/async/future_extensions.dart +org-dartlang-sdk:///lib/async/future_impl.dart +org-dartlang-sdk:///lib/async/schedule_microtask.dart +org-dartlang-sdk:///lib/async/stream.dart +org-dartlang-sdk:///lib/async/stream_controller.dart +org-dartlang-sdk:///lib/async/stream_impl.dart +org-dartlang-sdk:///lib/async/stream_pipe.dart +org-dartlang-sdk:///lib/async/stream_transformers.dart +org-dartlang-sdk:///lib/async/timer.dart +org-dartlang-sdk:///lib/async/zone.dart +org-dartlang-sdk:///lib/collection/collection.dart +org-dartlang-sdk:///lib/collection/collections.dart +org-dartlang-sdk:///lib/collection/hash_map.dart +org-dartlang-sdk:///lib/collection/hash_set.dart +org-dartlang-sdk:///lib/collection/iterable.dart +org-dartlang-sdk:///lib/collection/iterator.dart +org-dartlang-sdk:///lib/collection/linked_hash_map.dart 
+org-dartlang-sdk:///lib/collection/linked_hash_set.dart +org-dartlang-sdk:///lib/collection/linked_list.dart +org-dartlang-sdk:///lib/collection/list.dart +org-dartlang-sdk:///lib/collection/maps.dart +org-dartlang-sdk:///lib/collection/queue.dart +org-dartlang-sdk:///lib/collection/set.dart +org-dartlang-sdk:///lib/collection/splay_tree.dart +org-dartlang-sdk:///lib/convert/ascii.dart +org-dartlang-sdk:///lib/convert/base64.dart +org-dartlang-sdk:///lib/convert/byte_conversion.dart +org-dartlang-sdk:///lib/convert/chunked_conversion.dart +org-dartlang-sdk:///lib/convert/codec.dart +org-dartlang-sdk:///lib/convert/convert.dart +org-dartlang-sdk:///lib/convert/converter.dart +org-dartlang-sdk:///lib/convert/encoding.dart +org-dartlang-sdk:///lib/convert/html_escape.dart +org-dartlang-sdk:///lib/convert/json.dart +org-dartlang-sdk:///lib/convert/latin1.dart +org-dartlang-sdk:///lib/convert/line_splitter.dart +org-dartlang-sdk:///lib/convert/string_conversion.dart +org-dartlang-sdk:///lib/convert/utf.dart +org-dartlang-sdk:///lib/core/annotations.dart +org-dartlang-sdk:///lib/core/bigint.dart +org-dartlang-sdk:///lib/core/bool.dart +org-dartlang-sdk:///lib/core/comparable.dart +org-dartlang-sdk:///lib/core/core.dart +org-dartlang-sdk:///lib/core/date_time.dart +org-dartlang-sdk:///lib/core/double.dart +org-dartlang-sdk:///lib/core/duration.dart +org-dartlang-sdk:///lib/core/enum.dart +org-dartlang-sdk:///lib/core/errors.dart +org-dartlang-sdk:///lib/core/exceptions.dart +org-dartlang-sdk:///lib/core/function.dart +org-dartlang-sdk:///lib/core/identical.dart +org-dartlang-sdk:///lib/core/int.dart +org-dartlang-sdk:///lib/core/invocation.dart +org-dartlang-sdk:///lib/core/iterable.dart +org-dartlang-sdk:///lib/core/iterator.dart +org-dartlang-sdk:///lib/core/list.dart +org-dartlang-sdk:///lib/core/map.dart +org-dartlang-sdk:///lib/core/null.dart +org-dartlang-sdk:///lib/core/num.dart +org-dartlang-sdk:///lib/core/object.dart +org-dartlang-sdk:///lib/core/pattern.dart 
+org-dartlang-sdk:///lib/core/print.dart +org-dartlang-sdk:///lib/core/record.dart +org-dartlang-sdk:///lib/core/regexp.dart +org-dartlang-sdk:///lib/core/set.dart +org-dartlang-sdk:///lib/core/sink.dart +org-dartlang-sdk:///lib/core/stacktrace.dart +org-dartlang-sdk:///lib/core/stopwatch.dart +org-dartlang-sdk:///lib/core/string.dart +org-dartlang-sdk:///lib/core/string_buffer.dart +org-dartlang-sdk:///lib/core/string_sink.dart +org-dartlang-sdk:///lib/core/symbol.dart +org-dartlang-sdk:///lib/core/type.dart +org-dartlang-sdk:///lib/core/uri.dart +org-dartlang-sdk:///lib/core/weak.dart +org-dartlang-sdk:///lib/developer/developer.dart +org-dartlang-sdk:///lib/developer/extension.dart +org-dartlang-sdk:///lib/developer/http_profiling.dart +org-dartlang-sdk:///lib/developer/profiler.dart +org-dartlang-sdk:///lib/developer/service.dart +org-dartlang-sdk:///lib/developer/timeline.dart +org-dartlang-sdk:///lib/html/dart2js/html_dart2js.dart +org-dartlang-sdk:///lib/html/html_common/conversions.dart +org-dartlang-sdk:///lib/html/html_common/conversions_dart2js.dart +org-dartlang-sdk:///lib/html/html_common/css_class_set.dart +org-dartlang-sdk:///lib/html/html_common/device.dart +org-dartlang-sdk:///lib/html/html_common/filtered_element_list.dart +org-dartlang-sdk:///lib/html/html_common/html_common_dart2js.dart +org-dartlang-sdk:///lib/html/html_common/lists.dart +org-dartlang-sdk:///lib/html/html_common/metadata.dart +org-dartlang-sdk:///lib/indexed_db/dart2js/indexed_db_dart2js.dart +org-dartlang-sdk:///lib/internal/async_cast.dart +org-dartlang-sdk:///lib/internal/bytes_builder.dart +org-dartlang-sdk:///lib/internal/cast.dart +org-dartlang-sdk:///lib/internal/errors.dart +org-dartlang-sdk:///lib/internal/internal.dart +org-dartlang-sdk:///lib/internal/iterable.dart +org-dartlang-sdk:///lib/internal/linked_list.dart +org-dartlang-sdk:///lib/internal/list.dart +org-dartlang-sdk:///lib/internal/patch.dart +org-dartlang-sdk:///lib/internal/print.dart 
+org-dartlang-sdk:///lib/internal/sort.dart +org-dartlang-sdk:///lib/internal/symbol.dart +org-dartlang-sdk:///lib/io/common.dart +org-dartlang-sdk:///lib/io/data_transformer.dart +org-dartlang-sdk:///lib/io/directory.dart +org-dartlang-sdk:///lib/io/directory_impl.dart +org-dartlang-sdk:///lib/io/embedder_config.dart +org-dartlang-sdk:///lib/io/eventhandler.dart +org-dartlang-sdk:///lib/io/file.dart +org-dartlang-sdk:///lib/io/file_impl.dart +org-dartlang-sdk:///lib/io/file_system_entity.dart +org-dartlang-sdk:///lib/io/io.dart +org-dartlang-sdk:///lib/io/io_resource_info.dart +org-dartlang-sdk:///lib/io/io_service.dart +org-dartlang-sdk:///lib/io/io_sink.dart +org-dartlang-sdk:///lib/io/link.dart +org-dartlang-sdk:///lib/io/namespace_impl.dart +org-dartlang-sdk:///lib/io/network_profiling.dart +org-dartlang-sdk:///lib/io/overrides.dart +org-dartlang-sdk:///lib/io/platform.dart +org-dartlang-sdk:///lib/io/platform_impl.dart +org-dartlang-sdk:///lib/io/process.dart +org-dartlang-sdk:///lib/io/secure_server_socket.dart +org-dartlang-sdk:///lib/io/secure_socket.dart +org-dartlang-sdk:///lib/io/security_context.dart +org-dartlang-sdk:///lib/io/service_object.dart +org-dartlang-sdk:///lib/io/socket.dart +org-dartlang-sdk:///lib/io/stdio.dart +org-dartlang-sdk:///lib/io/string_transformer.dart +org-dartlang-sdk:///lib/io/sync_socket.dart +org-dartlang-sdk:///lib/isolate/capability.dart +org-dartlang-sdk:///lib/isolate/isolate.dart +org-dartlang-sdk:///lib/js/_js.dart +org-dartlang-sdk:///lib/js/_js_annotations.dart +org-dartlang-sdk:///lib/js/_js_client.dart +org-dartlang-sdk:///lib/js/js.dart +org-dartlang-sdk:///lib/js_interop/js_interop.dart +org-dartlang-sdk:///lib/js_interop_unsafe/js_interop_unsafe.dart +org-dartlang-sdk:///lib/js_util/js_util.dart +org-dartlang-sdk:///lib/math/math.dart +org-dartlang-sdk:///lib/math/point.dart +org-dartlang-sdk:///lib/math/random.dart +org-dartlang-sdk:///lib/math/rectangle.dart 
+org-dartlang-sdk:///lib/svg/dart2js/svg_dart2js.dart +org-dartlang-sdk:///lib/typed_data/typed_data.dart +org-dartlang-sdk:///lib/web_audio/dart2js/web_audio_dart2js.dart +org-dartlang-sdk:///lib/web_gl/dart2js/web_gl_dart2js.dart \ No newline at end of file diff --git a/example/web/e2ee.worker.dart.js.map b/example/web/e2ee.worker.dart.js.map new file mode 100644 index 0000000000..8288579a95 --- /dev/null +++ b/example/web/e2ee.worker.dart.js.map @@ -0,0 +1,16 @@ +{ + "version": 3, + "engine": "v2", + "file": "e2ee.worker.dart.js", + "sourceRoot": "", + "sources": ["org-dartlang-sdk:///lib/_internal/js_runtime/lib/interceptors.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_helper.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_helper.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_array.dart","org-dartlang-sdk:///lib/internal/internal.dart","org-dartlang-sdk:///lib/internal/iterable.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_names.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/rti.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/date_time_patch.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/linked_hash_map.dart","org-dartlang-sdk:///lib/core/errors.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/records.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/string_helper.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/native_typed_data.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/synced/recipe_syntax.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/async_patch.dart","org-dartlang-sdk:///lib/async/future_impl.dart","org-dartlang-sdk:///lib/async/zone.dart","org-dartlang-sdk:///lib/async/async_error.dart","org-dartlang-sdk:///lib/async/schedule_microtask.dart","org-dartlang-sdk:///lib/async/stream.dart","org-dartlang-sdk:///lib/async/stream_impl.dart","org-dartlang-sdk:///lib/async/stream_controller.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/co
llection_patch.dart","org-dartlang-sdk:///lib/collection/maps.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/core_patch.dart","org-dartlang-sdk:///lib/convert/base64.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_string.dart","org-dartlang-sdk:///lib/core/date_time.dart","org-dartlang-sdk:///lib/core/exceptions.dart","org-dartlang-sdk:///lib/core/iterable.dart","org-dartlang-sdk:///lib/core/object.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_allow_interop_patch.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/js_util_patch.dart","../lib/src/e2ee.worker/e2ee.cryptor.dart","../lib/src/e2ee.worker/e2ee.keyhandler.dart","../lib/src/e2ee.worker/e2ee.worker.dart","../../../.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/logger.dart","../lib/src/e2ee.worker/e2ee.sfi_guard.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_primitives.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/late_helper.dart","org-dartlang-sdk:///lib/internal/errors.dart","../../../.pub-cache/hosted/pub.dev/collection-1.19.1/lib/src/iterable_extensions.dart","../lib/src/e2ee.worker/e2ee.utils.dart","org-dartlang-sdk:///lib/collection/list.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/js_number.dart","org-dartlang-sdk:///lib/internal/bytes_builder.dart","org-dartlang-sdk:///lib/typed_data/typed_data.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/internal_patch.dart","org-dartlang-sdk:///lib/internal/symbol.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/constant_map.dart","org-dartlang-sdk:///lib/async/broadcast_stream_controller.dart","org-dartlang-sdk:///lib/core/enum.dart","org-dartlang-sdk:///lib/core/null.dart","org-dartlang-sdk:///lib/core/stacktrace.dart","org-dartlang-sdk:///lib/js_util/js_util.dart","org-dartlang-sdk:///lib/_internal/js_runtime/lib/math_patch.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_patch.dart","org-dartlang-sdk:///lib/_internal/js_shared/lib/js_interop_unsafe_patch.dar
t","org-dartlang-sdk:///lib/convert/codec.dart","../../../.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/level.dart","../../../.pub-cache/hosted/pub.dev/logging-1.3.0/lib/src/log_record.dart","../lib/src/e2ee.worker/e2ee.logger.dart","org-dartlang-sdk:///lib/async/future.dart","org-dartlang-sdk:///lib/core/print.dart"], + "names": ["makeDispatchRecord","getNativeInterceptor","lookupInterceptorByConstructor","JS_INTEROP_INTERCEPTOR_TAG","cacheInterceptorOnConstructor","JSArray.fixed","JSArray.markFixed","SystemHash.combine","SystemHash.finish","checkNotNullable","isToStringVisiting","MappedIterable","unminifyOrTag","isJsIndexable","S","Primitives.objectHashCode","Primitives.objectTypeName","Primitives._objectTypeNameNewRti","Primitives.safeToString","Primitives.stringSafeToString","Primitives.stringFromNativeUint8List","Primitives.lazyAsJsDate","Primitives.getYear","Primitives.getMonth","Primitives.getDay","Primitives.getHours","Primitives.getMinutes","Primitives.getSeconds","Primitives.getMilliseconds","Primitives.functionNoSuchMethod","createUnmangledInvocationMirror","Primitives.applyFunction","Primitives._generalApplyFunction","JsLinkedHashMap.isNotEmpty","Primitives.extractStackTrace","Primitives.trySetStackTrace","iae","ioore","diagnoseIndexError","diagnoseRangeError","argumentErrorValue","wrapException","initializeExceptionWrapper","toStringWrapper","throwExpression","throwExpressionWithWrapper","throwUnsupportedOperation","_diagnoseUnsupportedOperation","throwConcurrentModificationError","TypeErrorDecoder.extractPattern","TypeErrorDecoder.provokeCallErrorOn","TypeErrorDecoder.provokePropertyErrorOn","JsNoSuchMethodError","unwrapException","saveStackTrace","_unwrapNonDartException","getTraceFromException","objectHashCode","fillLiteralMap","_invokeClosure","convertDartClosureToJS","convertDartClosureToJSUncached","Closure.fromTearOff","Closure._computeSignatureFunctionNewRti","Closure.cspForwardCall","Closure.forwardCallTo","Closure.cspForwardInterceptedCall",
"Closure.forwardInterceptedCallTo","closureFromTearOff","BoundClosure.evalRecipe","evalInInstance","_rtiEval","BoundClosure.receiverOf","BoundClosure.interceptorOf","BoundClosure._computeFieldNamed","boolConversionCheck","assertThrow","throwCyclicInit","getIsolateAffinityTag","defineProperty","lookupAndCacheInterceptor","setDispatchProperty","patchInstance","lookupInterceptor","patchProto","patchInteriorProto","makeLeafDispatchRecord","makeDefaultDispatchRecord","initNativeDispatch","initNativeDispatchContinue","initHooks","applyHooksTransformer","createRecordTypePredicate","quoteStringForRegExp","NativeByteData","_ensureNativeList","NativeUint8List","NativeUint8List.view","_checkValidIndex","_checkValidRange","Rti._getQuestionFromStar","Rti._getStarArgument","Rti._getFutureFromFutureOr","Rti._getFutureOrArgument","Rti._isUnionOfFunctionType","Rti._getKind","Rti._getCanonicalRecipe","findType","_substitute","Rti._getInterfaceName","Rti._getBindingBase","Rti._getRecordPartialShapeTag","Rti._getReturnType","Rti._getGenericFunctionBase","Rti._getGenericFunctionParameterIndex","_substituteArray","_substituteNamed","_substituteFunctionParameters","_FunctionParameters.allocate","_setArrayType","closureFunctionType","instanceOrFunctionType","instanceType","_arrayInstanceType","_instanceType","_instanceTypeFromConstructor","_instanceTypeFromConstructorMiss","getTypeFromTypesTable","getRuntimeTypeOfDartObject","_structuralTypeOf","_instanceFunctionType","createRuntimeType","_createAndCacheRuntimeType","_createRuntimeType","_Type","typeLiteral","_installSpecializedIsTest","isDefinitelyTopType","_recordSpecializedIsTest","_finishIsFn","_installSpecializedAsCheck","_nullIs","_generalIsTestImplementation","_generalNullableIsTestImplementation","Rti._getQuestionArgument","_isTestViaProperty","_isListTestViaProperty","_generalAsCheckImplementation","_generalNullableAsCheckImplementation","_failedAsCheck","_Error.compose","_TypeError.fromMessage","_TypeError.forType","_isFutureOr",
"_isObject","_asObject","_isTop","_asTop","_isNever","_isBool","_asBool","_asBoolS","_asBoolQ","_asDouble","_asDoubleS","_asDoubleQ","_isInt","_asInt","_asIntS","_asIntQ","_isNum","_asNum","_asNumS","_asNumQ","_isString","_asString","_asStringS","_asStringQ","_rtiArrayToString","_recordRtiToString","_functionRtiToString","isLegacyObjectType","_rtiToString","_unminifyOrTag","_Universe.findRule","_Universe._findRule","_Universe.findErasedType","_Universe.addRules","_Universe.addErasedTypes","_Universe.eval","_Universe.evalInEnvironment","_Universe.bind","_Universe._installTypeTests","_Universe._lookupTerminalRti","Rti.allocate","_Universe._createTerminalRti","_Universe._installRti","_Universe._lookupStarRti","_Universe._createStarRti","_Universe._lookupQuestionRti","_Universe._createQuestionRti","_Universe._lookupFutureOrRti","_Universe._createFutureOrRti","_Universe._lookupGenericFunctionParameterRti","_Universe._createGenericFunctionParameterRti","_Universe._canonicalRecipeJoin","_Universe._canonicalRecipeJoinNamed","_Universe._lookupInterfaceRti","_Universe._canonicalRecipeOfInterface","_Universe._createInterfaceRti","_Universe._lookupBindingRti","_Universe._createBindingRti","_Universe._lookupRecordRti","_Universe._createRecordRti","_Universe._lookupFunctionRti","_Universe._canonicalRecipeOfFunction","_Universe._canonicalRecipeOfFunctionParameters","_Universe._createFunctionRti","_Universe._lookupGenericFunctionRti","_Universe._createGenericFunctionRti","_Parser.create","_Parser.parse","_Parser.toGenericFunctionParameter","_Parser.pushStackFrame","_Parser.collectArray","_Parser.handleOptionalGroup","_Parser.collectNamed","_Parser.handleNamedGroup","_Parser.handleStartRecord","_Parser.handleDigit","_Parser.handleIdentifier","_Universe.evalTypeVariable","_Parser.handleTypeArguments","_Parser.handleArguments","_Parser.handleExtendedOperations","_Parser.toType","_Parser.toTypes","_Parser.toTypesNamed","_Parser.indexToType","isSubtype","_isSubtype","isBottomType","_isF
unctionSubtype","_isInterfaceSubtype","_Utils.newArrayOrEmpty","_areArgumentsSubtypes","_isRecordSubtype","isNullable","isSoundTopType","_Utils.objectAssign","_AsyncRun._initializeScheduleImmediate","_AsyncRun._scheduleImmediateJsOverride","_AsyncRun._scheduleImmediateWithSetImmediate","_AsyncRun._scheduleImmediateWithTimer","_TimerImpl","_makeAsyncAwaitCompleter","_AsyncAwaitCompleter._future","_asyncStartSync","_asyncAwait","_asyncReturn","_asyncRethrow","_awaitOnObject","_wrapJsFunctionForAsync","AsyncError.defaultStackTrace","_interceptError","_interceptUserError","_Future._chainCoreFuture","_Future._propagateToListeners","_registerErrorHandler","_microtaskLoop","_startMicrotaskLoop","_scheduleAsyncCallback","_schedulePriorityAsyncCallback","scheduleMicrotask","StreamIterator","_runGuarded","_BufferingStreamSubscription._registerErrorHandler","_nullErrorHandler","_nullDoneHandler","_rootHandleError","_rootRun","_rootRunUnary","_rootRunBinary","_rootScheduleMicrotask","_HashMap._getTableEntry","_HashMap._setTableEntry","_HashMap._newHashTable","LinkedHashMap._literal","LinkedHashMap._empty","MapBase.mapToString","_Base64Encoder.encodeChunk","_Base64Decoder.decodeChunk","_Base64Decoder._allocateBuffer","_Base64Decoder._trimPaddingChars","_Base64Decoder._checkPadding","Error._throw","List.filled","List.of","List._of","List._ofArray","JSArray.markGrowable","String.fromCharCodes","String._stringFromUint8List","StringBuffer._writeAll","NoSuchMethodError.withInvocation","StackTrace.current","DateTime._fourDigits","DateTime._threeDigits","DateTime._twoDigits","Error.safeToString","Error.throwWithStackTrace","AssertionError","ArgumentError","ArgumentError.value","RangeError.value","RangeError.range","RangeError.checkValidRange","RangeError.checkNotNegative","IndexError.withLength","UnsupportedError","UnimplementedError","StateError","ConcurrentModificationError","Exception","FormatException","Iterable.iterableToShortString","Iterable.iterableToFullString","_iterableParts
ToStrings","Object.hash","_convertDartFunctionFast","_callDartFunctionFast","allowInterop","_functionToJS1","_callDartFunctionFast1","_noJsifyRequired","jsify","callMethod","promiseToFuture","_Completer.future","Completer","_noDartifyRequired","dartify","findNALUIndices","ParticipantKeyHandler","getTrackCryptor","FrameCryptor","FrameCryptor.sifGuard","FrameCryptor.setParticipant","unsetCryptorParticipant","main","Logger","printString","throwLateFieldNI","throwLateFieldADI","IterableExtension.firstWhereOrNull","getAlgoOptions","Interceptor.hashCode","Interceptor.==","Interceptor.toString","Interceptor.noSuchMethod","Interceptor.runtimeType","JSBool.toString","JSBool.hashCode","JSBool.runtimeType","JSNull.==","JSNull.toString","JSNull.hashCode","LegacyJavaScriptObject.toString","LegacyJavaScriptObject.hashCode","LegacyJavaScriptObject.runtimeType","JavaScriptFunction.toString","JavaScriptBigInt.toString","JavaScriptBigInt.hashCode","JavaScriptSymbol.toString","JavaScriptSymbol.hashCode","JSArray.add","JSArray.addAll","JSArray._addAllFromArray","JSArray.map","JSArray.elementAt","JSArray.toString","JSArray.iterator","JSArray.hashCode","JSArray.length","JSArray.[]","JSArray.[]=","JSArray.runtimeType","getRuntimeTypeOfArray","ArrayIterator.current","ArrayIterator.moveNext","ArrayIterator._current","JSNumber.toInt","JSNumber.truncateToDouble","JSNumber.toRadixString","JSNumber.toString","JSNumber.hashCode","JSNumber.%","JSNumber._tdivFast","JSNumber._tdivSlow","JSNumber._shrOtherPositive","JSNumber._shrBothPositive","JSNumber.runtimeType","JSInt.runtimeType","JSNumNotInt.runtimeType","JSString.endsWith","JSString.startsWith","JSString.substring","JSString.substring[function-entry$1]","JSString.*","JSString.lastIndexOf","JSString.toString","JSString.hashCode","JSString.runtimeType","JSString.length","JSString.[]","_CopyingBytesBuilder.add","_CopyingBytesBuilder._grow","_CopyingBytesBuilder.toBytes","NativeUint8List.fromList","_CopyingBytesBuilder.length","LateError.toString
","ListIterable.iterator","ListIterable.map","ListIterator.current","ListIterator.moveNext","ListIterator._current","MappedIterable.iterator","MappedIterable.length","MappedIterator.moveNext","MappedIterator.current","MappedIterator._current","MappedListIterable.length","MappedListIterable.elementAt","WhereIterable.iterator","WhereIterable.map","WhereIterator.moveNext","WhereIterator.current","Symbol.hashCode","Symbol.toString","Symbol.==","ConstantMap.toString","ConstantStringMap.length","ConstantStringMap._keys","ConstantStringMap.containsKey","ConstantStringMap.[]","ConstantStringMap.forEach","ConstantStringMap.keys","_KeysOrValues.length","_KeysOrValues.iterator","_KeysOrValuesOrElementsIterator.current","_KeysOrValuesOrElementsIterator.moveNext","_KeysOrValuesOrElementsIterator._current","JSInvocationMirror.memberName","JSInvocationMirror.positionalArguments","JSInvocationMirror.namedArguments","Primitives.functionNoSuchMethod.","TypeErrorDecoder.matchTypeError","NullError.toString","JsNoSuchMethodError.toString","UnknownJsTypeError.toString","NullThrownFromJavaScriptException.toString","_StackTrace.toString","Closure.toString","StaticClosure.toString","BoundClosure.==","BoundClosure.hashCode","BoundClosure.toString","_CyclicInitializationError.toString","RuntimeError.toString","_AssertionError.toString","JsLinkedHashMap.keys","JsLinkedHashMap.length","JsLinkedHashMap.containsKey","JsLinkedHashMap._containsTableEntry","JsLinkedHashMap.[]","JsLinkedHashMap.internalGet","JsLinkedHashMap._getBucket","JsLinkedHashMap.[]=","JsLinkedHashMap.internalSet","JsLinkedHashMap.putIfAbsent","JsLinkedHashMap.remove","JsLinkedHashMap.forEach","JsLinkedHashMap._addHashTableEntry","JsLinkedHashMap._removeHashTableEntry","JsLinkedHashMap._modified","JsLinkedHashMap._newLinkedCell","JsLinkedHashMap._unlinkCell","JsLinkedHashMap.internalComputeHashCode","JsLinkedHashMap.internalFindBucketIndex","JsLinkedHashMap.toString","JsLinkedHashMap._newHashTable","LinkedHashMapKeysIterable.le
ngth","LinkedHashMapKeysIterable.iterator","LinkedHashMapKeyIterator.current","LinkedHashMapKeyIterator.moveNext","LinkedHashMapKeyIterator._current","initHooks.","NativeByteBuffer.runtimeType","NativeByteBuffer.asUint8List","NativeByteBuffer.asUint8List[function-entry$0]","NativeTypedData.buffer","NativeTypedData._invalidPosition","NativeTypedData._checkPosition","_UnmodifiableNativeByteBufferView.asUint8List","_UnmodifiableNativeByteBufferView.asUint8List[function-entry$0]","NativeByteData.runtimeType","NativeByteData._setInt8","NativeTypedArray.length","NativeTypedArrayOfDouble.[]","NativeTypedArrayOfInt.setRange","NativeFloat32List.runtimeType","NativeFloat64List.runtimeType","NativeInt16List.runtimeType","NativeInt16List.[]","NativeInt32List.runtimeType","NativeInt32List.[]","NativeInt8List.runtimeType","NativeInt8List.[]","NativeUint16List.runtimeType","NativeUint16List.[]","NativeUint32List.runtimeType","NativeUint32List.[]","NativeUint8ClampedList.runtimeType","NativeUint8ClampedList.length","NativeUint8ClampedList.[]","NativeUint8List.runtimeType","NativeUint8List.length","NativeUint8List.[]","NativeUint8List.sublist","NativeUint8List.sublist[function-entry$1]","Rti._eval","Rti._bind","_rtiBind","_Type.toString","_Error.toString","_AsyncRun._initializeScheduleImmediate.internalCallback","_AsyncRun._initializeScheduleImmediate.","_AsyncRun._scheduleImmediateJsOverride.internalCallback","_AsyncRun._scheduleImmediateWithSetImmediate.internalCallback","_TimerImpl.internalCallback","_AsyncAwaitCompleter.complete","_AsyncAwaitCompleter.completeError","_awaitOnObject.","_wrapJsFunctionForAsync.","AsyncError.toString","_BroadcastSubscription._onPause","_BroadcastSubscription._onResume","_BroadcastSubscription._next","_BroadcastSubscription._previous","_BroadcastStreamController._mayAddEvent","_BroadcastStreamController._subscribe","_DoneStreamSubscription","_BufferingStreamSubscription","_BufferingStreamSubscription._registerDataHandler","_BufferingStreamSubscripti
on.zoned","_BufferingStreamSubscription._registerDoneHandler","_BroadcastSubscription","_BroadcastStreamController._addEventError","_BroadcastStreamController._forEachListener","_BroadcastStreamController._callOnCancel","_BroadcastStreamController._firstSubscription","_BroadcastStreamController._lastSubscription","_SyncBroadcastStreamController._mayAddEvent","_SyncBroadcastStreamController._addEventError","_SyncBroadcastStreamController._sendData","_SyncBroadcastStreamController._sendData.","_SyncBroadcastStreamController__sendData_closure","_Completer.completeError","_Completer.completeError[function-entry$1]","_AsyncCompleter.complete","_FutureListener.matchesErrorTest","_FutureListener._errorTest","_FutureListener.handleError","_Future.then","_Future._thenAwait","_Future._setErrorObject","_Future._cloneResult","_Future._addListener","_Future._prependListeners","_Future._removeListeners","_Future._reverseListeners","_Future._chainForeignFuture","_Future._completeWithValue","_Future._completeWithResultOf","_Future._completeError","_Future._setError","_Future._asyncComplete","_Future._asyncCompleteWithValue","_Future._chainFuture","_Future._asyncCompleteError","_Future._addListener.","_Future._prependListeners.","_Future._chainForeignFuture.","_Future._chainCoreFuture.","_Future._asyncCompleteWithValue.","_Future._asyncCompleteError.","_Future._propagateToListeners.handleWhenCompleteCallback","_FutureListener.handleWhenComplete","_FutureListener._whenCompleteAction","_Future._newFutureWithSameType","_Future._propagateToListeners.handleWhenCompleteCallback.","_Future._propagateToListeners.handleValueCallback","_FutureListener.handleValue","_FutureListener._onValue","_Future._propagateToListeners.handleError","_FutureListener.hasErrorCallback","Stream.length","Stream.length.","Stream_length_closure","_Future._complete","_ControllerStream.hashCode","_ControllerStream.==","_ControllerSubscription._onPause","_ControllerSubscription._onResume","_BufferingStreamSubscriptio
n._add","_BufferingStreamSubscription._onPause","_BufferingStreamSubscription._onResume","_BufferingStreamSubscription._addPending","_BufferingStreamSubscription._sendData","_BufferingStreamSubscription._checkState","_BufferingStreamSubscription._mayResumeInput","_BufferingStreamSubscription._pending","_StreamImpl.listen","_StreamImpl.listen[function-entry$1]","_PendingEvents.schedule","_PendingEvents.schedule.","_PendingEvents.handleNext","_DoneStreamSubscription._onMicrotask","_DoneStreamSubscription._onDone","_rootHandleError.","_RootZone.runGuarded","_RootZone.runUnaryGuarded","_RootZone.bindCallbackGuarded","_RootZone.[]","_RootZone.run","_RootZone.runUnary","_RootZone.runBinary","_RootZone.registerBinaryCallback","_RootZone.bindCallbackGuarded.","_HashMap.keys","_HashMap.length","_HashMap.containsKey","_HashMap._containsKey","_HashMap.[]","_HashMap._get","_HashMap.[]=","_IdentityHashMap._computeHashCode","_HashMap.forEach","_HashMap._computeKeys","_HashMap._addHashTableEntry","_HashMap._getBucket","_IdentityHashMap._findBucketIndex","_HashMapKeyIterable.length","_HashMapKeyIterable.iterator","_HashMapKeyIterator.current","_HashMapKeyIterator.moveNext","_HashMapKeyIterator._current","ListBase.iterator","ListBase.elementAt","ListBase.map","ListBase.toString","MapBase.forEach","MapBase.length","MapBase.toString","MapBase.mapToString.","StringBuffer.write","MapView.[]","MapView.forEach","MapView.length","MapView.keys","MapView.toString","Base64Encoder.convert","_Base64Encoder.encode","Base64Decoder.convert","_Base64Decoder.decode","NoSuchMethodError.toString.","_symbolToString","DateTime.==","DateTime.hashCode","DateTime.toString","_Enum.toString","Error.stackTrace","AssertionError.toString","ArgumentError._errorName","ArgumentError._errorExplanation","ArgumentError.toString","RangeError.invalidValue","RangeError._errorName","RangeError._errorExplanation","IndexError.invalidValue","IndexError._errorName","IndexError._errorExplanation","NoSuchMethodError.toString",
"UnsupportedError.toString","UnimplementedError.toString","StateError.toString","ConcurrentModificationError.toString","OutOfMemoryError.toString","OutOfMemoryError.stackTrace","StackOverflowError.toString","StackOverflowError.stackTrace","_Exception.toString","FormatException.toString","Iterable.map","Iterable.length","Iterable.elementAt","Iterable.toString","Null.hashCode","Null.toString","Object.hashCode","Object.==","Object.toString","Object.noSuchMethod","Object.runtimeType","_StringStackTrace.toString","StringBuffer.length","StringBuffer.toString","jsify._convert","promiseToFuture.","dartify.convert","DateTime._withValueChecked","_dateToDateTime","NullRejectionException.toString","_JSSecureRandom","_JSSecureRandom.nextInt","NativeByteData.setUint32","CryptorError._enumToString","FrameCryptor.enabled","FrameCryptor.setupTransform","FrameCryptor.setupTransform[function-entry$0$kind$operation$readable$trackId$writable]","FrameCryptor.postMessage","FrameCryptor.getUnencryptedBytes","FrameCryptor.readFrameInfo","FrameCryptor.enqueueFrame","FrameCryptor.encodeFunction","FrameCryptor.makeIv","NativeByteData.setInt8","BytesBuilder","FrameCryptor.decodeFunction","DateTime._now","DateTime.now","ParticipantKeyHandler.decryptionSuccess","FrameCryptor.decodeFunction.decryptFrameInternal","FrameCryptor.decodeFunction.ratchedKeyInternal","KeyOptions.toString","KeyProvider.getParticipantKeyHandler","ListBase.isNotEmpty","KeyProvider.getSharedKeyHandler","ParticipantKeyHandler.decryptionFailure","ParticipantKeyHandler.exportKey","ParticipantKeyHandler.ratchetKey","ParticipantKeyHandler.ratchetMaterial","ParticipantKeyHandler.getKeySet","ParticipantKeyHandler.setKey","ParticipantKeyHandler.setKey[function-entry$1]","ParticipantKeyHandler.setKeySetFromMaterial","ParticipantKeyHandler.deriveKeys","ParticipantKeyHandler.ratchet","ParticipantKeyHandler._#ParticipantKeyHandler#cryptoKeyRing#A","SifGuard.recordUserFrame","SifGuard.reset","getTrackCryptor.","unsetCryptorParticipant.",
"main.","print","base64Decode","Base64Codec.decode","KeyProvider","JSArray.where","FrameCryptor.setEnabled","KeyProvider.setSharedKey","base64Encode","Codec.encode","FrameCryptor.setKeyIndex","FrameCryptor.setSifTrailer","FrameCryptor.updateCodec","main..","Level.==","Level.hashCode","Level.toString","LogRecord.toString","Logger.fullName","JSString.isNotEmpty","Logger.level","Logger.log","Logger.isLoggable","Logger._getStream","StreamController.broadcast","_BroadcastStreamController.stream","Logger._publish","Logger._controller","Logger.","Logger._named","Logger._internal","DART_CLOSURE_PROPERTY_NAME","_CopyingBytesBuilder._emptyList","TypeErrorDecoder.noSuchMethodPattern","TypeErrorDecoder.notClosurePattern","TypeErrorDecoder.nullCallPattern","TypeErrorDecoder.nullLiteralCallPattern","TypeErrorDecoder.undefinedCallPattern","TypeErrorDecoder.undefinedLiteralCallPattern","TypeErrorDecoder.nullPropertyPattern","TypeErrorDecoder.nullLiteralPropertyPattern","TypeErrorDecoder.undefinedPropertyPattern","TypeErrorDecoder.undefinedLiteralPropertyPattern","_AsyncRun._scheduleImmediateClosure","_Base64Decoder._inverseAlphabet","NativeInt8List.fromList","_Base64Decoder._emptyBuffer","_hashSeed","Random._secureRandom","logger","Logger.root","","ArrayIterator","AsyncError","Base64Codec","Base64Decoder","Base64Encoder","BoundClosure","ByteBuffer","ByteData","Closure","Closure0Args","Closure2Args","Codec","ConstantMap","ConstantMapView","ConstantStringMap","Converter","CryptorError","DateTime","EfficientLengthIterable","EfficientLengthMappedIterable","Error","ExceptionAndStackTrace","FixedLengthListMixin","Float32List","Float64List","FrameCryptor_decodeFunction_decryptFrameInternal","FrameCryptor_decodeFunction_ratchedKeyInternal","FrameInfo","Function","Future","IndexError","Int16List","Int32List","Int8List","Interceptor","Invocation","Iterable","IterableExtension|firstWhereOrNull","Iterator","JSArray","JSBool","JSInt","JSInvocationMirror","JSNull","JSNumNotInt","JSNumber","JSObj
ect","JSString","JSUnmodifiableArray","JS_CONST","JavaScriptBigInt","JavaScriptFunction","JavaScriptIndexingBehavior","JavaScriptObject","JavaScriptSymbol","JsLinkedHashMap","KeyOptions","KeySet","LateError","LegacyJavaScriptObject","Level","LinkedHashMap","LinkedHashMapCell","LinkedHashMapKeyIterator","LinkedHashMapKeysIterable","List","ListBase","ListIterable","ListIterator","LogRecord","Logger_Logger_closure","Map","MapBase","MapBase_mapToString_closure","MapView","MappedIterator","MappedListIterable","NativeByteBuffer","NativeFloat32List","NativeFloat64List","NativeInt16List","NativeInt32List","NativeInt8List","NativeTypedArray","NativeTypedArrayOfDouble","NativeTypedArrayOfInt","NativeTypedData","NativeUint16List","NativeUint32List","NativeUint8ClampedList","NoSuchMethodError","NoSuchMethodError_toString_closure","Null","NullError","NullRejectionException","NullThrownFromJavaScriptException","Object","OutOfMemoryError","Pattern","PlainJavaScriptObject","Primitives_functionNoSuchMethod_closure","RangeError","Record","Rti","RuntimeError","SentinelValue","SifGuard","StackOverflowError","StackTrace","StaticClosure","Stream","StreamController","StreamSubscription","String","StringBuffer","Symbol","TearOffClosure","TrustedGetRuntimeType","TypeError","TypeErrorDecoder","Uint16List","Uint32List","Uint8ClampedList","Uint8List","UnknownJavaScriptObject","UnknownJsTypeError","UnmodifiableMapView","WhereIterable","WhereIterator","Zone","_AddStreamState","_AssertionError","_AsyncAwaitCompleter","_AsyncCallbackEntry","_AsyncCompleter","_AsyncRun__initializeScheduleImmediate_closure","_AsyncRun__initializeScheduleImmediate_internalCallback","_AsyncRun__scheduleImmediateJsOverride_internalCallback","_AsyncRun__scheduleImmediateWithSetImmediate_internalCallback","_Base64Decoder","_Base64Encoder","_BroadcastStream","_BroadcastStreamController","_Completer","_ControllerStream","_ControllerSubscription","_CopyingBytesBuilder","_CyclicInitializationError","_DelayedData","_DelayedEv
ent","_Enum","_Error","_EventDispatch","_Exception","_FunctionParameters","_Future","_FutureListener","_Future__addListener_closure","_Future__asyncCompleteError_closure","_Future__asyncCompleteWithValue_closure","_Future__chainCoreFuture_closure","_Future__chainForeignFuture_closure","_Future__prependListeners_closure","_Future__propagateToListeners_handleError","_Future__propagateToListeners_handleValueCallback","_Future__propagateToListeners_handleWhenCompleteCallback","_Future__propagateToListeners_handleWhenCompleteCallback_closure","_HashMap","_HashMapKeyIterable","_HashMapKeyIterator","_IdentityHashMap","_JS_INTEROP_INTERCEPTOR_TAG","_KeysOrValues","_KeysOrValuesOrElementsIterator","_NativeTypedArrayOfDouble&NativeTypedArray&ListMixin","_NativeTypedArrayOfDouble&NativeTypedArray&ListMixin&FixedLengthListMixin","_NativeTypedArrayOfInt&NativeTypedArray&ListMixin","_NativeTypedArrayOfInt&NativeTypedArray&ListMixin&FixedLengthListMixin","_PendingEvents","_PendingEvents_schedule_closure","_Required","_RootZone","_RootZone_bindCallbackGuarded_closure","_StackTrace","_StreamControllerLifecycle","_StreamImpl","_StreamIterator","_StringStackTrace","_SyncBroadcastStreamController","_TimerImpl_internalCallback","_TypeError","_UnmodifiableMapMixin","_UnmodifiableMapView&MapView&_UnmodifiableMapMixin","_UnmodifiableNativeByteBufferView","_Zone","_allocateBuffer","_awaitOnObject_closure","_canonicalRecipeJoin","_canonicalRecipeJoinNamed","_chainCoreFuture","_checkPadding","_computeFieldNamed","_computeSignatureFunctionNewRti","_createFutureOrRti","_createGenericFunctionRti","_createQuestionRti","_createStarRti","_current","_empty","_emptyBuffer","_emptyList","_fourDigits","_generalApplyFunction","_getCanonicalRecipe","_getFutureFromFutureOr","_getQuestionFromStar","_getTableEntry","_identityHashCodeProperty","_initializeScheduleImmediate","_installTypeTests","_interceptorFieldNameCache","_inverseAlphabet","_isInCallbackLoop","_isUnionOfFunctionType","_lastCallback","_lastP
riorityCallback","_literal","_loggers","_lookupBindingRti","_lookupFunctionRti","_lookupFutureOrRti","_lookupGenericFunctionParameterRti","_lookupGenericFunctionRti","_lookupInterfaceRti","_lookupQuestionRti","_lookupRecordRti","_lookupStarRti","_lookupTerminalRti","_newHashTable","_nextCallback","_nextNumber","_objectTypeNameNewRti","_of","_propagateToListeners","_receiverFieldNameCache","_rootHandleError_closure","_scheduleImmediateClosure","_scheduleImmediateJsOverride","_scheduleImmediateWithSetImmediate","_scheduleImmediateWithTimer","_secureRandom","_setTableEntry","_stringFromUint8List","_threeDigits","_throw","_trimPaddingChars","_twoDigits","_wrapJsFunctionForAsync_closure","_writeAll","addErasedTypes","addRules","alternateTagFunction","applyFunction","async__AsyncRun__scheduleImmediateJsOverride$closure","async__AsyncRun__scheduleImmediateWithSetImmediate$closure","async__AsyncRun__scheduleImmediateWithTimer$closure","async___nullDoneHandler$closure","async___nullErrorHandler$closure","async___startMicrotaskLoop$closure","bind","bool","checkNotNegative","checkValidRange","collectArray","combine","compose","create","cspForwardCall","cspForwardInterceptedCall","current","dartify_convert","decodeChunk","defaultStackTrace","dispatchRecordsForInstanceTags","double","encodeChunk","eval","evalInEnvironment","evalRecipe","extractPattern","extractStackTrace","filled","findErasedType","findRule","finish","fixed","forType","forwardCallTo","forwardInterceptedCallTo","fromCharCodes","fromMessage","fromTearOff","functionNoSuchMethod","getDay","getHours","getInterceptor$","getInterceptor$asx","getInterceptor$ax","getInterceptor$x","getMilliseconds","getMinutes","getMonth","getSeconds","getTagFunction","getTrackCryptor_closure","getYear","handleArguments","handleDigit","handleExtendedOperations","handleIdentifier","handleTypeArguments","hash","indexToType","initHooks_closure","initNativeDispatchFlag","int","interceptorOf","interceptorsForUncacheableTags","iterableToFullSt
ring","iterableToShortString","jsify__convert","keyProviders","lazyAsJsDate","main__closure","main_closure","mapToString","markFixed","newArrayOrEmpty","noSuchMethodPattern","notClosurePattern","nullCallPattern","nullLiteralCallPattern","nullLiteralPropertyPattern","nullPropertyPattern","num","objectAssign","objectTypeName","of","parse","participantCryptors","promiseToFuture_closure","prototypeForTagFunction","provokeCallErrorOn","provokePropertyErrorOn","range","receiverOf","root","safeToString","stringFromNativeUint8List","throwWithStackTrace","toStringVisiting","toType","toTypes","toTypesNamed","trySetStackTrace","undefinedCallPattern","undefinedLiteralCallPattern","undefinedLiteralPropertyPattern","undefinedPropertyPattern","unsetCryptorParticipant_closure","value","view","withInvocation","withLength","$add","$and","$div","$eq","$ge","$gt","$index","$indexSet","$le","$lt","$mod","$mul","$negate","$or","$shl","$shr","$sub","$tdiv","$xor","%","*","==","[]","[]=","_addEventError","_captured_T_1","_captured__convertedObjects_0","_captured_arguments_2","_captured_bodyFunction_0","_captured_completer_0","_captured_data_1","_captured_decryptFrameInternal_3","_captured_dispatch_1","_captured_div_1","_captured_e_1","_captured_f_1","_captured_getTag_0","_captured_getUnknownTag_0","_captured_handleMessage_0","_captured_hasError_2","_captured_headerLength_5","_captured_ivLength_6","_captured_iv_3","_captured_joinedResult_0","_captured_namedArgumentList_1","_captured_originalSource_1","_captured_protected_0","_captured_prototypeForTag_0","_captured_s_2","_captured_sb_1","_captured_sourceResult_1","_captured_span_2","_captured_srcFrame_4","_captured_target_1","_captured_this_0","abs","add","addAll","argumentCount","asUint8List","bindCallback","bindCallbackGuarded","buffer","call","callback","ceilToDouble","checkGrowable","children","close","code","codeUnitAt","codec","comma","complete","completeError","config","consecutiveSifCount","contains","containsKey","convert","count","
createBuffer","cryptoKeyRing","currentKeyIndex","currentkeySet","dart:_interceptors#_addAllFromArray","dart:_interceptors#_codeUnitAt","dart:_interceptors#_current=","dart:_interceptors#_index","dart:_interceptors#_isInt32","dart:_interceptors#_iterable","dart:_interceptors#_length","dart:_interceptors#_shlPositive","dart:_interceptors#_shrBothPositive","dart:_interceptors#_shrOtherPositive","dart:_interceptors#_shrReceiverPositive","dart:_interceptors#_tdivFast","dart:_interceptors#_tdivSlow","dart:_interceptors#_toListFixed","dart:_interceptors#_toListGrowable","dart:_internal#_buffer","dart:_internal#_current=","dart:_internal#_f","dart:_internal#_grow","dart:_internal#_index","dart:_internal#_iterable","dart:_internal#_iterator","dart:_internal#_length","dart:_internal#_message","dart:_internal#_name","dart:_internal#_source","dart:_js_helper#_addHashTableEntry","dart:_js_helper#_arguments","dart:_js_helper#_argumentsExpr","dart:_js_helper#_box_0","dart:_js_helper#_captured_arguments_2","dart:_js_helper#_captured_getTag_0","dart:_js_helper#_captured_getUnknownTag_0","dart:_js_helper#_captured_namedArgumentList_1","dart:_js_helper#_captured_prototypeForTag_0","dart:_js_helper#_cell","dart:_js_helper#_containsTableEntry","dart:_js_helper#_current=","dart:_js_helper#_deleteTableEntry","dart:_js_helper#_elements","dart:_js_helper#_exception","dart:_js_helper#_expr","dart:_js_helper#_first","dart:_js_helper#_getBucket","dart:_js_helper#_getTableBucket","dart:_js_helper#_getTableCell","dart:_js_helper#_index","dart:_js_helper#_interceptor","dart:_js_helper#_internalName","dart:_js_helper#_irritant","dart:_js_helper#_jsIndex","dart:_js_helper#_keys","dart:_js_helper#_kind","dart:_js_helper#_last","dart:_js_helper#_length","dart:_js_helper#_map","dart:_js_helper#_memberName","dart:_js_helper#_message","dart:_js_helper#_method","dart:_js_helper#_modifications","dart:_js_helper#_modified","dart:_js_helper#_name","dart:_js_helper#_namedArgumentNames","dart:_js_helper#_newH
ashTable","dart:_js_helper#_newLinkedCell","dart:_js_helper#_next","dart:_js_helper#_nums","dart:_js_helper#_pattern","dart:_js_helper#_previous","dart:_js_helper#_receiver","dart:_js_helper#_removeHashTableEntry","dart:_js_helper#_rest","dart:_js_helper#_setKeys","dart:_js_helper#_setTableEntry","dart:_js_helper#_strings","dart:_js_helper#_target","dart:_js_helper#_trace","dart:_js_helper#_typeArgumentCount","dart:_js_helper#_unlinkCell","dart:_js_helper#_values","dart:_native_typed_data#_checkMutable","dart:_native_typed_data#_checkPosition","dart:_native_typed_data#_data","dart:_native_typed_data#_getUint32","dart:_native_typed_data#_invalidPosition","dart:_native_typed_data#_isUnmodifiable","dart:_native_typed_data#_nativeBuffer","dart:_native_typed_data#_setInt8","dart:_native_typed_data#_setRangeFast","dart:_native_typed_data#_setUint32","dart:_rti#_as","dart:_rti#_bind","dart:_rti#_bindCache","dart:_rti#_cachedRuntimeType","dart:_rti#_canonicalRecipe","dart:_rti#_dynamicCheckData","dart:_rti#_eval","dart:_rti#_evalCache","dart:_rti#_is","dart:_rti#_isSubtypeCache","dart:_rti#_kind","dart:_rti#_message","dart:_rti#_named","dart:_rti#_optionalPositional","dart:_rti#_precomputed1","dart:_rti#_primary","dart:_rti#_requiredPositional","dart:_rti#_rest","dart:_rti#_rti","dart:_rti#_specializedTestResource","dart:async#_add","dart:async#_addEventError","dart:async#_addListener","dart:async#_addPending","dart:async#_addStreamState","dart:async#_asyncComplete","dart:async#_asyncCompleteError","dart:async#_asyncCompleteWithValue","dart:async#_box_0","dart:async#_box_1","dart:async#_callOnCancel","dart:async#_canFire","dart:async#_cancelFuture","dart:async#_captured_bodyFunction_0","dart:async#_captured_callback_0","dart:async#_captured_callback_1","dart:async#_captured_data_1","dart:async#_captured_dispatch_1","dart:async#_captured_div_1","dart:async#_captured_e_1","dart:async#_captured_error_0","dart:async#_captured_error_1","dart:async#_captured_f_1","dart:async#_cap
tured_future_1","dart:async#_captured_hasError_2","dart:async#_captured_joinedResult_0","dart:async#_captured_listener_1","dart:async#_captured_originalSource_1","dart:async#_captured_protected_0","dart:async#_captured_s_2","dart:async#_captured_sourceResult_1","dart:async#_captured_span_2","dart:async#_captured_stackTrace_1","dart:async#_captured_stackTrace_2","dart:async#_captured_target_1","dart:async#_captured_this_0","dart:async#_captured_this_1","dart:async#_captured_value_1","dart:async#_chainForeignFuture","dart:async#_chainFuture","dart:async#_chainSource","dart:async#_checkState","dart:async#_clearPendingComplete","dart:async#_cloneResult","dart:async#_complete","dart:async#_completeError","dart:async#_completeWithResultOf","dart:async#_completeWithValue","dart:async#_controller","dart:async#_createSubscription","dart:async#_doneFuture","dart:async#_error","dart:async#_errorTest","dart:async#_eventScheduled","dart:async#_eventState","dart:async#_expectsEvent","dart:async#_firstSubscription=","dart:async#_forEachListener","dart:async#_future","dart:async#_handle","dart:async#_hasError","dart:async#_hasOneListener","dart:async#_hasPending","dart:async#_hasValue","dart:async#_ignoreError","dart:async#_isCanceled","dart:async#_isChained","dart:async#_isComplete","dart:async#_isEmpty","dart:async#_isFiring","dart:async#_isInputPaused","dart:async#_isPaused","dart:async#_lastSubscription","dart:async#_mayAddEvent","dart:async#_mayAddListener","dart:async#_mayComplete","dart:async#_mayResumeInput","dart:async#_newFutureWithSameType","dart:async#_next=","dart:async#_nextListener","dart:async#_onData","dart:async#_onDone=","dart:async#_onError","dart:async#_onListen","dart:async#_onMicrotask","dart:async#_onPause","dart:async#_onResume","dart:async#_onValue","dart:async#_once","dart:async#_pending","dart:async#_prependListeners","dart:async#_previous=","dart:async#_recordPause","dart:async#_recordResume","dart:async#_removeAfterFiring","dart:async#_removeListener",
"dart:async#_removeListeners","dart:async#_resultOrListeners","dart:async#_reverseListeners","dart:async#_scheduleMicrotask","dart:async#_sendData","dart:async#_setChained","dart:async#_setError","dart:async#_setErrorObject","dart:async#_setPendingComplete","dart:async#_setValue","dart:async#_state","dart:async#_stateData","dart:async#_subscribe","dart:async#_subscription","dart:async#_thenAwait","dart:async#_tick","dart:async#_toggleEventId","dart:async#_whenCompleteAction","dart:async#_zone","dart:collection#_addHashTableEntry","dart:collection#_box_0","dart:collection#_captured_result_1","dart:collection#_computeHashCode","dart:collection#_computeKeys","dart:collection#_containsKey","dart:collection#_current=","dart:collection#_findBucketIndex","dart:collection#_get","dart:collection#_getBucket","dart:collection#_keys","dart:collection#_length","dart:collection#_map","dart:collection#_nums","dart:collection#_offset","dart:collection#_remove","dart:collection#_removeHashTableEntry","dart:collection#_rest","dart:collection#_set","dart:collection#_strings","dart:convert#_alphabet","dart:convert#_encoder","dart:convert#_state","dart:convert#_urlSafe","dart:core#_arguments","dart:core#_box_0","dart:core#_captured_sb_1","dart:core#_contents","dart:core#_enumToString","dart:core#_errorExplanation","dart:core#_errorName","dart:core#_existingArgumentNames","dart:core#_hasValue","dart:core#_memberName","dart:core#_microsecond","dart:core#_name","dart:core#_namedArguments","dart:core#_receiver","dart:core#_stackTrace","dart:core#_value","dart:core#_writeString","dart:js_util#_captured_T_1","dart:js_util#_captured__convertedObjects_0","dart:js_util#_captured_completer_0","dart:math#_buffer","dart:math#_getRandomBytes","dartException","day","decode","decodeFunction","decoder","decrypted","decryptionFailure","decryptionSuccess","deriveKeys","discardFrameWhenCryptorNotReady","elementAt","enabled","encode","encodeFunction","encoder","encryptionKey","end","endsWith","enqueueFrame
","error","errorCallback","errorZone","exportKey","failureTolerance","fine","finer","first","firstPendingEvent","floorToDouble","forEach","frameType","fullName","future","getKeySet","getParticipantKeyHandler","getRange","getSharedKeyHandler","getUint32","getUnencryptedBytes","handleError","handleNext","handleUncaughtError","handleValue","handleWhenComplete","handlesComplete","handlesError","handlesValue","hasErrorCallback","hasErrorTest","hasValidKey","hashCode","hashMapCellKey","hashMapCellValue","hour","id","inSameErrorZone","index","indexable","info","initialKeyIndex","initialKeySet","internalComputeHashCode","internalContainsKey","internalFindBucketIndex","internalGet","internalRemove","internalSet","invalidValue","isAccessor","isClosed","isEmpty","isGetter","isLoggable","isNotEmpty","isScheduled","isSifAllowed","isSync","isUndefined","isUtc","iterator","join","keyHandler","keyOptions","keyProviderOptions","keyRingSze","keys","kind","lastError","lastIndexOf","lastPendingEvent","lastSifReceivedAt","length","lengthInBytes","level","listen","listener","listenerHasError","listenerValueOrError","listeners","log","loggerName","makeIv","map","matchAsPrefix","matchTypeError","matchesErrorTest","material","memberName","message","microsecond","millisecond","millisecondsSinceEpoch","minute","modifiedObject","month","moveNext","name","namedArguments","names","next","nextInt","noSuchMethod","object","offset","offsetInBytes","onCancel","onListen","onRecord","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_#FrameCryptor#kind#A","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_box_0","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_box_1","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_decryptFrameInternal_3","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_headerLength_5","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_ivLength_6","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_iv_3","packa
ge:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_srcFrame_4","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_captured_this_2","package:dart_webrtc/src/e2ee.worker/e2ee.cryptor.dart#_enabled","package:dart_webrtc/src/e2ee.worker/e2ee.keyhandler.dart#_#ParticipantKeyHandler#cryptoKeyRing#A","package:dart_webrtc/src/e2ee.worker/e2ee.keyhandler.dart#_decryptionFailureCount","package:dart_webrtc/src/e2ee.worker/e2ee.keyhandler.dart#_hasValidKey","package:dart_webrtc/src/e2ee.worker/e2ee.worker.dart#_captured_handleMessage_0","package:dart_webrtc/src/e2ee.worker/e2ee.worker.dart#_captured_trackId_0","package:logging/src/logger.dart#_captured_name_0","package:logging/src/logger.dart#_children","package:logging/src/logger.dart#_controller","package:logging/src/logger.dart#_getStream","package:logging/src/logger.dart#_level","package:logging/src/logger.dart#_levelChangedController","package:logging/src/logger.dart#_publish","padLeft","parent","participantIdentity","participantKeys","perform","positionalArguments","postMessage","putIfAbsent","ratchet","ratchetCount","ratchetKey","ratchetMaterial","ratchetSalt","ratchetWindowSize","readFrameInfo","recordSif","recordUserFrame","registerBinaryCallback","registerCallback","registerUnaryCallback","remainder","remove","removeLast","reset","resetKeyStatus","result","run","runBinary","runGuarded","runUnary","runUnaryGuarded","runtimeType","schedule","second","sendCounts","sequenceNumber","setEnabled","setInt8","setKey","setKeyIndex","setKeySetFromMaterial","setParticipant","setRange","setSharedKey","setSifTrailer","setUint32","setupTransform","sharedKey","sharedKeyHandler","shouldChain","sifGuard","sifSequenceStartedAt","skip","source","ssrc","stackTrace","start","startsWith","state","storedCallback","stream","sublist","substring","take","then","time","timestamp","toBytes","toInt","toList","toLowerCase","toRadixString","toString","trackId","truncateToDouble","uncryptedMagicBytes","unsetParticipant","updateCodec","us
erFramesSinceSif","variableName","warning","where","worker","write","writeAll","year","zone","Rti._unstar","isTopType","_Universe._canonicalRecipeOfStar","_Universe._canonicalRecipeOfQuestion","_Universe._canonicalRecipeOfFutureOr","_Universe._canonicalRecipeOfBinding","_Universe._canonicalRecipeOfGenericFunction","Error._stringToSafeString","_HashMap._set","_Base64Encoder.createBuffer","DateTime.fromMillisecondsSinceEpoch","SifGuard.recordSif","SifGuard.isSifAllowed",">=","ByteBufferToJSArrayBuffer|get#toJS","JSAnyUtilityExtension|dartify","JSNumberToNumber|get#toDartInt","JSObjectUnsafeUtilExtension|getProperty","JSPromiseToFuture|get#toDart","NullableObjectUtilExtension|jsify","_","_addListener","_asCheck","_buffer","_callConstructorUnchecked1","_callMethodUnchecked0","_callMethodUnchecked1","_callMethodUnchecked2","_callMethodUnchecked3","_canonicalRecipeOfBinding","_canonicalRecipeOfFunction","_canonicalRecipeOfFunctionParameters","_canonicalRecipeOfFutureOr","_canonicalRecipeOfGenericFunction","_canonicalRecipeOfInterface","_canonicalRecipeOfQuestion","_canonicalRecipeOfRecord","_canonicalRecipeOfStar","_chainSource","_checkMutable","_cloneResult","_complete","_completeError","_computeHashCode","_computeIdentityHashCodeProperty","_containsTableEntry","_create1","_create2","_create3","_createBindingRti","_createFunctionRti","_createGenericFunctionParameterRti","_createInterfaceRti","_createLength","_createRecordRti","_createSubscription","_createTerminalRti","_createTimer","_error","_errorTest","_expectsEvent","_failedAsCheckError","_findRule","_future","_getBindCache","_getBindingArguments","_getBindingBase","_getBucket","_getCachedRuntimeType","_getEvalCache","_getFunctionParameters","_getFutureOrArgument","_getGenericFunctionBase","_getGenericFunctionBounds","_getGenericFunctionParameterIndex","_getInterfaceName","_getInterfaceTypeArguments","_getIsSubtypeCache","_getKind","_getNamed","_getOptionalPositional","_getPrimary","_getPropertyTrustType","_getQuesti
onArgument","_getRandomBytes","_getRecordFields","_getRecordPartialShapeTag","_getRequiredPositional","_getReturnType","_getRuntimeTypeOfArrayAsRti","_getSpecializedTestResource","_getStarArgument","_getTableBucket","_getTableCell","_grow","_handleIEtoString","_hasError","_hasOneListener","_hasPending","_hasTableEntry","_hasTimer","_installRti","_internal","_isCanceled","_isChained","_isCheck","_isClosure","_isComplete","_isDartObject","_isEmpty","_isFiring","_isInputPaused","_isSubtypeUncached","_keysFromIndex","_lookupAnyRti","_lookupDynamicRti","_lookupErasedRti","_lookupFutureRti","_lookupNeverRti","_lookupVoidRti","_mayAddListener","_mayComplete","_mayResumeInput","_name","_named","_newFutureWithSameType","_now","_objectToString","_ofArray","_onError","_onValue","_parseRecipe","_pow2roundup","_recipeJoin","_registerDataHandler","_registerDoneHandler","_removeListener","_removeListeners","_scheduleImmediate","_set","_setAsCheckFunction","_setBindCache","_setCachedRuntimeType","_setCanonicalRecipe","_setChained","_setError","_setErrorObject","_setEvalCache","_setIsTestFunction","_setKind","_setNamed","_setOptionalPositional","_setPrecomputed1","_setPrimary","_setRangeFast","_setRequiredPositional","_setRest","_setSpecializedTestResource","_setValue","_stateBits","_statePadding","_stringToSafeString","_target","_theUniverse","_trySetStackTrace","_unstar","_validate","_whenCompleteAction","_withValueChecked","_writeOne","_writeString","_zone","allocate","apply","arrayAt","arrayConcat","arrayLength","arraySplice","asBool","asInt","asRti","asRtiOrNull","asString","as_Type","broadcast","charCodeAt","collectNamed","constructorNameFallback","dateNow","dispatchRecordExtension","dispatchRecordIndexability","dispatchRecordInterceptor","dispatchRecordProto","environment","erasedTypes","evalCache","evalTypeVariable","fieldADI","fieldNI","fromList","fromMillisecondsSinceEpoch","getDispatchProperty","getIndex","getLegacyErasedRecipe","getLength","getName","getProperty","getRun
timeTypeOfInterceptorNotArray","handleNamedGroup","handleOptionalGroup","handleStartRecord","hash2","identityHashCode","instanceTypeName","interceptorFieldName","interceptorsByTag","isArray","isDigit","isIdentical","isJavaScriptSimpleObject","isRequired","jsHasOwnProperty","jsonEncodeNative","leafTags","listToString","lookupSupertype","lookupTypeVariable","mapGet","mapSet","markFixedList","markGrowable","markUnmodifiableList","normalize","now","objectKeys","objectToHumanReadableString","pop","position","pow","printToConsole","propertyGet","provokeCallErrorOnNull","provokeCallErrorOnUndefined","provokePropertyErrorOnNull","provokePropertyErrorOnUndefined","push","pushStackFrame","receiverFieldName","recipe","secure","sharedEmptyArray","stack","staticInteropGlobalContext","stringIndexOf","stringLastIndexOfUnchecked","stringSafeToString","stringSplit","thenAwait","toGenericFunctionParameter","tryStringifyException","typeRules","typed","universe","unmangleGlobalNameIfPreservedAnyways","unsafeCast","unvalidated","writeFinalChunk","zoned"], + "mappings": 
"A;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;sBA4FAA;MA6BEA,gEAEFA;K;wBASAC;;uBApDSA,KACiBA;MAsDxBA;aACMA;UACFA;yBAzDGA,KACiBA;;MA6DxBA;sBAhB6BA;QAkB3BA;UAAoBA,aAnBaA,EA0ErCA;QAtDIA;UAAmBA,aAsDvBA;QArDsBA;QAClBA;UACEA,aAvB+BA,EA0ErCA;kBAxEmCA;UA8B7BA,sBAAMA,kDAA4CA,IAD3BA;;2BAOTA;;QAEdA;;cAuCGC;;UCqpFAC,yCADgBA;kCD9oFjBF;;MA7CNA;QAAyBA,kBAkC3BA;MA9BgBA;MACdA;QAAyBA,kBA6B3BA;MAvBEA;QAIEA,QAHcA,2BAsBlBA;MAjBcA;MACZA;QAEEA,QAIcA,8BAUlBA;;QAPIA,QAHcA,8BAUlBA;MALEA;cAUOG;;UCqpFAD,yCADgBA;QCzxFvBC,iDF+HOH;QAFLA,QAEKA,gCACTA;;MADEA,QAAOA,gCACTA;K;yBG/KUI;MAWNA;QACEA,sBAAiBA;MAEnBA,OAAOA,4BAAqBA,uBAC9BA;K;6BA2EQC;MACkCA;;MAAtCA,SAAoEA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;sBCmE7DC;MACFA;MACAA;MACPA,wBACFA;K;qBAEWC;MACFA;MACAA;MACPA,gDACFA;K;oBA+oBAC;MAIAA,YACFA;K;sBA0SKC;MACHA;iBAAoBA,iBAAiBA,gBAArCA;wBAAoBA,iBACIA;UAAsBA,WAGhDA;MADEA,YACFA;K;iCCrwBUC;MACOA;QACXA,OAsBJA,sIAnBAA;MADEA,OAGFA,wGAFAA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBJ9RKC;0BKhFOA,mBACLA;MLiFPA;QAAuBA,gBAGzBA;MADEA,mBACFA;K;iBAuBKC;MACHA;;uBDF0CA;QCIxCA;UAAoBA,aAGxBA;;MADEA,OAAcA,oDAChBA;K;KAEOC;MACLA;;QAAqBA,YAmBvBA;MAlBEA;QACEA;UAEEA,iBAeNA;aAbSA;QACLA,aAYJA;WAXSA;QACLA,cAUJA;WATSA;QACLA,aAQJA;MANeA;MAKbA,aACFA;K;6BA2HaC;;oBAELA;;QAUFA;mBATUA;MACZA;;QAgJOC;;MA5IPD,WACFA;K;6BA0IcC;MACZA,iDACFA;K;oCAOcC;MACRA;MMgnBCA,uBNhnBuBA;QAG1BA,sBM4mBMA,6BNxkBVA;MAjCoBA;MAGPA,qBAFgBA,yCACAA;QCvLtBA,gBACHA;QDyMAA;UAAwCA,mBAY5CA;6BAXsBA;QAClBA;wCACwBA;UACtBA;YAEEA,sBAMRA;;;MADEA,OM0kBKA,eADGA,6BNxkBVA;K;2BAecC;MACkCA;QAC5CA,OAAOA,qBAcXA;MAZEA;QACEA,OAs2EGC,sBA31EPD;MAPWA;QAAPA,2BAOJA;MADEA,yBAvBcA,yCAwBhBA;K;wCAyFcE;MAGZA;MACSA,kDAD8CA;QACrDA,iDAcJA;MAXEA;QACkBA;QAOZA;;;MAENA,
aACFA;K;2BA4HOC;;yCOljB2BA;MPujBhCA,eAAOA,KACTA;K;sBAmBWC;MACTA,eAAiBA,SAC4BA,2DACHA,qDAC5CA;K;uBAKWC;MACTA,eAAiBA,SAC4BA,wDACHA,kDAC5CA;K;qBAKWC;MACTA,eAAiBA,SAC6BA,uDACHA,iDAC7CA;K;uBAKWC;MACTA,eAAiBA,SAC8BA,wDACHA,kDAC9CA;K;yBAKWC;MACTA,eAAiBA,SACgCA,0DACHA,oDAChDA;K;yBAKWC;MACTA,eAAiBA,SACgCA,0DACHA,oDAChDA;K;8BAKWC;MACTA,eAAiBA,SAEoCA,+DACFA,yDACrDA;K;mCA2BOC;MAEDA;;MAMFA;MAiBkDA;QAlBlDA,oCAAqCA;MACrCA;QAGKA;kDQjzBWA;QRmzBhBA,4BAAuBA;MAWzBA,OAAOA,6BAvoBTC,0BAwoBMD,mDACNA;K;4BAiCOE;MAGLA;MAAwBA;qDQp2BNA;;QRo2BiBA;MAAnCA;2CAGgCA;QAC9BA;UAGWA;YAAPA,yBAiDRA;eA/CWA;UAGIA;YAAPA,+CA4CRA;eA1CWA;UAGIA;YAAPA,uEAuCRA;eApCWA;UAGIA;YAAPA,+FAiCRA;eA9BWA;UAGIA;YAAPA,uHA2BRA;eAxBWA;UAGIA;YAAPA,+IAqBRA;0BAPiBA;QACbA;UACEA,OAAOA,4CAKbA;;MADEA,OAAOA,kFACTA;K;oCAEOC;MAIqBA;gFAGLA;kCAMSA;0CAEDA;MAG7BA;QACEA,OAAOA,wEAuGXA;sCApG6BA;MAGKA;MAKDA;MAEbA;8BAEdA;MACJA;gCAGeA;MAGfA;QAIWA,4CQ38BOC;UR28BdD,+EA6ENA;QA3EIA;UACEA,OAAOA,uCA0EbA;QAxEIA,OAAOA,wEAwEXA;;MArEkDA;QAMrCA,4CQz9BOC;URy9BdD,+EA+DNA;6DA5DyBA;QAErBA;UAEEA,OAAOA,8DAwDbA;QAtDIA;UACyBA;UAEvBA;YAEmBA;UAEnBA;;QAEFA,OAAOA,uCA6CXA;;QAzCIA;UAGEA,OAAOA,wEAsCbA;QAnCIA;UAEmBA;QAGPA;QACZA;wBACEA;wCACqBA,iBADrBA;YAGWA,KAk4EyBA;cAl4EhCA,+EAyBVA;YAvBQA;;;wBAIFA;;YACMA;cACFA;cACAA,oCAAcA;;0CAEKA;cAEVA,KAq3EuBA;gBAr3E9BA,+EAYZA;cAVUA;;;UAKKA,2BQphCGA;YRohCVA,+EAKRA;;QAFIA,OAAOA,uCAEXA;;K;gCAEmBE;yBACHA;MACdA;QAAqBA,WAEvBA;MADEA,OAAOA,gCACTA;K;+BAEYC;MACNA;eAAUA;QAEFA;;QAEyBA;;IAEvCA,C;OAOFC;MACEA,sBAAMA;IACRA,C;SAQAC;MACEA;QAA+BA;MAC/BA,sBAAMA;IACRA,C;sBAKMC;MACJA;;QAAmBA,OSj6BnBA,4CT46BFA;MAVMA,mBAAmBA;MAIvBA;QACEA,OAAkBA,wDAKtBA;MADEA,OAAkBA,+BACpBA;K;sBAKMC;MAIJA;QACEA,OAAkBA,oDAYtBA;MAVEA;QAIEA;UACEA,OAAkBA,oDAKxBA;MADEA,OSj8BAA,2CTk8BFA;K;sBAOcC;MACZA,OS18BAA,6CT28BFA;K;iBAiCAC;MAEEA,OAAOA,6BADSA,gBAElBA;K;8BAGAC;MACEA;;QS/iCIA;;;MTmjCJA;QAKEA;;;QAgBKC;MAPPD,cACFA;K;mBAGAC;MAGEA,yBAAOA,eACTA;K;mBAOMC;MAEJA,MAAyBA;IAC3BA,C;8BAEMC;MACJA,MAAyBA;IAC3BA,C;6BAYMC;MAKMA;;QAAIA;;;MAEEA;MAChBA,6BACIA;IACNA,C;iCAGMC;MAEGA;MAGPA;QA8CkBA;;oJA3CFA;2
BACIA;QACNA;QACZA;UAIgBA;UACNA;;yBAGEA;;wFAMEA,UAEPA;MAMHA;;MAFWA;MASjBA;QAEcA;WACPA;QAEOA;QADFA;;;MAQZA,OS5uBAA,kGT6uBFA;K;oCAuBAC;MACEA,sBAAMA;IACRA,C;mCAqJSC;MAULA;MAIUA,iCAJAA;MAUNA;MACJA;QAA2BA;MAKXA;MACIA;MACTA;MACEA;MACEA;MAiBfA,OArHFA,+SAyGmBA,uHAcnBA;K;uCAMcC;MAmDZA,OAReA;;;;;;;OAQRA,YACTA;K;2CAkCcC;MASZA,OAPeA;;;;;;OAORA,YACTA;K;wBA8CAC;;8BACuCA;MADvCA,gEAGiCA,UAHjCA;IAGuEA,C;mBA+ClEC;MAGLA;;QACEA,OA7BFA,2CA2CFA;;QAVWA,OAAsBA;QAA7BA,yCAA6BA,0BAUjCA;;MANEA;QAA6CA,SAM/CA;MAJEA;QACEA,OAAOA,uBAAmBA,eAG9BA;MADEA,OAAOA,6BACTA;K;kBAKOC;MACKA;iBACeA;;MAKzBA,YACFA;K;2BAEOC;MACLA;;QACEA,SAqGJA;kBAjGgBA;;mBAMCA;QAKKA;QACMA;UAKtBA;;cAEIA,OAAOA,qBACCA,uBAAsBA,qDA6ExCA;;;cA1EgDA;cAAtCA,OAAOA,qBA5HfA,kBAsMFA;;;MArEEA;QAE8BA;QACMA;QACFA;QACOA;QACNA;QACOA;QACJA;QACOA;QACNA;QACOA;QAC/BA;QAAbA;UACEA,OAAOA,qBAAmBA,uBAAoBA,6BAwDpDA;;UAvDwBA;UAAbA;YAMEA;YAAPA,4BAA0BA,uBAAoBA,6BAiDpDA;iBAhDwBA,kDACPA,qDACAA,+CACAA,sDACAA,kDACAA,qDACAA,mDACAA;YACyBA;YAApCA,OAAOA,qBA9JXA,kBAsMFA;;;QAlCIA,OAAOA,qBAtITA,oEAwKFA;;MA9BEA;QC1zDOA;UD4zDHA,OSnrCEA,0BT+sCRA;;;;;;;SAMSA;QAvBLA,OAAOA,qBSrpDTA,oETmpDcA,kDAmBhBA;;MAbEA;QAIEA;UACEA,OSvsCEA,0BT+sCRA;MADEA,SACFA;K;yBAqBWC;MACTA;;QACEA,gBAAiBA,WAiBrBA;MAfEA;QAAuBA,OAoBvBA,4BALFA;uBAduBA;MACrBA;QAAmBA,YAarBA;MAKEA;MAVAA;;MAIAA,YACFA;K;kBAwBIC;MAEFA;QAAoBA,OAAcA,uBAMpCA;MALEA;QACEA,OAAkBA,mCAItBA;MADEA,OAAcA,uBAChBA;K;kBAsBAC;;+BA+CSA;MA1CPA;QACoCA;QACEA;QACpCA,iCAkCKA;;MAhCPA,aACFA;K;kBAuCAC;MAIaA;MAFHA;;UAEJA,OAAOA,gBAWbA;;UATMA,OAAOA,oBASbA;;UAPMA,OAAOA,0BAObA;;UALMA,OAAOA,gCAKbA;;UAHMA,OAAOA,sCAGbA;;MADEA,sBAAMA;IACRA,C;0BAIAC;6BAEiBA;MACfA;QAAkCA,gBAIpCA;MAHaA;;MAEXA,gBACFA;K;kCAEAC;MAOUA;MACRA;;yBAEYA;UADVA;;yBAGUA;UADVA;;yBAGUA;UADVA;;yBAGUA;UADVA;;yBAGUA;UAVZA;;UAYIA;;MAAJA;QACEA,OAAOA,mBA0BXA;MAXEA;;;;OAAOA,kCAWTA;K;uBA4BSC;;8BAcDA;6BAGAA;kCAEAA;sCACqBA;yCAGrBA;gCAGAA;8BAEAA;2BAKUA;4BACKA;6BACAA;uBAOfA;QAAiEA;MA6B/DA,sCAoZEA,+CAlZFA,cAkbRA;yCA/a0CA;MAkBDA,0BAZjCA;;UAEIA;;;;;;;MAmBNA;MAAJA;QAEMA;;;QAWgBA;;MAJlBA;;MAOJA,yDAAgCA,SAAhCA;0BACiB
A;QAGfA;2BAESA;UASaA;UAAUA;;UAZdA;gCAMKA;QAGvBA;UACEA;YAEMA;;;QAIRA;;;;+CAS+BA;4CAKQA;MAKzCA,mBACFA;K;2CAEOC;MAELA;QAEEA,mBAoBJA;MAlBEA;QAEEA;UAEEA;QAGFA;;;;SAAOA,yCAWXA;;MADEA;IACFA,C;0BAEOC;;MAiBLA;;UAEIA;;;;WAAOA,uBAuEbA;;UA7DMA;;;;WAAOA,uBA6DbA;;UAnDMA;;;;WAAOA,uBAmDbA;;UAzCMA;;;;WAAOA,uBAyCbA;;UA/BMA;;;;WAAOA,uBA+BbA;;UArBMA;;;;WAAOA,uBAqBbA;;UAVMA;;;;WAAOA,wBAUbA;;K;yBAIOC;MAELA;QACEA,OAAOA,0EA4BXA;MAxBIA,OAAOA,kCAHGA,gDA2BdA;K;qCAEOC;;;MAMLA;;UAIIA,sBAwZNA;;UAtZMA;;;;WAAOA,uCA+EbA;;UApEMA;;;;WAAOA,uCAoEbA;;UAzDMA;;;;WAAOA,uCAyDbA;;UA9CMA;;;;WAAOA,uCA8CbA;;UAnCMA;;;;WAAOA,uCAmCbA;;UAxBMA;;;;WAAOA,uCAwBbA;;UAbMA;;;;;;WAAOA,wCAabA;;K;oCAEOC;MAEEA;WA8ILA;QAA+BA;WAJ/BA;QAA4BA;uBAxIlBA;MAIHA;MAAPA,SAwBJA;K;sBAwBFC;MACEA,OAAeA,iCACjBA;K;2BAoESC;MACLA,OM5jEeC,kCAHOC,eAgDRF,sBN+gEuBA,oBACvCA;K;2BAIOG;MAAoCA,cAAQA,UAASA;K;8BAIrDC;MAAuCA,cAAQA,aAAYA;K;mCAYpDC;MA/CdA;;aAkDMA;;ME3gFGA;qBF4gFmBA,gBAA1BA;qBACaA;;UAETA,YAINA;;MADEA,sBAAMA;IACRA,C;uBA4FGC;MAEHA;QAAmBA;MACnBA,YACFA;K;eA+BKC;MACHA,sBAinBAA;IAhnBFA,C;mBAWKC;MACHA,sBAaAA;IAZFA,C;yBAoEOC;MAELA,OAAOA,IADgBA,qBAEzBA;K;kBC5xFKC;MACHA;IAOFA,C;6BAoEAC;MAESA;0BAAoBA,CAAdA;kBAIYA,+BA/HlBA;MAgIPA;QAlFAC,+BFeYC;QEmEQF,aF5BeE,EE+FrCF;;qBAlEgCA,+BAjIvBA;MAkIPA;QAAyBA,kBAiE3BA;6BA7HyBG,kBAtEhBA;MAuIPH;QACUA,sBAA6BA,CAApBA;QACjBA;oBAGuBA,+BA5IlBA;UA6IHA;YA/FJC,+BFeYC;YEgFYF,aFzCWE,EE+FrCF;;yBArDgCA,+BA9IvBA;UA+IHA;YAAyBA,kBAoD/BA;iCA7HyBG,kBAtEhBA;;;;MAqJPH;QAQEA,WAsCJA;oCAnCgBA;gBAEHA;MAEXA;QACWA;SACGA;QAxHdC,+BFeYC;QE0GVF,aFnEiCE,EE+FrCF;;MAzBEA;SACcA;QACZA,kBAuBJA;;MApBEA;QACyBA;QAlIzBC,sBA6JoBD,gCF9IRI;QEmHVJ,SF5EiCI,EE+FrCJ;;MAhBEA;QACEA,OAAOA,sCAeXA;MAZEA;QAEEA,sBAAMA;cA7GMA;QAmHWA;QAjJzBC,sBA6JoBD,gCF9IRI;QEkIVJ,SF3FiCI,EE+FrCJ;;QAFIA,OAAOA,sCAEXA;K;sBAYAK;MACcA;MAlKZJ,iCFeYI,+BEoJCA;MAEbA,kBACFA;K;0BAEAC;MAGEA,OAAOA,2FACTA;K;6BAEAC;wCACoBA;MAGTA,QApJKA;QAoJZA,4CAIJA;;QAFIA,OAAOA,oDAEXA;K;sBAgBKC;oBACSA;QAAwBA,MAGtCA;;MADEA;IACFA,C;8BAGKC;MACHA;MAAiCA;MACAA;MAEjCA;gBAzLuBA;MA+LRA;MAEfA;QACgBA;QACJA;;QACVA,oBAAyBA,SAAzBA
;oBACYA;UACyBA,SAAvBA;UACZA;YAEeA,6CADUA;YAEvBA;cAlONR,iCFeYQ;;;;;;ME+NZA,oBAAyBA,SAAzBA;kBACYA;yBACNA;gCA9RCA;;;;;;;;IAuSTA,C;aAmCKC;MAESA;iBAAcA;MAiBlBA,iCACJA,cALIA,yBAAsBA,cAFtBA,yBADsBA,cAAtBA,yBAAsBA,cADtBA,yBAAsBA,cADtBA,yBAAsBA,cAHtBA,wBAFmCA,CACvCA,cAA+CA;MAqBnDA;QACqBA;QACnBA;UAGmCA;QAA/BA;UACFA,4BAAoBA,SAApBA;sCACoBA;YAClBA;cAmBSA;;;oBAZFA;2BACOA;6BACEA;MAELA;MAEbA;MAEAA;IACNA,C;yBAEAC;MAEEA,OADeA,2BAEjBA;K;6BShJQC;6BAGeA;wBAEPA,KAGGA;MAEjBA;QAGEA,WAsBJA;MAnBEA;QACEA,gBAkBJA;MANWA,yBAFWA;QAElBA,uCAMJA;MADEA,OAAOA,oBACTA;K;wBCpJAC;+BAGMA;QACFA,OAAOA,6CAGXA;MADEA,aACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;MCgsByCC;;qBAlVpCC;MACsBA,WAM3BA;K;iCAOUD;MAA8BA,6CAA8BA;K;mCA6xB5DE;MAA+BA,OAkCUA,uBAlCyBA;K;wCAKlEC;MAGNA,yBAmCEA,wCAGAA,8CAnCJA;K;oBAgvBGC;MACHA;QACEA,sBAAMA;IAEVA,C;oBASIC;MACFA;MAAgCA;;UAEtBA;;UAC0CA;;QAHpBA;MAAhCA;QAIEA,sBAAMA;MAERA;QAAiBA,cAEnBA;MADEA,UACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;4BNz5DaC;MAIcA,kBA0kIaA;MAnkIpCA,6BAjBIA,6DAmlIyBC,2BAjkI/BD;K;8BAEWE;MA4jFPA,gBAogDkCA;MAzjIpCA,2BA3BIA,yEAmlIyBC,oBAvjI/BD;K;8BA0EYE;oBAy+HmBC;MAv+H7BD;QACEA,OAAOA,gCA0+HoBA,UAv+H/BA;MADEA,iCACFA;K;2BAqJcE;MAGZA,UA60HmCA,iBA50HrCA;K;YA+JEC;MASFA,OAAiBA,qBA3COA,6BA4C1BA;K;eA+EIC;;kBAklH6BH;MAhlH/BG;;;;;;UAMIA,UAyINA;;wBAq8GiCA;UA3kHDA;UAM1BA;YAAuDA,UAgI7DA;UA/HMA,OAAiBA,+DA+HvBA;;wBAq8GiCA;UAjkHDA;UAM1BA;YAAuDA,UAsH7DA;UArHMA,OAAiBA,mEAqHvBA;;wBAq8GiCA;UAvjHDA;UAM1BA;YAAuDA,UA4G7DA;UA3GMA,OAAiBA,mEA2GvBA;;sCAtfWA;UA8YmCA;UAMxCA;YAIEA,UA8FRA;UA7FMA,OAAiBA,6CAgiHgBC,6CAn8GvCD;;oBAq8GiCE;UA3hHLF;0BAtZjBA;UAwZsBA;UAM3BA;YAEEA,UA4ERA;UA3EMA,OAAiBA,8EA2EvBA;;kBAnhB6CG;sBAiDlCH;UA+ZmBA;UAMxBA;YAAmDA,UA6DzDA;UA5DMA,OAAiBA,6DA4DvBA;;0BAq8GiCI;UA9/GCJ;kCAhZvBA;UAwZDA;UAMJA;YAEEA,UAyCRA;UAxCMA,OAAiBA,8FAwCvBA;;sBA/bWA;yBA26HgCA;UA5gHbA;oBAq+GGK;UA99GLL;UACtBA;YAEEA,UAsBRA;UArBMA,OAAiBA,yFAqBvBA;;qBAi8GiCM;UA58G3BN;YAAmBA,UAWzBA;kCA8+GkDA;UAn/G5CA;YAAsBA,UAK5BA;UAJMA,eAINA;;UAFM
A,sBAAMA;;IAEZA,C;oBAEQO;MAQkBA;0BAk+GiBA;;MAj+GzCA;sBA07G+BA;QAx7GRA;QACrBA;UACYA;;;MAIdA,kCACFA;K;oBAEQC;MASkBA;4BA68GiBA;;MA58GzCA;uBA88GgDA;;wBAzCjBA;QAj6GRA;QACrBA;UACYA;QAEZA;;MAWFA,oCACFA;K;iCAEoBC;MASkBA;+CAhXhCA;;+CAUAA;wCA+WgCA;kCA3VhCA;2BAkWmBA;MAMvBA;QAGEA,yBAYJA;MA1ZMC;YAUSD;YAUAA;YAiBAA;MAoXbA,aACFA;K;iBAkBQE;iBAEYA;MAElBA,aACFA;K;uBAKKC;6BAEaA;MAChBA;QACEA;UACEA,OAAOA,kCAabA;QAJMA,OAuzG2BA,oBAnzGjCA;;MADEA,WACFA;K;0BAOIC;MACFA;MAAQA;+BA7CRA;UAkDeA;UACXA;YAAiBA,UAIvBA;;MADEA,OAAOA,sBACTA;K;gBAKIC;MAUOA,uBA3ETA;QA2EEA,8BASJA;MA8yGoCA;QAnzGhCA,OAAOA,4BAKXA;MADEA,OAAOA,+BADWA,0BAEpBA;K;sBAIIC;sBAiBQA,KAAwBA;;MAIlCA;QAAiBA,iBAUnBA;;QALIA,iBAKJA;MADEA,UACFA;K;iBAKIC;MAEuCA,gBAD/BA;MACVA,iEACFA;K;gCAOIC;iCACgBA;4BACNA;MACZA;QAAmBA,YAErBA;MADEA,OAAOA,0DACTA;K;oCAGIC;sDAzIFA,iEA8JYA;cAMMA,+BA7hBMA,mCA+hBpBA;;MAIJA,UACFA;K;yBASIC;;oBACUA;oBA4sGoCA;MA1sGhDA;QArgBiBA,2BA3COpB;QA+jBjBqB;QAZLD,UAGJA;;MADEA,WACFA;K;8BAOKC;MAEHA,2BADUA,wBAEZA;K;qBAyDIC;MAhFqBA,qCAhLvBC;MAmQAD;QAAyBA,kBAO3BA;MANaA;QAETA,OAolGiCA,0BAplGLA,KAIhCA;MAmnGoCA;QArnGNA,OAxDlBA,4BA0DZA;MADEA,OAAOA,sBACTA;K;qBAIKE;MAKUA,YA78BTA;MAy8BJA,uBAv8BMC,oDAw8BRD;K;sBAQME;MApwBKA;eAbKA;;MAuxBdA;QACEA,UAv9BIC,sBA2/BND,gBA/BFA;MAHgCA,qCA1pBNA;MA4oBXA,kBA78BTA;MA49BJA,iCA19BMD,8DA49BRC;K;eAuBKE;MACHA,OAAOA,oBA1oBUA,qBA3CO5B,8BAsrB1B4B;K;6BAuDKC;MAGCA;MAGKA;QAAPA,kDA6EJA;MA+2FIC;;;QAA2CA;MA17F7CD;QACEA,OAAOA,wCA0EXA;kBApnCmDA;MA4iCjDA;QACEA,OAAOA,sEAuEXA;MA3DEA;QACEA,OAAOA,0CA0DXA;oCAi5FiCnC;+BAJAI;MAj8F/B+B;QACEA,OAAOA,6CAmDXA;;;;;;;;;MA/CEA;QACEA,OAAOA,oCA8CXA;MA3CEA;yBA07FqC3B;QAn7F/B2B,aAtgCGA;iBA7FHA;UAgnCFA;YACEA,OAAOA,wDAsBfA;UAhBMA,OAAOA,oDAgBbA;;aATSA;QAoCmBA,iDAo3FWzB,oBAr6H5B2B;QA+gCPF,OAAOA,0EAOXA;;MALEA,OAAOA,8DAKTA;K;eAGKG;MA9uCMA,OAVHA;MA0vCNA,0BACFA;K;8BAgCQC;;;MAy0FJH;;;QA9zF+CG;MALjDA;;;;;QAMIA;QAFGA;;;MAhyCEA,OATHA;MAgzCNA,0BACFA;K;WAEKC;wBAm1F4BpC;;MAj1FxBoC;;;YAGEA;cACmBA,qCAi1FGxC;gBAh1FCwC,oCAg1FDtC;MAr1F/BsC,SAOFA;K;gCAGKC;MAGCA;MACJA;QAAoBA,OAAOA,kBAG7BA;MADEA,OAAOA,gBA54BiBA,eA24BRA
,mDAElBA;K;wCAQKC;MACHA;QAAoBA,WAMtBA;MADEA,OAt0CSA,IA4nIsBC,qBArzFjCD;K;sBAGKE;MAGCA;MACJA;QAAoBA,OAAOA,kBAY7BA;mBA1vCeA;MAwvCKA,uBAzjBlBA;QAsjBEA,oBAKJA;MADEA,uCACFA;K;0BAIKC;MAGCA;MACJA;QAAoBA,OAAOA,kBAoB7BA;MAdEA;QAAgDA,YAclDA;MA2yFoCA;QAvzFNA,WAY9BA;mBAtxCeA;MAoxCKA,uBArlBlBA;QAklBEA,oBAKJA;MADEA,uCACFA;K;iCAIQC;MAGFA;MACJA;QAEMA;UACFA,aAcNA;aA/4CWA;QA64CPA,aAEJA;MADEA;IACFA,C;yCAIQC;MAGFA;MACJA;QACEA,aAIJA;WA55CWA;QA05CPA,aAEJA;MADEA;IACFA,C;kBAQMC;MACJA,sBALkBA,yBADMA,yBAAgBA;IAO1CA,C;kBAsBgBC;MAIZA,OAHiCA,4CAEFA,eADfA,kGAKlBA;K;0BAOAC;;IAAqEA,C;iCAE7DC;MACNA,OAHFA,iCAGuCA,+BACvCA;K;eAaGC;MA39CMA;2BAwnIsBhD,sBAIAJ;MA9pF/BoD,gBA8pF+BlD,yBA5pFrBkD,iCArjCcA,0BA3afA,WAm+CXA;K;aAIKC;MACHA,qBACFA;K;aAIQC;MACNA;QAAoBA,aAWtBA;MADEA,sBAAiBA;IACnBA,C;UAIKC;MACHA,WACFA;K;UAIQC;MACNA,aACFA;K;YAIKC;MACHA,YACFA;K;WAIKC;MACHA,0CACFA;K;WAMKC;MACHA;QAAoBA,WAGtBA;MAFEA;QAAqBA,YAEvBA;MADEA,sBAAiBA;IACnBA,C;YAIMC;MACJA;QAAoBA,WAYtBA;MAXEA;QAAqBA,YAWvBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;YAIMC;MACJA;QAAoBA,WAItBA;MAHEA;QAAqBA,YAGvBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;aAIOC;MACLA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAoBA,aAWtBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAoBA,aAGtBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;UAIKC;MACHA,iEAEFA;K;UAIIC;;QACkBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;WAIKC;;QACiBA,aAWtBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;WAIKC;;QACiBA,aAGtBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;UAIKC;MACHA,gCACFA;K;UAIIC;MACFA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;WAIKC;MACHA;QAAoBA,aAWtBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;WAIKC;MACHA;QAAoBA,aAGtBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;aAIKC;MACHA,gCACFA;K;aAIOC;MACLA;QAAuBA,aAEzBA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAuBA,aAWzBA;MAVEA;QAOEA,aAGJA;MADEA,sBAAiBA;IACnBA,C;cAIQC;MACNA;QAAuBA,aAGzBA;MAFEA;QAAoBA,aAEtBA;MADEA,sBAAiBA;IACnBA,C;qBAEOC;MACEA;MACPA,uCA29EyCA,SA39EzCA;QAGMA,+BAi7EyBA;MA96E/BA,QACFA;K;sBAEOC;;iCAy6EgCpE;2BAr6H5BoE;MAogDTA;QAEEA,aAAaA,iDAmBjB
A;yBAq7E2CA;MAkBrCA;wBAlBqCA;MAh8EzCA;QACEA;QAEAA;UAAqBA;QAChBA,0BAq5EwBA;QAp5E7BA;0BAk5EmCA;QA/4EnCA;;MAEFA,eACFA;K;wBAEOC;MAKEA;MAGPA;6BA26EyCA;QAz6EvCA;UAC2BA;;6CAEWA;+BAEVA;QAC5BA;UACEA;kHAKFA;6BAEsDA;UAAOA;UAArCA;8CAAcA;4EAAdA;2BAo3EKA;yBAJA3E;UA9BcgC;YA4CI4C;;YA5CJ5C;UAh1EzC2C;YAEoBA;;QAItBA;;QA3B0BA;uBAjlDepE;+BA0ElCoE;qCAwJLA;mDA6xHqCA;qCAnxHrCA;mDAmxHqCA;wBA/vHrCA;yBA+vHqCA;MAl4EjBA;MAIxBA;QAGMA,wDAo1EyBA;MA70E/BA;QACEA;QAEAA;UAGMA,wDAu0EuBA;QAj0E7BA;;MAGFA;QACEA;QAEAA;UACEA;mBAozE6BA;YAlzE3BA;UAGEA,qCAqzEuBA,sCAFMA;;QA3yEnCA;;MAGFA;sBAEuCA;;;MAOvCA,0EACFA;K;gBAYOE;;kBAgxE0B7E;MA7wE/B6E;QAA4BA,eA+E9BA;MA9EEA;QAA6BA,gBA8E/BA;MA7EEA;QAA0BA,aA6E5BA;MA5EEA;QAA2BA,cA4E7BA;MA3EEA;QAAyBA,YA2E3BA;MAzEEA;QAWIA,OATSA,kBAywEkBjF,0BAlsEjCiF;MA1DEA;8BA4vE+BtC;QA1vElBsC;uCAsvEkB7E;QAhvE7B6E,6EAkDJA;;MA/CEA;QAEEA,qBAAmBA,kBA+uEU/E,gCAlsEjC+E;MA1CEA;QAESA,4BAwuE4BzE;QA9tEnByE,gBA3tDTA;QA6tDPA,iBAHcA,2FA+BlBA;;MAzBEA;QACEA,OAAOA,yCAwBXA;MArBEA;QACEA,OAAOA,iDAoBXA;MAjBEA;QAGEA,OAAOA,0BAgtEsBrE,8BAp4HtBqE,OAksDXA;MAPEA;gBA/wD2CpE;2BAgxDboE;QAEEA;QAAvBA;4CAAOA;QAAdA,qBAAOA,IAIXA;;MADEA,UACFA;K;kBAEOC;0BD1iEOA,mBACLA;MC2iEPA;QAAuBA,gBAEzBA;MADEA,mBACFA;K;sBAuLiBC;yBAXXC,GASAD;aAIFA;uBAbEC,GASAD;MAOFA,WACFA;K;4BAEWE;;qBAhBPA;kBAkBUA;MACZA;QACEA,OAAOA,sCAcXA;WAbSA;QAo/DsBA;QAnxDtBA;QA9NsBA;QAC3BA;;QAGgBA;QAYTC;QAVPD,iBAIJA;;QAFIA,YAEJA;K;sBAKYC;MACRA,qCA3CAA,WA2C+CA;K;4BAoCvCC;MACRA,OAAOA,8BA7EPA,WA6EiDA;K;kBAa1CC;MA6/DPA;qBA9lEAA;;MAoGFA;QAAmBA,YAIrBA;MAkEoBA,sBADGA;MAw7DrBA;MA1/DAA,UACFA;K;+BAEWC;;2BAv8DkCA;MA68D3CA;QACUA,mBA58DNA;MAy7HFA;MAz+DFA;QAAmBA,YAIrBA;MAiDoBA,sBADGA;MAw7DrBA;MAz+DAA,UACFA;K;kBAEWC;;2BAp8DkCA;MAs8D3CA;QACUA,mBAr8DNA;oCAy1H+BrF;MA4EjCqF;MA39DFA;QAAmBA,YAUrBA;MAHYA,uEAs4DmBtF,4BA76HtBsF;MA8/HPA;MAr9DAA,UACFA;K;+BAiCWC;SApyELA;SAIAA;MAwyEJA,UACFA;K;gCAqGWC;MAk0DPA;wBA9lEAA;MA+RFA;QAAmBA,YAErBA;MAn6EIC;SAmJEC;SAwLAA;MA8lEGF;MA0zDPG,QAjmEEA;MAgSFH,SACFA;K;4BASWI;MAozDPA;sBA5EiC3F;wBAlhEjC2F;MA8SFA;QAAmBA,YAMrBA;MAFIA;MA+yDFD,QAjmEEA;MA+SFC,SAKFA;K;4BAEWC;MAM
TA;;2BAotD6B7F;QAltDvB6F;;;UAE6BA;QAFjCA;UAIEA,eAQNA;;MA58EIJ;SAmJEI;SA6CAA;MA2wEGA,GAhoEHA;MAgoEJA,mDACFA;K;gCAEWC;MAkxDPA;sBA5EiC7F;wBAlhEjC6F;MAoVFA;QAAmBA,YAMrBA;MAFIA;MAywDFH,QAjmEEA;MAqVFG,SAKFA;K;gCAEWC;MAMTA;;2BA8qD6B/F;;QA5qDvB+F;;YAESA;cAELA,4CA4qDmBjG;QAhrD3BiG;UAKEA,eAoBNA;;UAjBMA,iBAiBNA;aAhBWA;iCAuqDoBnG;UAnqDrBmG,gBA+pDqB/F,yCAIAF;YAlqDvBiG,mBAWRA;;YATQA,OAAWA,8CASnBA;;;MA//EIN;SAmJEM;SA6CAA;MA8zEGA,GAnrEHA;MAmrEJA,mDACFA;K;gCAEWC;MA+tDPA;sBA5EiC/F;wBAlhEjC+F;MAuYFA;QAAmBA,YAMrBA;MAFIA;MAstDFL,QAjmEEA;MAwYFK,SAKFA;K;gCAEWC;MAMTA;;qBAt4E+CA;QAw4EzCA;UAGFA,eAYNA;aAXWA;UACLA,OAqHFA,+DA3GJA;;UARMA,iCAQNA;;MAxiFIR;SAmJEQ;SA6CAA;MAu2EGA,GA5tEHA;MA4tEJA,mDACFA;K;gDAEWC;MAsrDPA;;wBA9lEAA;MA4aFA;QAAmBA,YAMrBA;MApjFIT;SAmJEU;SA6CAA;SA2IAA;MAovEGD;MAoqDPP,QAjmEEA;MA6aFO,SAKFA;K;kCAccE;;4BA2nD2BA;MAxnDvCA;6BAilD6BA,GAFMnG;MAzkDnCmG,QACFA;K;uCAEcC;;4BA+mD2BA;MA3mDvCA;uBA6mD8CA;4BA/CfA;4CAMFA,OAFMpG;;MAtjDnCoG,QACFA;K;iCAiBWC;MAKFA;;oBAwkDgCC;QAplDnCD;MAunDFA,gBA9lEAA;MAsfFA;QAAmBA,YAMrBA;MA9nFIb;SAmJEe;SA6CAA;SAeAA;oBA8+HmCA;WA1nInCA,2BA4nI0CA;SAp3H1CA;MAo0EGF;MAolDPX,QAjmEEA;MAufFW,SAKFA;K;+BAuCWG;MACLA;cA4+CyBzG;sBAIAK;QAsD3BoG,mBAv+HKA;;QA08EyCA;QAATA;;MAhBrCA,aAq/CiCxG;MA4EjCwG,gBA9lEAA;MAgjBFA;QAAmBA,YAMrBA;MAxrFIhB;SAmJEiB;SA6CAA;SAeAA;SA4HAA;MA03EGD;MA8hDPd,QAjmEEA;MAijBFc,SAKFA;K;8BA6BWE;MALPA;;gBAghDAA,QA9lEAA;MA2lBFA;QAAmBA,YAMrBA;MAnuFIlB;SAmJEmB;SA6CAA;SAeAA;SA4HAA;MAq6EGD;MAm/CPhB,QAjmEEA;MA4lBFgB,SAKFA;K;gCAqEWE;MA5BPC;sBAv8EUA;uCA2FVC;qDA6xHqCA;uCAnxHrCA;qDAmxHqCA;0BA/vHrCA;2BA+vHqCA;;MA96CvCD;QAIIA;QAEAA;;MAKJA;QAIIA;QAEAA;;MArd6CA;MAq5D/CD,gBA9lEAA;MA8qBFA;QAAmBA,YAMrBA;MAtzFIpB;SAmJEuB;SA6CAA;SAeAA;SA4HAA;MAw/EGH;MAg6CPlB,QAjmEEA;MA+qBFkB,SAKFA;K;uCA0BWI;MAJTA;8BAw0CmChH;gBA4EjCgH,QA9lEAA;MAutBFA;QAAmBA,YAYrBA;MARIA;MAs4CFtB,QAjmEEA;MAwtBFsB,SAWFA;K;uCAEWC;MAOTA;;wBA+0CuCA;QA50CNA;QAC/BA;wBAoyC2BA;mBAJAlH;;YA5xCvBkH;;;QAGJA;UACwBA;UAMEA;UAMxBA,OAAOA,iHAcbA;;;MAp5FIzB;SAmJEyB;SA6CAA;SAeAA;MAosFGA,GAxkFHA;MAwkFJA,mDACFA;K;kBA6HcC;MAMZA,0EAeFA;K;iBA
qBWC;;uBAhB6BA;mBACDA;sBAmBnBA,gBAAlBA;QAXwCA;QAatCA;UACMA;aACCA;UACDA;aACCA;UACDA;;UAEJA;UACAA;;cAEIA;;cArBRA;cAyBQA;;cAzBRA;cA6BQA;;cA7BRA,QAkCUA,uBA/C8BA,UACCA,IAeNA;cAiC3BA;;cApCRA,QAmbiBA,qDAhcuBA,IAymCXC;cAjjCrBD;;cA3CRA,QAvrBOA,qCA0qBiCA;cA4DhCA;;cA/CRA,QA/qBOA,qCAkqBiCA;cAgEhCA;;cAnDRA,QAvqBOA,qCA0pBiCA;cAoEhCA;;cAvDRE,cATqCA;2BAgpCEA;cA5kC/BF;;cAGAA;cACAA;;cAGAA;cACAA;;yBAhFgCA;cAaxCA,QAyEoBA,+BAERA,2BAvF6BA,IAeNA,iBAPIA;cAmF/BA;;yBA5FgCA;cAaxCA,QAqFoBA,mCAERA,2BAnG6BA,IAeNA,iBAPIA;cA+F/BA;;yBAxGgCA;cAaxCA,QAiGoBA,mCAERA,2BA/G6BA,IAeNA,iBAPIA;cA2G/BA;;cAvGRA;cAAAE,cATqCA;2BAgpCEA;cA3hC/BF;;cAGAA;cACAA;;cAhHRE,cATqCA;2BAgpCEA;cAnhC/BF;;cA+hCNG,wBA5pCmCA;cA+WrCC,wBAnXwCD,UACCA;cAwmCZA;cA5lC7BC;;cAwHQJ;;cAxHRE,cATqCA;2BAgpCEA;cA3gC/BF;;cAuhCNK,wBA5pCmCA;cAsXrCC,6BA1XwCD,UACCA;cAwmCZA;cA5lC7BC;;cAgIQN;;cA+hCNO;cA/pCFA,QA4pCEA;cA5pCFA;cAAAL,cATqCA;2BAgpCEA;cAnzBhCF;cAhNCA;;cAGAA;;;;MApI2BA;MAyInCA,OAAOA,uBAzJiCA,UACCA,SAyJ3CA;K;uBAOWQ;MACLA;;sBACcA,SAAlBA;QAxJwCA;QA0JtCA;UAAyBA;QACXA;;MAzJhBA;MA4JAA,QACFA;K;4BAEWC;MAOLA;;sBACcA,SAAlBA;QAzKwCA;QA2KtCA;UACEA;YAAeA;UACHA;;UAC0BA;YOhwGKA;;YP+vG/BA;UACPA;YAGLA;;;MA6+BFA;MAz+BFA;mBAhMwCA;4BACCA;uBAwmCZ7H;mCAIAK;QA13DRwH,6CAw3DczH,UA5pBjC0H;QAztCFD;UACEA,+CAA4BA;QAsxB9BA,WApxBiBA;;QAoxBjBA;MA+LAA,QACFA;K;+BAEYE;MAEMA;mBAjNwBA;;eAgBLA;MAmMnCA;QAtMAA,WAwMwBA;;QAEXA,kCAtN4BA;oBAwmCZ/H;;YA5lC7B+H,WA+MkBA,kEAnNqBA;YA0NjCA;;YAtNNA,WAyN4BA;YACtBA;;;IAGRA,C;2BAOYC;MAjOyBA;mBAhBKA;;;MAsQxCA;QAEEA;;YAxPiCA;YA2P7BA;;YA3P6BA;YA+P7BA;;YAlQNA;YAsQMA;;;QAtQNA;MA4Q6BA;MAzQMA;MA4QnCA;;UA5QmCA;;mCA91BgBA;;;UA+mC9BA,wCAhSoBA;UA/oFvCnH;oBAUSmH;oBAUAA;oBAiBAA;UAsnFXA,WAiSgBA;UAEZA,MAoBNA;;UAvTEA,WA4SgBA,iCAkzBmBA;UA5yB/BA,MAKNA;;UAFMA,sBAAMA,oDAA8CA;;IAE1DA,C;oCAgCYC;MApVyBA;MAsVnCA;QAzVAA,WAnqBOA,qCAspBiCA;QAwWtCA,MAOJA;;MALEA;QA7VAA,WA3pBOA,qCA8oBiCA;QA4WtCA,MAGJA;;MADEA,sBAAMA,qDAA+CA;IACvDA,C;wBAEeV;MA+yBXA,+BA5pCmCA;MA+WrCA,wBAnXwCA,UACCA;MAwmCZA;MApvB7BA,YACFA;K;kBAWWW;MACTA;QAEEA,OAAiBA,wDAltCgCA,KA4tCrDA;WALSA;QACUA,WAAiCA;QAAhDA,y
DAIJA;;QAFIA,WAEJA;K;mBAEYC;;uBAowB6BA;MAlwBvCA;QAEaA,wDAkwBiCA;IA/vBhDA,C;wBAEYC;;uBA2vB6BA;MAxvBvCA;QAEaA,wDAwvBiCA;IArvBhDA,C;uBAEWC;;0BAssBoBrI;MApsB7BqI;QACEA;UAAgBA,kBAusBWhI,SAjrB/BgI;mCAhwGSA;2BAw9HgCA;QA3uBrCA;UACEA,oBAmsByBA,WAjrB/BA;QAfIA;iCAgsB2BhI;0BAJAL;aAxrB3BqI;QAAgBA,kBAWpBA;MATEA;QACEA,sBAAMA;iCAlwGDA;gCAk+HgCA;QA3tBrCA,oBAorB2BA,WAjrB/BA;MADEA,sBAAMA,mDAAsCA;IAC9CA,C;aAsDGC;;kBAp7GKA;;QAAoBA,UAApBA;MAynIJA;MAlsBJA;QAuBSA;QA8qBPA;;MAjsBFA;QAAmCA,YASrCA;MAREA;QAAkCA,WAQpCA;MADEA,WACFA;K;cAuCKC;MAiBHA;;QAA8BA,WA2OhCA;MAsSIA;;;QAjhBmCA;MAGrCA;QAA4BA,WAwO9BA;eAoUiCvI;MAziB/BuI;QAA0BA,WAqO5BA;MAlOMA;QAAmBA,YAkOzBA;YA7rHmDC;MA89GjDD;QAA+BA,WA+NjCA;MA5N0BA;MACxBA;QAGMA,+BAgiByBA,EAJA9H;UA5hB6B8H,WAwN9DA;eAoUiCvI;;MAphB/BuI;QACEA;UACEA,OAAOA,iCAshBoBzI,uBAxUjCyI;QArMIA,2EAqMJA;;;QAhMIA;UACEA,OAAOA,wBAugBoBzI,gCAxUjCyI;QAtLIA;UACEA,OAAOA,wBA6foB3I,gCAxUjC2I;QA5KIA,kBA4KJA;;MAxKEA;QACEA,OAAOA,wBA+esB3I,gCAxUjC2I;MA5JEA;QAOcA;QANZA,OAAOA,gDA2JXA;;MA9IEA;QACOA,6BAqdwBzI;UA7c3ByI,YAqINA;QAnIIA,OAAOA,uBAEDA,gEAiIVA;;MAxHEA;QAEUA;QADRA,aAEIA,wBA6byBhG,gCAxUjCgG;;MArGEA;QACMA,qCA4ayBzI;UApa3ByI,WA4FNA;QA1FIA,OAAOA,gCAIDA,uDAsFVA;;MA/EEA;QAEUA;QADRA,aAEIA,iCAoZyBhG,uBAxUjCgG;;MA/DEA;QAAsBA,YA+DxBA;MA5DiCA;;QAE7BA,WA0DJA;MAtDMA;;QAAqDA,WAsD3DA;MAjDEA;;UAC2BA,WAgD7BA;QA/CIA;UAAsCA,YA+C1CA;mBA5jHWA;;yBA26HgCA;;UAxZfA,YAyC5BA;QA8XMA;;QAlaFA;0BA4W6BA;;UAzWtBA,mEACAA;YACHA,YA+BRA;;QA3BIA,OAAOA,gCAmWsB/H,yCAxUjC+H;;MAlBEA;;UAC2BA,WAiB7BA;QAhBIA;UAA+BA,YAgBnCA;QAfIA,OAAOA,uDAeXA;;MAXEA;QACEA;UAAgCA,YAUpCA;QATIA,OAAOA,wDASXA;;MALEA;QACEA,OAAOA,qDAIXA;MADEA,YACFA;K;sBAEKE;MAWCA;MAECA,6BAyT0BlI;QAxT7BkI,YA8FJA;qBAprHWA;;uCAwJLA;;qDA6xHqCA;;MAjVzCA;QAA2DA,YAgF7DA;MA9EMA;uCAp8GAA;;qDAmxHqCA;;MArUzCA;QAEEA,YAkEJA;MAhEEA;gCAmUgDA;QAhUzCA,+CAuRwBA;UAtR3BA,YA4DNA;;MAxDEA;gCA2TgDA;QAtTzCA,+CA6QwBA;UA5Q3BA,YAkDNA;;MA9CEA;gCAiTgDA;QA5SzCA,+CAmQwBA;UAlQ3BA,YAwCNA;;0BA9/GMA;;2BA+vHqCA;;MA/RzCA;sBAsPqCA;eApPnCA;UACEA;YAA4BA,YA2BlCA;wBAwNuCA;UAjPjCA;UACAA;YAAyCA,YAwB/CA;8BAoNmCA;UAzO7BA;Y
ACEA;cAAiBA,YAoBzBA;YAnBQA;;qBAsR0CA;UAlR5CA;YAAiCA,YAevCA;qBAmQkDA;UA/QvCA,kCAsOsBA;YArOzBA,YAWRA;UAVMA;;;aAIFA;kBA0N+BA;UAzN0BA,YAK7DA;QAJMA;;MAGJA,WACFA;K;uBAEKC;;iBAsNkCtI;;aA3MrCsI;uBAn0DI1D,GASA0D;QAu0DFA;UAAkBA,YA4CtBA;QA3CIA;UA6LmCA;UA3LjCA;;sBA9dAA;QAkeFA;UAAqBA,YAqCzBA;yBA2L2CA;QAL/BA,uDAnvHcC,aAsxD6BA;QAqwDnDD;UAE+BA,qEAkLIA;QA9KnCA,OAAOA,8DA3wHAA,oBAuyHXA;;MATEA,OAAOA,mCA9xHEA,yCAuyHXA;K;yBAEKE;;uBAyLsCA;MAvKzCA;QAgCSA,iCAgGsBA;UA/FzBA,YAKRA;MADEA,WACFA;K;oBAEKC;;mBA/0HMA;;wBA88HgCA;;QAjHnBA,YAaxBA;WA2DuCvI;QArEnBuI,YAUpBA;MAREA;QAGOA,mCAkEwBA;UAjE3BA,YAINA;MADEA,WACFA;K;cAEKC;kBAuD4B9I;;;QApD3B8I;UACKA;YACmBA,kCAsDGlJ;cArDCkJ,iCAqDDhJ;MAzD/BgJ,SAKFA;K;uBAWK9G;MAA8BA;MAO/BA;;;QAA2CA;MAPZA,SAGlCA;K;kBAMI+G;kBA4B4B/I;MA1B/B+I,0FAKFA;K;uBA4CcC;MAFRA;;sBAsBqCA;MAhBvCA;kBAzBmCA;QAoC3BL;;IAPVK,C;0BAKeL;MACXA,8CAlvHoBA,aAsxD6BA,IA89DDA;K;;;;;;;;;;;;;;;;;;;;0CQ1vIpCM;MACdA;MAESA,QADLA;QACFA,+DAgCJA;cA9BMA,iCACAA;QAAiCA;QAEzBA;QACCA;;QASIA,0BACXA,yBAPYA,uEAQhBA;QAEAA,OAAOA,mEAaXA;aAJWA,QADEA;QACTA,qEAIJA;MADEA,OAAOA,uDACTA;K;0CAEYC;MAKVA,uBACIA,yBALYA;IAMlBA,C;gDAEYC;MAKVA,kBACIA,yBALYA;IAMlBA,C;yCAEYC;MACwBA;MAU3BA;IATTA,C;eA0BAC;;;;IAaAA,C;4BA0FWC;MACXA,OAjCAA,2BCgJAC,eAAyBA,gBAAzBA,2BDhJAD,sCAkCFA;K;mBAUQE;MAENA;eACUA;MACVA,gBAxBwBA,QAyB1BA;K;eASQC;MACNA;IACFA,C;gBAQQC;MACNA;IACFA,C;iBAOQC;MAENA,0BACIA,2BAAyBA;IAC/BA,C;kBASKC;MAECA;;wBAEqBA;;QASvBA;;;;UAEAA;;UCuCFA,wBAAyBA;gBAuJvBA;gBACAA;UD1LAA;;;IAEJA,C;2BAIkBC;;;;;;;;;;;;;OACAA;MAuBhBA,OAAYA,CEkVeA,0CFlVgBA,wFAG7CA;K;gCGxToBC;MAChBA;MAAUA;QACeA;QACvBA;UAAwBA,iBAG5BA;;MADEA,QAAkBA,oBACpBA;K;mBFdUC;WACMA,oBACIA;QAAYA,WAKlCA;MAH2BA,WAG3BA;K;uBAOWC;WCioBkBA,oBD/nBNA;QACDA;MAGpBA;QAGYA;UACWA;UACnBA;YDjBSA,sCCkBiBA;;;;;WDnBpBA;QACGA;MC2BbA,OElCAA,mCFmCFA;K;4BA2kBcC;;;8CAnRYA,yBAqRtBA;QA/JOA;cAgKLA;;MAEFA;QACEA,6BN3bJA,6EMiciBA;QAEbA,MA6BJA;;0BA3B2BA;aAClBA;MACPA;QAGmBA,qEAAmBA;cAjStCA,gBAA0BA;cAC1BA;QAkSEA;QACAA,MAmBJA;;MAhBWA;kBACGA;UACeA;;UADYA;;QN/czBA;MM8cdA;QAM+BA;QAC7BA,4BAAoBA;QACpBA;QACAA,MAOJA;;;MCiqCEA,
2CDpqCOA,QCoqCkCA,wBDpqCVA;IAGjCA,C;iCA0IYC;;;kHAIVA;QAAaA;eA1cQA;QAAOA;QAAeA;QA6czCA;UACEA;YA/VGA;YCy0CPA,6BDv+BmBA,kBACAA;;UAGfA,MA0KNA;;cArKoBA;gCACyBA;QACzCA;YACWA;UACTA,sCAAsBA;gBACtBA;sCACwBA;;mBAGGA;yBAAOA;cAQ/BA;cACDA;QAKkCA;iBAlrBhBA;UAkrBGA;;UAvCpBA;QAuCLA;mBAprBeA,OAAOA;UAsrBPA;mBAAWA;YAARA;;YAAHA;UAAbA;YApYGA;YCy0CPA,+BDj8BmBA,oBACAA;YAEbA,MAqIRA;;qBAjI0BA;UAApBA;;;YA4FIA;iBAxxBmBA;UA2wBvBA;YAxE+BA,yFAyE7BA;eACKA;YACLA;cA9BsBA,8EA+BpBA;iBAGFA;YAzBcA,gEA0BZA;UAKJA;;qBAIIA;;uBACAA;yCAvsBuCA,YAAsBA;;YAssB9BA;UAAnCA;YAESA;2BAGUA,SAASA;mBApmBTA;cAmNNA,uBAAUA;oBAC3BA;cACOA;oBAtEPA,YACYA,qBAAkCA;oBAC9CA,wBAA4BA;oBAwdlBA;cACAA;;cAEAA;YAKJA,MAeRA;;;uBAXqBA,SAASA;QAlaXA,uBAAUA;cAC3BA;QACOA;mBAkaAA;mBACcA;QADnBA;UA3fmBA;gBADrBA;gBACAA;;UA8feA;gBAzffA,gBAAwBA;gBACxBA;;cA4fEA;;;IAEJA,C;yBAgEOC;MACPA;MAAiBA;QACfA,OAAOA,4FAaXA;;MATmBA;QACfA,OC8wBiEA,oBDtwBrEA;MANEA,sBAAoBA;IAMtBA,C;kBG5hCKC;MACHA;oBAAiBA,gBAAjBA,wBAAuDA;;oBAEpCA;;QAEjBA;;QACOA;;IAEXA,C;uBAEKC;;;QAKDA;;;;aAIIA;UJnBJA,6CAAyBA,OIoBMA;;IAGnCA,C;0BAMKC;MAnDHA;wBAqDoCA;MACpCA;;cAEOA;UJlCLA,6CAAyBA,OImCMA;;sCAGlBA;IAGjBA,C;kCAQKC;;cACCA;MAAJA;QACEA;mCACwBA;QACxBA,MAgBJA;;MA3FEA;8BA8E4CA;MAC5CA;aACQA;;;mCAG0BA;aAC1BA;sDACeA;QAErBA;;;IAIJA,C;qBA0BKC;;uBACsBA;WACXA;QAGZA,wCAHYA;QAIZA,MAcJA;;MFwrDIA,oDAAyCA,wBEzrDPA;IACtCA,C;iCC0/EUC;MCtoDSA;MDyoDjBA,OC1oDAA,uDD0oD8BA;K;eE/0D3BC;MAC8BA,MAMnCA;K;sDD1pBkBC;;QAEEA;MACAA;QACdA,OAAOA,2FAWXA;MAPkBA;QACdA,OJ2oD+DA,8CIroDnEA;MAJEA,sBAAUA;IAIZA,C;qBAyWGC;MJgwCDA;II9vCJA,C;oBAGKC;IAAoBA,C;oBJy7BpBC;MACHA,iCAA+BA;IAGjCA,C;YAEEC;;cACmBA;MAAnBA;QAAoCA,OAAOA,UAY7CA;;MANQA;;QAEGA;QAAPA,SAIJA;;;;K;iBAEEC;;cAOmBA;MAAnBA;QAAoCA,OAAOA,aAY7CA;;MANQA;;QAEGA;QAAPA,SAIJA;;;;K;kBAEEC;;cAQmBA;MAAnBA;QAAoCA,OAAOA,oBAY7CA;;MANQA;;QAEGA;QAAPA,SAIJA;;;;K;0BAqCKC;MAS8BA;WAHlBA;QAGPA;MAKRA;IACFA,C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;2BM7wCSC;uBACOA;MAGZA,qCACFA;K;2BAEYC;MAIVA;;;;IAQFA,C;0BAoBOC;MAIOA;MAIZA;MAoKOC;MAlKPD,YACFA;K;wCAgKQC;MACNA,sCAAOA,kEdrfTA,uFcsfAA;K;sCAMQC;MACNA,Od7fFA,qFc8fAA;K;uBCxbcC;MAEZA;MAAIA;QACFA,cAwBJA;MCyXAA;;QD7YMA;QACFA;;UAEKA;QACLA,eAAUA;;;QAYVA;gDAAiBA;QAAjBA;;iBC4Z0CA;MDzZ5CA,sCACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;8BEoOWC;MAULA;;;mGAQJA;QACaA;kCAAKA;oBAALA;QACXA;QACoBA;QACpBA;QACAA;UACSA;UAA2CA;UAA1BA;wCAASA;UAAjCA;;;+CAAMA;UAAkBA;UACjBA;UAA2CA;UAA1BA;wCAASA;UAAjCA;gDAAMA;UAAkBA;UACjBA;UAA2CA;UAA1BA;wCAASA;UAAjCA;+CAAMA;UAAkBA;UACjBA;UAAqCA;UAApBA;wCAASA;UAAjCA;gDAAMA;UAAkBA;UAVfA;UALOA;;;MAoBpBA;QACEA;UAiCOA;UACAA;UAFTA;YACoDA;YAA1BA;0CAASA;YAAjCA;;;iDAAMA;YAAkBA;YAC0BA;YAA1BA;0CAASA;YAAjCA;kDAAMA;YAAkBA;YACjBA;YAAPA;kDAAMA;;YACNA;iDAAMA;;;YAG4CA;YAA1BA;0CAASA;YAAjCA;;;iDAAMA;YAAkBA;YAC0BA;YAA1BA;0CAASA;YAAjCA;kDAAMA;YAAkBA;YACjBA;YAA2CA;YAA1BA;0CAASA;YAAjCA;kDAAMA;YAAkBA;YACxBA;iDAAMA;;;UAxCJA,QAgBNA;;QAdIA,4CAcJA;;MATEA;QACaA;kCAAKA;oBAALA;QACXA;UAA4BA;QAC5BA;;MAImCA;gCAAKA;MAF1CA,sBAAoBA,yEAEsBA,qCAALA;IAEvCA,C;8BA0RWC;MAzDFA;;;;;0BA8EgCA;0GACvCA;QACaA;kCAAMA;QAANA;QACXA;QAC2BA;QAAhBA;6CAAeA;8BAAfA;QACXA;UACqCA;UACpBA;UACfA;YAESA;YAAPA;;;8CAAMA;;YACCA;YAAPA;+CAAMA;;YACCA;YAAPA;8CAAMA;;;YAbCA;;UAgBTA;eACKA;UACLA;YAAqCA;UACrCA;YACEA;cACEA,sBAAMA;YAEDA;YAAPA;;;8CAAMA;;YACNA;+CAAMA;;;YAENA;cACEA,sBAAMA;YAERA;;8CAAMA;;;UAOiBA;UACzBA;YAA2BA;UAE3BA,OAAOA,uEAcbA;;QAZIA,sBAAMA;;MAERA;QACEA,gCASJA;MALEA;QACaA;kCAAMA;QAANA;UACsBA;;MAEnCA,sBAAMA;IACRA,C;kCAOiBC;MAOIA;;uBAGCA;;MAIpBA;QACEA;MAEFA;QAAsBA,ObikByBpM,4Ba7jBjDoM;MADEA,OAAOA,oCACTA;K;oCAaWC;;;;;;MAMTA;;;;UACEA;UACWA;wCAAMA;UAANA;UACXA;YACEA;;;;UAIFA;YACEA;cAAoBA;YACpBA;YACOA;0CAAMA;YAANA;;UAETA;YACEA;cAAoBA;YACpBA;YACOA;0CAAMA;YAANA;;UAETA;YACEA;;;;UAIFA;;;MAEFA,aACFA;K;gCAoBWC;MAETA;;QAAkBA,YA0CpBA;MA9PSA;8BAwNPA;QACaA;sCAAMA;QAANA;QACXA;UACEA;YACEA;YACAA;YACAA;;UAEFA;YACEA;YACAA;YACAA;cAAkBA;YACXA;0CAAMA;YAANA;;YAEPA;;QAMJA;UAEEA;YAAqBA;UACrBA;UACAA;UACAA;YAAkBA;UACXA;wCAAMA;UAANA;;QAGTA;UAA8BA;QAC9BA
;QACAA;QACAA;UAAkBA;;MAEpBA;QACEA,sBAAMA;MAERA,2BACFA;K;;;;;;;;;;;;;;;;;;gBDznBaC;MACHA;;QAARA;MACiCA;MACjCA;MACAA;IACFA,C;oBAoCQC;MAEuCA;;MAC7CA;QAEEA;;MAMFA,aACFA;K;gBAkBQC;MAC4BA;MAAZA,SAOxBA;K;iBAOQC;MACNA;MAAaA;QAAYA,OtBpPvBC,gBANiCC,4CsBkQrCF;MALoBA;MAClBA;QACEA,8BADFA;MAGAA,WACFA;K;+BAoCQG;MAEKA;;MAkBFA;MAAPA,SAGJA;K;+BAqBcC;yBAEQA;MACpBA;QAAkBA,SAGpBA;MADEA,OAAkBA,6DACpBA;K;0BA+FcC;MACgBA;MACvBA;QAAqBA,aAa5BA;mBEpKoBA;;UFuKgCA,cAbVA;eAC7BA;;QAYuCA,cAVZA;eAC7BA;UASyCA,kCAPVA;;MAGxCA,aACFA;K;sDAgBQC;MAEJA,OASJA,kCAT6CA,6BAC1BA,sCAAgCA,gCAAeA;K;sBAyM5CC;MAAWA,+BAAsBA,YAAsBA;K;wBGvL/DC;MACDA;;MAEXA;QAAkBA,aAIpBA;MAHEA;QAAiBA,wBAGnBA;MAFEA;QAAgBA,yBAElBA;MADEA,0BACFA;K;yBAUcC;MACZA;QAAcA,aAGhBA;MAFEA;QAAaA,cAEfA;MADEA,eACFA;K;uBAEcC;MACZA;QAAaA,aAEfA;MADEA,cACFA;K;sBlBviBcC;MACgBA;QAC1BA,OAAOA,qBAMXA;MAJEA;QACEA,OTmwFGlS,sBShwFPkS;MADEA,OekLkBA,iCfjLpBA;K;6BA8BaC;MACXA;MACAA;MACAA;IACFA,C;mBAYAC;;IAA8BA,C;kBAuD9BC;;IAEqBA,C;uBAcrBC;;IAEoBA,C;oBAyDpBC;;IAG6DA,C;oBAe7DC;;IAQgEA,C;8BAuFrDC;MAUTA;QAEEA,sBAAiBA;MAEnBA;QACEA;UAEEA,sBAAiBA;QAEnBA,UAGJA;;MADEA,cACFA;K;+BAWWC;MACTA;QACEA,sBAAiBA;MAEnBA,YACFA;K;yBAsEAC;;IAMqEA,C;qBA8FrEC;;IAAqCA,C;uBAcrCC;;IAAkCA,C;eAyBlCC;;IAAwBA,C;gCAaxBC;;IAAkDA,C;uBmBpmB1CC;MAA4BA,OAOpCA,yBAPuDA;K;oBAiDjDC;;IAA8DA,C;kCC8vBtDC;MAKZA;MAAIA;QACFA;UAEEA,cAgBNA;QAdIA,6CAcJA;;MAZ+BA;MAC7BA;;QAEEA;;QAGAA,UALFA;UAKEA,gBALFA,sBAKmBA;QAAjBA,CALFA;;MLhUYA,6CAAqBA;MKuUjCA,sCAIFA;K;iCAYcC;MAKZA;MAAIA;QACFA,6CAYJA;ML1XAA;MKiXEA;;QAEEA;QLlWUA,EAAZA,wCAAsBA;;QKqWpBA,UALFA;UAKEA,gBALFA,sBAKmBA;QAAjBA,CALFA;;;iBLlV4CA;MK0V5CA,sCACFA;K;2BA0BGC;MAwB6BA;;;MAGhCA;;;QACOA;UAAeA,MAkFxBA;QAjFwBA;QACpBA;uBACeA;QACfA;;MAQGA;QACHA;UAAoCA,MAqExCA;QApEqBA;mCAAMA;QAANA;QACGA;mCAAMA;QAANA;;QAEKA;QACzBA;QACKA;UACHA;YACEA,+BAAYA;YACZA,MA4DRA;;UA1DyBA;UACCA;qCAAMA;UAANA;mCACKA;;UAEHA;UACtBA;iBAGOA,iBAAPA;YAEgBA;YACdA;YACAA;cAQEA;;;gBAEYA;2CAAMA;gBAANA,sBAAmBA;gBAC7BA;;cAEFA;cACAA,MAgCVA;;;UA7B4BA;UACHA;mCACMA,2BAA2BA;;;uBAOtCA;QAEhBA;QAfgBA;;;MAqBlBA;mCAAqCA;;QACzBA;mCA
AMA;QAANA,sBAAmBA;QAC7BA;UAEEA;UAzBcA;;;MA4BlBA;QACEA;MAEFA;MACAA;IACFA,C;eC90BaC;MAuByBA;MAAkBA;M3BP7CA,8BADAA,qBADAA,qB2BSuDA;MAA5DA,cAySJA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;4BCveFC;;oBACiBA;MACfA;QAAsBA,eAexBA;;;;;OAdYA;MAWaA;;MAEvBA,UACFA;K;yBAqBAC;MACkCA;MAAVA;MAAtBA,OPiCoBA,sDOhCtBA;K;gBAOEC;MACAA;QAEEA,QAIJA;;QAFIA,OAAOA,qCAEXA;K;kBAuDmBC;MACjBA;;QACEA,sBAAMA;;;;;OAEOA;MAWWA;MAC1BA,aACFA;K;0BA4MAC;MAC0BA;MAApBA;QAAaA,OAAOA,qBAE1BA;MADEA,OAAOA,iBACTA;K;oBC5UKC;MACDA,oBACEA,gEAGAA,yBACAA,0BACAA,iCACAA,0BACAA,2BACAA,0BACAA,2BACAA,4BACAA,4BACAA,2BACAA,qBAAWA;K;SAGTC;MACFA;QACFA,aA8BJA;MADEA,OAzBgBA,qBVuVPA,uFU9TFA,cACTA;K;cAyCEC;MAEAA,OAAOA,gCACTA;K;mBAmcUC;MjB/NRC,wBAAyBA,gBAAzBA;oBAlQIC;MiBifJF,eAbgBA,yBAAuBA,kDACzBA,yBAAuBA;MAarCA,SACFA;K;sBAsCKG;MACDA,gZA+BCA;K;WAGGC;MACFA;QACFA,QAgEJA;MADEA,OA1DeA,sBVpRNA,uFU8UFA,SACTA;K;;;;;;;;;;;;;;;;;;;;mBCjoBUC;MACUA;;oBAC4BA;;MAC9CA;QAEEA;UAA0BA;YACpBA;0CAAMA;yBAANA,sBAAoBA,0BAAwBA;;YADxBA;;;UAExBA;;QAEFA;UAEUA;QACHA;QAAPA;UAAmBA;YAAUA;YAAPA;wCAAMA;uBAANA;;YAAHA;;;UACjBA;;QAGFA;UACEA;YAAkBA,sBAAMA;;UAExBA;QAGYA;;MAEhBA,aACFA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0BCVEC;;uBAKiBA;MAAfA;QACEA,kBAAMA;MAMSA,8EAJIA;MARvBA;IASAA,C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;mBCnFWC;MAGaA;wDAApBA,sBAAqCA;MACzCA;QACEA,gBCkQEA,OAAUA;QJzMZA;;QCsDFA,6BAM2BC,4EE9GXD,8DFsHsBC,qBIjJlCC;QF8BFF;gDACwCA;QAGpCA;mBFqHAA,gBAA0BA;UAC5BA,gBG8HAG,OAAUH;iBH5HVA,cATkCC;;eAWpCD;eACAA;QACAA;;MEzHFA,cACFA;K;2BAEKI;MAEEA,+CADLA,sBACsBA;;UFwHpBA;IEtHJA,C;QAEKC;MAESA;;;kDAFTA;QAESA;;;;;;cAALA;oBCoF8BA;gBACjCA,kBAAMA;cAQeA,mBD7FCA;gBC8FxBA,WD9FwBA;cC0HQA,kBDzHbA;cAIrBA;cCiOIA,UAAUA;cpC2uGZA;;cgCp7GAA;gBIyMEA,UAAUA;gBJzMZA,iCGWEA;;cHXFA,4BG6WAA;;cACJA;;;MA3YcA;IA2YdA,C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;MC5XMC;;iBADIA;MACJA,8CAA2BA,mCAA0BA;K;;;;;;;;;;;;eEjDtDC;MACHA;QAEEA;QACAA,MAoBJA;;;QAdIA;QACAA,MAaJA;;MATEA;QACEA;QACAA,MAOJA;;MADEA;IACFA,C;oBC9BKC;MAEHA,6BCaAA,wEDdgBA;IAElBA,C;qBASKC;MAEHA,6BCRAA,uFD
OgBA;IAElBA,C;sCEmOKC;MACDA;;;kCACMA;UAAeA,cAGvBA;;MADEA,WACFA;K;kBC/NmBC;MACnBA;;UAEIA,OAAOA,iG9BszCsCnQ,kD8BpyCnDmQ;;UAVQA,OAAOA,8IAUfA;;UAFMA,sBAAMA;;IAEZA,C;;;;;;E3CoRiCC;OAFjBC;MAAoBA,yBAAsBA;K;gBAEhDD;MAAYA,4CAA+BA;K;cAE5CE;MAAcA,yBCqJLA,2CDrJiDA;K;kBAgBzDC;MACNA,sBAAwBA,+DAAqBA;IAC/CA,C;mBAESC;MACLA,OOyzBGA,oBADGA,qCPxzByDA;K;;EAQ9CC;cAAdA;MAAcA,uBAAgCA;K;gBAU7CC;MAAYA,iCAAwCA;K;mBAGnDC;MAAeA,sCAAmCA;K;;;;;OAWpCC;MAAEA,oBAAcA;K;cAGhCC;MAAcA,aAAMA;K;gBAEnBC;MAAYA,QAACA;K;;;;;EAmDAC;gBALbC;MAAYA,QAACA;K;mBAEZC;MAAeA,0BAAQA;K;cAGzBF;MAAcA,uBAA+BA;K;;;;;cAyB7CG;MACiCA,0BAApBA;MAClBA;QAAyBA,OAAaA,oDAExCA;MADEA,oCAAkCA,0BACpCA;K;;;EAiBqBC;gBAHbC;MAAYA,QAACA;K;cAGdD;MAAcA,uBAA+BA;K;;EAqB/BE;gBAHbC;MAAYA,QAACA;K;cAGdD;MAAcA,uBAA+BA;K;;;SG9V/CE;mDAE4BA;MAN/BA;MAMAA;IACFA,C;YAyGKC;MACHA;4DACIA;MAlHJA;MAkHeA;QACbA;QACAA,MAOJA;;MAJEA;QAEEA,cAFFA;IAIFA,C;sBAEKC;MACCA;MAAMA;iBAAMA;MAChBA;QAAcA,MAKhBA;MAJEA;QAA4BA,sBAAMA;MAClCA;QACEA;IAEJA,C;WAuBYC;;MACVA,OE8HFA,kEF9HwCA,QE8HxCA,kEF7HAA;K;eAyGEC;MACWA;;MAAXA,eAAWA,OACbA;K;cA+SOC;MAAcA,OyCxKJA,mDzCwK+BA;K;gBAahCC;MAAYA,OAiI5BA,sCAEyBA,SAnIGA,+BAiI5BA,4BAjIkDA;K;gBAE1CC;MAAYA,OAAWA,qCAAoBA;K;cAE3CC;MAAUA,sBAAiCA;K;UAsCxCC;MACLA;0CAEwBA;QAASA,sBAAMA;MAC3CA,eAAOA,OACTA;K;aAEcC;mDAQ4BA;MALpCA;0CAIwBA;QAASA,sBAAMA;MI2VtCC;IJzVPD,C;mBA2CSC;MAAeA,2BIkTdC,+BJlTyCD;K;;;;;;;eAiC7CE;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAEzBC;;kBACUA;oBAAUA;eAKnBA;QACIA;QAANA;;gBAGEA;MAAJA;QACEA;QACAA,YAKJA;;MAHEA,qBAAWA;;MAEXA,WACFA;K;gBA1BGC;;K;;;;W0ClwBCC;MACFA;;QAEEA,mBAOJA;MALEA,AAAIA;QAkEmBC,0CAECA;QAnEtBD,aAIJA;;MADEA,sBAAMA;IACRA,C;mBA6HOE;MACLA;MACAA;QACEA,sBAAiBA;MAEHA;iBAEaA;;MAAzBA;kCAAOA;MAAPA;QACFA,aAGJA;0DAKgBA;MAEdA;QAEEA,kBAAMA;gBAEmBA;;gCAAKA;oBAAvBA;MACsBA;gCAAKA;uBAALA;gBAC3BA;MAAJA;QACWA;QAGKA,cAFFA;;MAhBdA,sDACFA;K;cAqBOC;MACLA;QACEA,aAIJA;;QAFIA,oBAEJA;K;gBAEQC;MACFA;;MAGJA;QAAsBA,2BA6BxBA;MAxBiBA;MACEA;MAIJA;MAWGA;MAOhBA,kHACFA;K;QAwBkBC;MAChBA;MAGAA;QAAiBA,QAOnBA;MANEA;QAAgBA,aAMlBA;MAFIA,qBAEJA;K;eAeIC;MAEFA,4DAEMA,iCACRA;K;eAEIC;MACE
A;MACJA;QAEEA,mBAgBJA;MAdEA;QAGEA;UACEA,OAAOA,oBAUbA;aARSA;QAELA,OAAOA,mBAMXA;MAFEA,sBAAMA,0DACiCA,uBAAWA;IACpDA,C;uBA4BIC;MACFA;;QACMA;;;QAKAA;;MANNA,SAOFA;K;sBAOIC;MACFA,0CASFA;K;mBAiDSC;MAAeA,qCAAkCA;K;;;;EA+MlCC;mBAAfA;MAAeA,qCAAkCA;K;;;;EAWlCC;mBAAfA;MAAeA,wCAAqCA;K;;;;clBpoBxDC;6BAEqBA;qBACNA;MAAlBA;QAA0BA,YAE5BA;MADEA,iBAAgBA,4CAClBA;K;gBA8EKC;+BAOuBA;MAGRA,0BARYA;QAOLA,YAI3BA;MAHIA,qDAGJA;K;eAGOC;MAGLA,OAAOA,0BADUA,iDAAiCA,SAEpDA;K;eAJOC;;K;QAqKSC;MACdA;;QAAgBA,SAelBA;iCAdyBA;QAAaA,eActCA;MAbEA;QAEEA,uBAAYA;MAIdA;QACEA;UAA6BA;QACrBA;QACRA;UAAgBA;QAChBA;;MAEFA,aACFA;K;iBAsCIC;0BAGQA;oBAQUA;MAAlBA;QACeA;MAEfA,O1B8wBFA,oC0BxwBFA;K;cAwBOC;MAAcA,eAAIA;K;gBAMjBC;MAGFA;wBACgBA,0BAApBA;QAC8BA;QACrBA;QACAA;;MAEFA;MACAA;MACPA,gDACFA;K;mBAGSC;MAAeA,wCAAqCA;K;cAErDC;MAAUA,sBAA4BA;K;UAE9BC;MACVA;MAEEA,yBAAcA,qBAAQA;QAASA,sBAAMA;MAC3CA,eAAOA,OACTA;K;;;;;;SmBtWKC;MACCA;MAAYA;uBAAMA;MACtBA;QAAoBA,MActBA;sBAbiBA;gBACXA;aAAQA;MAAZA;QA4BcA;QACdA;UAAcA;;UA0CZC;UACGA;UACLA;UACAA;UACAA;UAEOA;;QjCmrCwChU;QiC7tC/C+T;aACAA;;;MA/BEA,8CAAiBA;WAMnBA;IACFA,C;aAkCUE;MACJA;MAAqBA,SAArBA;QAAcA,+CAIpBA;MAHEA,OjCstCEC,eArCSD,oBkCpaJA,kBD5wBkBA,0CAARA,gBAAgBA,QAAQA,kBAAeA,sBAE1DA;K;cAEQE;MAAUA,8BAAOA;K;;;;cLxGlBC;MAELA,yCADcA,SAIhBA;K;;;;;gBpCHgBC;MAAYA;aAqT5BA,0BAEuBA,yBAvTKA,uBAqT5BA,wCArTiDA;K;WA8IrCC;;MACRA,OA4PJA,2EA5PmCA,gBA4PnCA,+EA5P6CA;K;;;eA2KvCC;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAIzBC;MACoBA;kBAAVA;;kBAAUA;eACnBA;QACFA,sBAAMA;gBAEJA;MAAJA;QACEA;QACAA,YAKJA;;MAHEA,8BAAWA;;MAEXA,WACFA;K;2BAxBGC;;K;;;;gBA0CaC;MAwBhBA,aAxBiDA;MAArBA,4BAA+BA,yBAAUA,KAAzCA,sBAwB5BA,+BAxBwEA;K;cAGhEC;MAAoBA,aAAVA;8BAAgBA;K;;;;cAuB7BC;;kBACCA;;QACFA,8BAAWA,gBAAaA;QACxBA,WAIJA;;MAFEA;MACAA,YACFA;K;eAEMC;MAAoBA,aAATA;kCAASA,sBAAIA;K;2BAf3BC;;K;;;EA6BuBC;cAAlBA;MAAUA,qCAAcA;K;eAC9BC;MAAwBA,sBAAGA,sCAAyBA;K;;EAsBtDC;gBAXgBA;MAAYA,2BAA2BA,sBAAVA,4BAAoBA,KAWjEA,qCAXoEA;K;WAGxDC;MAlEZA;MAmEIA,iEAA6BA,gBAnEjCA,8DAmE2CA;K;;;cAStCC;MACHA;oBAAOA,qBACDA,KADCA;kCACDA,UAAaA;UACfA,WAINA;MADEA,YACFA;K;eAEMC;MAAWA,OAAUA,IAAVA,
wBAAiBA;K;;;;;gB2Chc1BC;qBACMA;MACZA;QAAkBA,WAKpBA;MAH8CA,oDAANA;;MAEtCA,WACFA;K;cAGAC;MAAcA,wBAAUA,wBAAQA;K;OCTlBC;MAAEA;oBAAyDA;MAAvCA,wCAAmBA,2BAAeA,iBAAKA;K;;;;ECmB5CC;cAAtBA;MAAcA,kCAAyBA;K;;;;cAqEtCC;MAAUA,mBAAQA,OAAMA;K;aAEpBC;qBACCA;MACXA;QAuDKA,uBAtDmBA;;;MAGxBA,WACFA;K;iBAWKC;MACHA;QAAoBA,YAGtBA;MAFEA;QAAwBA,YAE1BA;MADEA,OjDugFKA,IiDvgFmBA,6BAC1BA;K;UAEYC;MACLA;QAAkBA,WAGzBA;MADEA,WAAsBA,QAAfA,KADoBA,SAAfA,MAEdA;K;aAEKC;MACGA;;MAAOA;mBACEA;oBACUA,gBAAzBA;QAGEA,aAFQA,WACEA;IAGdA,C;YAEgBC;MAAQA,OAkCxBA,oBAlCyCA,kBAkCzCA,qCAlC+CA;K;;;cAoCvCC;MAAUA,qBAAUA,OAAMA;K;gBAIKC;MAUvCA,aAT4CA;MAAxCA,mDASkEA,SAAtEA,uDATsDA;K;;;eAWhDC;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAEzBC;;kBACCA;qBAAUA;QACZA;QACAA,YAKJA;;MAHEA,oCAA6BA,UAAlBA;;MAEXA,WACFA;K;4BAbGC;;K;;;;kBjDuBQC;mBACLA;MgD7MAA;QhD6MuBA,SAE7BA;MADEA,WAAOA,4BAA6CA,gBACtDA;K;2BAiBSC;MACPA;eAfmBA;QAeLA,QAAOA,WASvBA;gBAPMA;MAAWA;0CAA6BA,sBAApBA,6BAA6BA;MACrDA;QAAwBA,QAHHA,WASvBA;MElIqBA;MF8HnBA;QACEA,UAASA;;MAEXA,WACFA;K;sBAEyBC;MACvBA;eAzBqBA;QAyBLA,QAAOA,UAWzBA;gBAV2BA;MAAoBA;;gBAEzCA;MAAWA;+EAA8BA;MAC7CA;QAA6BA,QAJNA,UAWzBA;MQzOAA;MRoOEA;QACEA,iBgDpPEA,ahDoPoDA,YAAnBA,oBAC/BA;MAENA,OiDxQFA,gEjDyQAA;K;;;;UA6kB2BC;MACrBA;MAAkBA;;QAAlBA,WAAUA;MACVA;MACAA;;IAEDA,C;;;;oBAilBLC;;gCAEyDA,WAD3CA;MAEZA;QAAmBA,WAmBrBA;MAlBeA;gBACTA;MAAJA;;gBAGIA;MAAJA;;gBAGIA;MAAJA;;gBAGIA;MAAJA;;gBAGIA;MAAJA;;MAIAA,aACFA;K;;;cAmNOC;MACLA,iDACFA;K;;;cAaOC;;;kBACDA;MAAJA;QAAqBA,oCAA4BA,qBAMnDA;gBALMA;MAAJA;QACEA,iCAA0DA,2BAI9DA;MAFEA,iDACoDA,2BACtDA;K;;;cAQOC;mBAAcA;e0BzyCDA,wC1ByyCgDA;K;;;cAQ7DC;MAGLA,iCAD6BA,kEAE/BA;K;;;;cAyMOC;;iBACDA;MAAJA;QAAoBA,SAQtBA;eAL+BA;wDAEnBA;MAEVA,WAAOA,oCACTA;K;;;;cA+nBOC;MAMcA,uBAJDA;0DAEeA;MAEjCA,6EACFA;K;;;;;;;;;;;;;cAqBOC;sBAEDA;MACJA;QAAkBA,yCAEpBA;MADEA,qBAAmBA,4BACrBA;K;;;OA6BcC;MAAEA;oBAKhBA;MAJEA;QAA4BA,WAI9BA;MAIyBC;QAPKD,YAG9BA;MAFEA,WARoBA,oCASMA,oBAAiBA,UAC7CA;K;gBAGQC;MAENA,6BADsCA,cACDA,gCAfjBA,iBAgBtBA;K;cAGOC;MAGLA,yBAzBkBA,uCA3mEJA,gCAqoEgCA,kBAChDA;K;;;cA+LOC;MAELA,yCADwBA,6CAI1BA;K;;;cAOOC;MAAcA,8BAAg
BA,QAAQA;K;;EA4kBKC;cAA3CA;MAAcA,uDAA0CA,SAAQA;K;;;EQ1iGvEC;cA5SQC;MAAUA,+BAAOA;K;YAITD;MAAQA,mEAwSxBA,wCAxS0DA;K;iBAMrDE;wBAEaA;MACdA;QAAqBA,YASzBA;MARIA,cA8OKC,aAtOTD;K;UAmBYE;MACVA;;sBACgBA;QACdA;UAAqBA,YAWzBA;sBAqMSA;wCA9MyCA;QAA9CA,SASJA;aARSA;mBACMA;QACXA;UAAkBA,YAMtBA;mBAqMSA;QAvMEA,gCAFuCA;QAA9CA,SAIJA;;QAFIA,8BAEJA;K;iBAEGC;;mBACUA;MACXA;QAAkBA,WAMpBA;MA0KaA,aAqBJC;MAnMKD;MACZA;QAAeA,WAGjBA;MADEA,aADyBA,OAClBA,iBACTA;K;aAEcE;;;MACKA;MAGkBA;MAHnCA;uBACgBA;QAEdA,8DADqBA,YAAqBA;aAErCA;oBACMA;QAEXA,2DADkBA,SAAeA;;oBAQxBA;QACXA;UAAiCA,YAAfA;QACPA;qBA4KJC;QA1KPD;UAC2BA;;UAGbA;UACZA;kBAC2BA,OACpBA;;YAGLA,YADyBA;;;IAhB/BA,C;iBAsBEE;;;MACgBA;wBACNA;MADNA;QAA6BA;QAAXA,oBAAiBA,wBAIzCA;;MAHYA;MACNA;MACJA,YACFA;K;YAEGC;MAEQA,0CAAsBA;MAA7BA,SAMJA;K;aA4BKC;MACgBA;;kBAAOA;2BACNA;aACpBA;QAGEA,kBAFQA,qBACEA;mCAEWA;UACnBA,sBAAMA;mBAEIA;;IAEhBA,C;oCAEKC;;;MAC4CA;MAEEA;kBA2F1CA;MA5FPA;QAC6BA;;YAEtBA;IAETA,C;2BAEGC;MACDA;;QAAmBA,WAMrBA;kBA8ESA;MAlFPA;QAAkBA,WAIpBA;MAHEA;;MAEAA,WAAOA,iBACTA;K;eAEKC;UAKHA,sBAAkBA;IACpBA,C;oBAGkBC;;;eA6GlBA,wBA5G6CA,2BAAKA;eAC5CA;aACFA,eAASA;;kBAEgBA;UAAKA;YACzBA;aACLA,WAAaA;;;MAGfA;MACAA,WACFA;K;iBAGKC;;uBACgCA;mBACJA;MAC/BA;aAEEA;;gBAESA;MAEXA;aAEEA;;YAEKA;;MAGPA;IACFA,C;6BAaIC;MACFA,OAA4BA,iCAC9BA;K;6BAOIC;MACFA;;QAAoBA,SAOtBA;sBANeA;MACbA;QAEWA,iBADgBA,GAChBA;UAAuBA,QAGpCA;MADEA,SACFA;K;cAEOC;MAAcA,OAAQA,2BAAiBA;K;mBAwB9CC;MAIcA;;;MAMZA,YACFA;K;;;;;cAkBQC;MAAUA,gBAAKA,oBAAOA;K;gBAGdC;MA2BhBA,aA1BqCA;MAAnCA,4CAA8CA,mBA2B7BA,SADnBA,gDAzBAA;K;;;eA6BMC;MAAWA,gCAAaA;K;cAEzBC;;kBACmBA;eAAlBA,sBAAuBA;QACzBA,sBAAMA;kBAEGA;MACXA;QACEA;QACAA,YAMJA;;QAJIA,mCAAWA;aACXA,aAAaA;QACbA,WAEJA;;K;4BArBGC;;K;;;EP6BqBC;UAAPA;MAAOA,WAA0BA,UAAUA;K;;;EAErCA;UAAnBA;MAAmBA,WAA6BA,sBAAsBA;K;;;EAEtDA;UAAhBA;MAAgBA,WAAeA,iBAAiBA,iBAAIA;K;;;EWg9BpD/Y;mBA/zCKgZ;MAAeA,4BAAUA;K;iBAElBC;MACdA,mEA+zCEjZ,gDA9zCJiZ;K;iBAFgBC;;K;;;;;;cAkUDC;MA8DfA;QA5DIA,uDAAyCA,QAO7CA;;QAFIA,eAAOA,OAEXA;K;sBAwBKC;MAIgBA;MAAjBA;IAEJA,C;oBAEKC;MACHA;QAGEA;IAEJA,C;;;iBAmBgBC;MAnYSA,wDAoYRA;MADDC;MAEd
D,aACFA;K;iBAHgBC;;K;;;;mBAqIPC;MAAeA,0BAAQA;K;cAgP3BC;MAAQA,0CAAkCA;K;;;;;cA8EvCC;MAAUA,sBAAgCA;K;;;;UA2BlCC;MACGA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;;cA+BKC;MAEHA;MACIA;MAlhBJA;6BAsdwBA;MACxBA;MACAA;MACAA;QAAiBA,kBAAiBA;MACtBA;6BAIcA;MAC1BA;QACEA,kBAAMA;MAKGA;MAEXA;MA6CEA,MAGJA;K;;;;;;mBAmBSC;MAAeA,6BAAWA;K;;;;;mBA0C1BC;MAAeA,6BAAWA;K;;;;;mBA0C1BC;MAAeA,2BAASA;K;UAEpBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA0CSC;MAAeA,2BAASA;K;UAEpBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA0CSC;MAAeA,0BAAQA;K;UAEnBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA6CSC;MAAeA,4BAAUA;K;UAErBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA0CSC;MAAeA,4BAAUA;K;UAErBC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBA2CSC;MAAeA,kCAAgBA;K;cAEhCC;MAAUA,sBAAgCA;K;UAErCC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;;;;;mBAsDSC;MAAeA,2BAASA;K;cAEzBC;MAAUA,sBAAgCA;K;UAErCC;MACMA;MAAjBA,4CAAmCA;MACnCA,eAAOA,OACTA;K;aAQgBC;MAGdA,OASEA,eAVWA,yBADFA,uCAAkCA,UAG/CA;K;aAJgBC;;K;;;;;;;;ENryBCC;WAnabA;MAEFA,yCA8ZsB3c,4BA7ZxB2c;K;WAKIC;MAA8BA,OAmajBA,qBAXOC,iCAxZmDD;K;;;EAylCtDE;cAAdA;MAAcA,0BAAaA,YAAWA;K;;;cA0VtCC;MAAcA,0BAAQA;K;;;;UQrhDzBC;;cACUA;QACRA;MACCA;IACHA,C;;;;UAMOC;MAELA;MAAiBA,WAAjBA;eAG4DA;eACxDA;;IACLA,C;;;;UASHC;MACEA;IACFA,C;;;;UAOAC;MACEA;IACFA,C;;;;gBAkCF5R;cAgEOA;QAxDOA,gBACNA,yBAPiBA;;QASrBA,sBAAMA;IAEVA,C;;;UAXI6R;MAGEA;IACFA,C;;;;cAmECC;;;wBAEMA;;QAAuBA;gBAC3BA;QACHA;;kBAGAA;oCAFeA;UAEfA;;UAEAA;;IAEJA,C;mBAEKC;mBAGDA;cADEA;QACFA;;QAEAA;IAEJA,C;;EAsEgBC;UAAZA;MAAYA,0CAAgDA;K;;;;UAEvCA;MAGvBA,4Bd66CFA,oCc96CoCA;IAEnCA,C;;;;UA0C0CC;MACzCA,IAAkBA,YAAWA;IAC9BA,C;;;EG/SsBC;cAAhBA;MAAcA,eAAEA,OAAMA;K;;;;;;;;ciCoBxBC;IAAYA,C;eAIZC;IAAaA,C;oBAnCSC;;K;wBACAC;;K;;;oBAkIlBC;MAAgBA,WAACA,WAAuBA;K;gBA+C3BC;;;0BAWlBA;MAJsCA;gBA9EpBA;Q9BwlBtBA,oCJzD2BC,gBIyD3BD;QAGEC,oBAAkBA;QAClBA;UACEA,eJmmC0CA;QkClnD1CD,SAeJA;;YlCkc2BE;;;oCkC/cFF,gClCknD0CG;MI3rDpDD;MAgE8BE;MJynDCC;MkCxyD9CL;;MAOUM;MAARA;MAoIAN;kBAAaA,oBAAeA;qBAESA;MACrCA;MACaA;MACAA;MACbA;QACEA;;QAEQA;eA4CIA,4BAAoBA;Q
AEhCA,mBAAYA;MAEdA,mBACFA;K;oBAwBMO;MzCwXNA,SyC7esBA;QAuHlBA,oEAIJA;MADEA,OzCmXFA,kEyClXAA;K;sBAyDKC;;;;gBAhKkBA;MAmKrBA;QACEA,sBAAUA;0BA1JOA;MA8JnBA;QAAcA,MAgChBA;MA7BYA;WAOVA;yDAEAA;yBAtSkCA;QAuShCA;sBACeA;UACbA;;6BAE+CA;UAC/CA;YA3JkCA;mCAAaA;YAEnDA;cAEEA;;cAESA;YAEXA;cAEEA;;cAEKA;YAG2BA;YAArBA;;;;;qCAkJmBA;;;eAtLbA;QA4LjBA;IAEJA,C;mBAEKC;eA9NiBA;iBAmOHA;eACFA;MAGfA,kBAAYA;IACdA,C;0BA5R2BC;;K;yBACAC;;K;;;;;EAqSIC;oBAAtBA;MAAgBA,kFA9NFA,kBA8NkCA;K;oBAEzDC;MzCyPAA,SyCzduBA;QAkOnBA,uCAKJA;MADEA,OAAaA,sDACfA;K;eAEKC;MACHA;MAKyBA;gBArONA;MAgOnBA;QAAcA,MAehBA;sBA7PuCA;;QAmPnCA;;iBArOiBA;UAwOfA;QAEFA,MAKJA;;MAHEA,yBAAiBA;IAGnBA,C;;;UAHmBC;gEACfA,kBAAaA,WAAKA;IACnBA,C;cAFgBC;;K;;;mBnC3UdC;;iBACEA;aAwSmBA;QAxSEA,sBAAUA;MACFA;MAsBlCA,6BAtBWA,YAAQA;IAErBA,C;mBAJKC;;K;;;cAkBAC;;;wBAEmBA;eADjBA;aAsRmBA;QAtREA,sBAAUA;MACpCA,oBAAoCA;IACtCA,C;;;sBA6HKC;MAEIA,SApCiBA;QAmCLA,WAErBA;MADEA,WAxCiBA,OAAOA,oBgC7FEC,mChC6GeD,sBAwBkBA,iCAC7DA;K;iBAEYE;;6BAEeA;;;;uBAaVA;kBA1DEA,OAAOA;MAiDNA;QACPA,uDAGIA;;QAGJA,yBACOA;;QAOTA;QAAPA,SAiBJA;;QAhBIA,wBAFFA;oBA7DwBA;YAkEpBA,sBAAMA;UAORA,sBAAMA;;UAZRA;;IAkBFA,C;;;oBA2HUC;;;sCAcgDA;qBC0R/BA;2BDtSEA;QAEbA,+DACAA;UACVA,sBAAoBA;;sECq8CyCA;QDx7CnDA;;MAzDhBA;MA6DEA,oBA1PFA;MA2PEA,aACFA;K;kBAMUC;;;sCAEiDA;MAvE3DA,wBAAyBA,gBAAzBA;MAuEEA,oBA5PFA;MA6PEA,aACFA;K;qBAkFKC;UAEHA,cAAwBA;UACxBA;IACFA,C;kBASKC;UAGHA,gBACYA,mBAAkCA;UAC9CA,4BAA4BA;IAC9BA,C;kBAEKC;;kBAtJDA;MAwJFA;QACWA,iFAAgBA;aACzBA;;QAEAA;UArCKA;qBArHgBA;YAgKjBA;YACAA,MAURA;;UARMA;;QC2zCJA,0CDvzCEA,QCuzCuCA,wBDvzCfA;;IAI5BA,C;uBAEKC;MACHA;;;QAAuBA,MA+BzBA;gBA/MIA;MAiLFA;QACmBA,4EAAoBA;aACrCA;QACAA;0BAEiCA;UAC/BA;wBAEgBA;gBAETA;;;QAGTA;UAvEKA;qBArHgBA;YAkMjBA;YACAA,MAURA;;UARMA;;QAGUA,MAAZA;QCsxCFA,0CDrxCEA,QCqxCuCA,wBDrxCfA;;IAI5BA,C;sBAEiBC;MAIEA,qEAAUA;MAEpBA,IADPA;MACAA,wCACFA;K;uBAEiBC;MACEA;MAEjBA;sBACkCA;eACxBA;;MAIVA,WACFA;K;yBASKC;MAKHA;;;QAEEA,wBACEA,kDASSA;;QAXbA;QAgBEA;QAKAA,oBAAkBA;;IAItBA,C;wBA0EKC;MAGcA;MACPA;MADmBA;WA5N7BA;WACAA;MA6NAA;IACFA,C;2BAEKC;MAEHA;iBA7VqBA;kBA6VIA,iBAA6BA;Q
AA9BA;;QAAHA;MAArBA;QACEA,MAKJA;MAH+BA;MAC7BA;MACAA;IACFA,C;oBAEKC;MAGcA;MACPA;MAAOA;MADYA;MAnO7BA,uBEteFC;MF2sBED;IACFA,C;oBAGKE;;uBAaCA;kCAAMA;QACRA;QACAA,MAGJA;;MADEA;IACFA,C;6BAqCKC;MACHA;;;MCujCAA,0CDtjCAA,QCsjCyCA,wBDtjCjBA;IAG1BA,C;kBAMKC;;8BAECA;MAAMA;QAERA;QACAA,MAIJA;;MADEA;IACFA,C;yBAEKC;;MCkiCHA,yCD9hCAA,QC8hCyCA,wBD9hCjBA;IAG1BA,C;;;;UA5R4BC;MACtBA,oCAAsBA,YAAMA;IAC7BA,C;;;;UAgCuBC;MACtBA,oCAAsBA,mBAAMA;IAC7BA,C;;;;UAwCCC;;iBAEEA;;;QAEEA,wBAAyBA;;QAD3BA;QAEEA;QACAA;;IAEHA,C;;;;UACQA;MAEPA;IACDA,C;;;;UAOeA;MAChBA,gCAAeA,QAAGA;IACnBA,C;;;;UAkD4BC;MAC7BA,sCAAiBA,aAAQA;IAC1BA,C;;;;UA4GuBC;MACtBA,oCAAmBA;IACpBA,C;;;;UAsBuBC;MACtBA,gCAAeA,YAAOA;IACvBA,C;;;;UAoEGC;MAMMA;;yBAEeA;QA3nBlBA,mBArFUC,OAAOA,egC7FEC,6BhCkHYD;;QA0rBhCD;QAEEA;QA/ZDA,SAgaKA,8CAAsBA,OAha3BA,oBAgayCA;;UAhazCA,EAiaGA,yDAAuBA,OAja1BA;;UAmayCA;UAAGA;;YEj3BtBA;;UAF/BA,EFm3BYA;;;UAEFA;QACAA,MA2BJA;;gEAjjBmBA;2BACFA;;UA+GdA,EA0aGA,2DA1aHA;YA2aGA;;QAGFA,MAmBJA;;;qCAbyBA;QA7jB/BG,2CA+pB4BH;QAhGlBA,gCACEA,sGAGSA;;UAIXA;UACAA;;IAEJA,C;;;;UAVMI;MACEA,8CAAmCA;IACpCA,C;;;;UACQA;MACPA,mCAA4BA,qBAAGA;IAChCA,C;;;;UAOPC;MACEA;;;eACyBA;;;QAptBiBA,gBAotBIA;QAptB7CA,EAotBCA,0BAvvBSC,OAAOA,oBASjBA,oBgCtGmBC,MhCsGiBD;;QA6uBrCD;QAEEA;QACsCA;QAAGA;;UEr5BpBA;;QAF/BA,EFu5BUA;UACAA;;IAEJA,C;;;;UAEAG;MACEA;;QA7cCA,8CA8cyBA,OA9czBA;;QA+cKA,oDACAA,SAtvBYC;UAuvBSD,EAAvBA,0BAAuBA;YACvBA;;;QALJA;QAOEA;QApdDA,sCAqdeA,OArdfA;cAqd6BA;;YAC1BA;;;UAEsCA;UAAGA;;YEt6BtBA;;UAF/BA,EFw6BYA;;;UAEFA;;IAEJA,C;;;;;cIgZUE;MJx/BhBA;gCAAyBA;QI0/BnBA;MACJA,2CACEA,6CAIQA,0CADQA;MAMlBA,aACFA;K;;;UAVIC;;;IAECA,C;cAFDC;;K;;;UAIQD;mBACNA;;8BJ9pBAA,eI8pBiBA;oBJvpBUA;MAlNVE;QADrBA;QACAA;MAoNEF;IIspBCA,C;;;EKvxC0BG;gBH8xBvBA;MAAYA,+EAAiCA;K;OAEvCC;MAAEA;oBAIhBA;MAHEA;QAA4BA,WAG9BA;MAFEA,mDACoBA,4BAAkBA,mBACxCA;K;;;cAkBKC;0BACHA;IACFA,C;eAEKC;0BACHA;IACFA,C;;;UDtkBKC;;;MAISA;gBApCWA;MAkCvBA;QAAiBA,MAMnBA;MALEA;QACEA;;QAEAA,oBAiQJA;IA/PAA,C;cA2BKC;IAELA,C;eAEKC;IAELA,C;iBAaKC;;uBACWA;;QAgPZA,+BAhPyBA,uBAgPzBA;QAhPYA;;yBA0SEA;MAChBA;eACEA,4BAAoBA;;eAE
pBA,6BAA6BA;gBAlYRA;MAsFvBA;QACEA;;QACAA;UACEA;;IAGNA,C;eAIKC;;;MAM4BA;gBA3GLA;WA0G1BA;MACAA,qCAAsBA;;MAEtBA;IACFA,C;iBAsFKC;;kBA/LoBA;mCAiMJA,SAAQA;kBACzBA;;QACmBA;UAhMgBA;sBAAIA;uCAwXvBC;;;QAxLhBD;UACEA;;;;aAKJA;QACEA;UACEA;UACAA,MAgBNA;;QAjO0DA;QAoNtDA;UAAqCA;aACrCA;QACAA;UACEA;;UAEAA;;;MAKJA;QACUA,KAARA;IAEJA,C;gBA7XmBE;;K;;;;;yCAuYGC;;0BAQlBA;MAEAA;MAIFA,OC2UGA,yDAAuBA,qDD1U5BA;K;YAfsBC;;K;;;;;cAgIjBC;MACHA;;gBARsBA;MAQtBA;QAAiBA,MAcnBA;MAZEA;aAEEA;QACAA,MASJA;;MAPEA,oBAAkBA;WAMlBA;IACFA,C;;;UAPoBC;;iBACDA;;QACfA;MACAA;QAA+BA,MAEhCA;+CA4BaA,QA7BDA;iBAuBSA;wBAAiBA;QAEvCA;MACAA;UACEA;2DAvGFC,QAASA,kBAAUA;IA6ElBD,C;;;;kBAwJEE;;gCACoBA;MACvBA;aAEEA;oBACIA;QAAJA;UACEA;UACAA;;;aAIFA;IAEJA,C;eAvFiBC;;K;;;;;;UJ+wBcC;MACvBA,gCAAoBA,YAAOA;IAClCA,C;;;;gBA0PIC;MACHA;;;aACgBA,kBAAgBA;UAC5BA;UACAA,MAMNA;;QAJIA;;QALFA;QAMEA;QA8DFA,mBAAiBA,qBAAOA;;IA3D1BA,C;uBAEKC;MACHA;;;;aACgBA,kBAAgBA;UAC5BA;UACAA,MAMNA;;QAJIA;;QALFA;QAMEA;QAkDFA,mBAAiBA,qBAAOA;;IA/C1BA,C;yBA4BgBC;MACdA,OAAOA,6EACTA;K;UAYiBC;MAAmBA,WAAIA;K;WAetCC;wBACgDA;WAA7BA,oBAAUA;QAAYA,iBAE3CA;MADEA,OAAOA,mCACTA;K;gBAGEC;qDACgDA;MAAEA;MAAFA,KAA7BA,oBAAUA;QAAYA,oBAE3CA;MADEA,OAAOA,iDACTA;K;iBAEEC;mEACgDA;MAAEA;MAAMA;MAARA,KAA7BA,oBAAUA;QAAYA,2BAE3CA;MADEA,OAAOA,6DACTA;K;8BAM8BC;MAEzBA,0EAACA;K;;EAlDSC;UAANA;MAAMA,mCAAgBA,GAAEA;K;;;EMr1CjCC;cA9WQC;MAAUA,+BAAOA;K;YAITD;MACdA,uCAyWFA,2CAxWAA;K;iBAMKE;MACHA;;sBACgBA;QACdA,wCAkOUA,aA3NdA;aANSA;QAIEA,WAHIA;QACXA,kCA+NUA,aA3NdA;;QAFIA,+BAEJA;K;kBAEKC;qBACQA;MACXA;QAAkBA,YAGpBA;MADEA,OAAOA,wBADMA,uCAEfA;K;UAYYC;MACVA;;sBACgBA;QAC8BA;QAA5CA,SAOJA;aANSA;mBACMA;QAC8BA;QAAzCA,SAIJA;;QAFIA,OAAOA,gBAEXA;K;UAEGC;;mBACUA;MACXA;QAAkBA,WAIpBA;MAHeA;MACDA;MACZA,gCAA4BA,WAC9BA;K;aAEcC;;;MACKA;MAGkBA;MAHnCA;uBACgBA;QAEdA,kDADqBA,wBAAqBA;aAErCA;oBACMA;QAEXA,+CADkBA,qBAAeA;;oBAQxBA;QACXA;UAAiCA,YAAfA;QE3GkBC;qBF6GvBD;QACbA;UACEA;;eAEAA;;UAEYA;UACZA;;;YAGEA;;iBAEAA;;;;IAlBNA,C;aAiEKE;;;;MACSA;yBACkBA,gBAErBA,uBAAeA,kBAFxBA;kBACYA;QACHA;QAASA;QAAhBA,gCAAsBA;0BACUA;UAC9BA,sBAAMA;;IAGZA,C;kBAEKC;
;sBACUA;MACbA;QAAoBA,aAiDtBA;MAhDgBA,iCAAOA;qBAIPA;MAHFA;MAIZA;QACcA;uBACEA;QACdA;+BACeA;UAEbA;;;kBAKOA;MACXA;QACcA;uBACEA;QACdA;;UAKEA;;;kBAKOA;MACXA;QACcA;uBACEA;QACdA;uBAEeA,MADHA;0BAEGA;UACbA;kCACYA;YAEVA;;;;MAMNA,YADAA,2BAEFA;K;wBAEKC;;MACwBA;MAIAA;eAkCfA;;YApCVA;;MAEFA;IACFA,C;gBAiEMC;MAEJA,YAAOA,CEjT6BJ,mCFkTtCI;K;;;sBAiCIC;MACFA;;QAAoBA,SAMtBA;sBALeA;MACbA;mBACgBA;QAAdA;UAAkDA,QAGtDA;;MADEA,SACFA;K;;;cAoDQC;MAAUA,4BAAKA,oBAAOA;K;gBAIdC;MAyBhBA,aAxBgCA;MAA9BA,qCAAoCA,qBAwBtCA,2CAvBAA;K;;;eAyBMC;MAAoBA,aAATA;kCAASA,2BAAIA;K;cAEzBC;;oBACQA;sBACEA;kBACmBA;qBAAKA;QACnCA,sBAAMA;6BACaA;QACnBA;QACAA,YASJA;;QAPIA,mCAAWA;aAIXA;QACAA,WAEJA;;K;4BAtBGC;;K;;;ElB7HHC;gBuCxTgBA;MAAYA,oCvC0TLA,2BuC1TKA,yBvCwT5BA,oCuCxTiDA;K;eAE/CC;MAAwBA,OAAIA,4BAAOA;K;WAyIzBC;;MAA0BA,OvCmQtCA,2EuCnQqEA,QvCmQrEA,2EuCnQuEA;K;cA6WhEC;MAAcA,OAWJA,mDAXsBA;K;;;apBhgBlCC;;;;MACWA,gCAAdA,4BACwBA,WADxBA;;QACkBA;QAAhBA,gCAAsBA;;IAE1BA,C;cAoEQC;MAAUA;aAAKA,iBAAMA;K;cAItBC;MAAcA,kCAAiBA;K;;;;UAaxBC;;;aACHA;YACHA;QAEFA;eACAA;MC2YWA;;QA2BfC;MA3BeD;;IDxYZA,C;;;;EA0M0BE;UAAnBA;MAAmBA,2CAASA;K;aAgBnCC;MACHA,2EAAaA;IACfA,C;cAIQC;MAAUA,4BfvTAA,oBeuTWA;K;YACbC;MfZhBvM,aeYwBuM;iDfpTAA,oBAwSxBvM,wCeYiCuM;K;cAE1BC;MAAcA,OfjEQA,0BeiERA,kBAAeA;K;;;;;;;aE9F7BC;MACLA;MAAIA;gBkB7McA;MlB6MlBA;QAAmBA,SAIrBA;MAsCAA,8FAxCuBA;MACPA,EAD2CA;MACzDA,wCACFA;K;;;YAmEWC;MAILA;MAaFA;eAXsBA;MACPA;MACAA;MAEEA;MACnBA;QACEA;MbkgC6ChkB;Ma//BtCgkB,IAATA,oDACEA;MASFA;QAAsBA,aAIxBA;MADEA,WACFA;K;;;aA2LUC;MACSA;yDAAkCA;MACnDA;QAAkBA,ObozB6BjkB,iBa/yBjDikB;MAQIA;MAXWA;QAAiCA;kBA8H1CA;MAAJA;QACEA,kBAAMA;MAERA;QACEA,kBAAMA;aAMRA;MAtIAA,SACFA;K;;;YA2GWC;;kBAIWA;MAApBA;QACWA,KAATA;QACAA,WAMJA;;MAJEA;QAAkBA,Ob4rB6BlkB,iBaxrBjDkkB;MAHeA;MACJA,KAATA,mFAAmDA;MACnDA,aACFA;K;;;;;UD5D2BC;MAClBA;MACsBA;eADzBA;;6BAASA;ewBvkBgCC;QxBkgB7CX;;MAwEmBU;;QACfA;IACDA,C;;;;OjB7ZSE;MAAEA;oBAIQA;MAHpBA,0CAlC8BA,gCA2BXA,4CAUnBA,gBAAeA,MAAKA;K;gBAGhBC;MAAYA,OAAOA,kBAAKA,aAAQA,cAAaA;K;coBwa9CC;MACMA;mCpB3ccA;YoB4cdA,sBpBzceA;YoB0cfA,sBpBvcaA;YoBwcbA,sBpBrccA;coBscZA,
sBpBnccA;coBocdA,sBpBjccA;aoBkcfA,wBpB/boBA;kBAGXA;6BoB6beA;;eAChCA;QACFA,2EAIJA;;QAFIA,qEAEJA;K;;EwB3hBqBC;cAAdA;MAAcA,6BAAeA;K;;E3B6JKC;kBAAzBA;MAAcA,2CAAkCA;K;;;cf1IzDC;mBACDA;MAAJA;QACEA,8BAAkCA,wBAGtCA;MADEA,yBACFA;K;;;;kBAqFWC;MAAcA,kCAAoBA,wBAAwBA;K;yBAC1DC;MAAqBA,SAAEA;K;cAE3BC;MAI6CA;qBAH9BA;;uBAEGA;;iBAELA;MAGGA,UAFhBA;QAAWA,aAKlBA;MADEA,uDAD0BA,qBAAaA,yBAEzCA;K;;;;;EAW+BC;oBAAtBA;MAAgBA,qBAAMA,cAAYA;K;kBAsKhCC;MAAcA,mBAAYA;K;yBAC1BC;;oBAGSA;kBACFA;MAChBA;QAEgDA;WAGzCA;QAC0CA;WAC1CA;QACoCA,gEAAQA;;QAKXA;MAExCA,kBACFA;K;;EAkB8BC;oBAAtBA;MAAgBA,oBAAMA,cAAYA;K;kBAgF/BC;MAAcA,mBAAYA;K;yBAC1BC;MAjFmBA;QAqF1BA,qCAMJA;mBAJMA;MAAJA;QACEA,+BAGJA;MADEA,0CACFA;K;;;;;;ceyDOC;MAzFPA;;YA2FSA;wBACSA;0BAEdA;;UA5DF3B;QA8DmB2B;;cACfA;;MAKFA,KAFmBA,8BAEIA;MASGA,yCAAaA;MACbA;MAG1BA,uDALkCA,kBwB9kBShB,8FxB8lB/CgB;K;;;cfxDOC;MAAcA,uCAAyBA,QAAQA;K;;;cAc/CC;MAELA,oCADmBA,QAIrBA;K;;;cAoBOC;MAAcA,2BAAaA,QAAQA;K;;;cAcnCC;mBACDA;MAAJA;QACEA,kDAIJA;MAFEA,sDACaA,8BACfA;K;;;cAOOC;MAAcA,sBAAeA;K;kBAEpBC;MAAcA,WAAIA;K;;;;cAO3BC;MAAcA,uBAAgBA;K;kBAErBC;MAAcA,WAAIA;K;;;;cmBrnB3BC;MAGLA,2BAFuBA,QAGzBA;K;;;cAmDOC;;sBAEkBA;;qBAIJA;qBACGA;0CAEiCA;MAArDA;QAIIA;MAAJA;kBACaA;UACAA;QAEXA,6BAgENA;;kGA3DIA;QACaA;mCAAOA;QAAPA;QACXA;UACEA;YACEA;UAEUA;UAzBdA;eA2BOA;UACLA;UACYA;UA7BNA;;;MAsEDA;MA/BTA;QACaA;mCAAOA;QAAPA;QACXA;UAKWA;UAHTA;;;MA3CiBA;MAmDrBA;QAvCuCA;QA2CrCA;UACQA;;;UAEDA;YACGA;;YA3DSA;;YA+DTA;YACFA;;UApD6BA;;;QAwDAA;QAAPA;QApEXA;;MAsErBA,yBAFeA,sEAEyBA,oDADCA,gBAS7CA;K;;;WCsEYC;;MAA4BA,qFAA2BA,gBAA3BA,6BAAqCA;K;cA2RrEC;MAGiBA;;MACvBA,gBAAOA;QACLA;MAEFA,YACFA;K;eA+QEC;MACWA;;MACSA;MAEpBA,wBAAOA;QACLA;UAAoBA,OAAgBA,sBASxCA;QARIA;;MAEFA,sBAAiBA;IAMnBA,C;cAgBOC;MAAcA,uDAAqCA;K;;ELhvBhCC;gBAAlBA;MAAYA,oDAAcA;K;c4B/C3BC;MAAcA,aAAMA;K;;E5B8BIC;OAHjBC;MAAoBA,qBAAsBA;K;gBAGhDD;MAAYA,wCAA+BA;K;cAG5CE;MAAcA,yBxBmaLA,uCwBnaiDA;K;kBAGzDC;MACNA,sBAAwBA,2DAAqBA;IAC/CA,C;mBAGSC;MAAeA,yCAAgCA;K;;;;;;c6BhBjDC;MAAcA,SAAWA;K;;;;c7B6cxBC;MAAUA,qBAAUA,OAAMA;K;cA4B3BC;mBAAuCA;MAAzBA,sCAAmCA;K;;;UQzfxDC;MAEEA;MAAIA
;QACFA,QAoBJA;eAlBMA;;QACFA,OAAOA,eAiBXA;MAfQA;QACiBA;QACrBA;QACkBA,6BAAlBA;;UAC6CA,gCAASA;;QAEtDA,mBASJA;aAReA;QAEYA;QAAvBA;QACAA,0CAAqBA;QACrBA,oBAIJA;;QAFIA,QAEJA;K;;;EAqf8CC;UAAPA;MAAOA,iCAAmBA,6BAAEA;K;;;;UAC9BA;MAInCA;QACEA,OAAOA,+BsB9aXA,8CtBmbCA;MADCA,OAAOA,iCACRA;K;;;;UAoFDC;MAEEA;MAAIA;QACFA,QAqDJA;eAlDMA;OAA+BA;MAA/BA;QACFA,OAAOA,eAiDXA;MA9CEA;QAxDqBA;QLpKrBC;UAEEA,kBAAiBA;QAiBnBA;QK0MED,OzB9nBJE,yCyB2qBAF;;MA1CEA;QAGEA,sBAAMA;MAGRA;QACEA,OAAOA,2CAmCXA;MA/GYA;;;QAgF6BA;QACrCA;QAhGsCA;;QAmGtCA;UACEA,cAAaA,UADfA;QAGAA,YAAiCA,iCAAjCA;UACgBA;UACEA;uCAAQA;4BAARA;UAChBA;YACEA,iCAAsBA,aAvmB5BA;;QA0mBEA,iBAiBJA;;MAdEA;QACYA;QAEaA;QAAvBA;QAhnBFA;QAmnB2BA,0CADzBA;UACEA,gBAAeA,YAAQA;QAEzBA,iBAMJA;;MADEA,QACFA;K;;;;csB5jBOG;MAELA,uDADiBA,2CAEnBA;K;;;qBCwGAC;wBACeA;MACbA;mBACwBA;UAEpBA,MAKNA;MAFEA,sBAAMA;IAERA,C;aAiCIC;MACFA;;QACEA,sB9CxCJA;M8C2CEA;QAEEA;;;U3CkHyCC;;Q2CrH3BD;eAUhBA;M3C0GAC;MAiaAD;M2C1gBYA;MAC0BA,uBA1PjCA;MA2PLA;QA9CAA,uBACYA;Q3CyZVA;Q2CxWAA;UAEEA,0BAYNA;QX9PSA;QW0PLA;UACEA,aAGNA;;K;;;mBtB5NGE;;K;;;;eAsFMC;MAkDAC,QAjDHD;QACFA,YAGJA;MADEA,WAAOA,SACTA;K;mEAqCaC;MAQJA;IA4BTA,C;6DApCaC;;K;oCAAAD;MAQJA;;;6GARIA;QAQJA;;;;;;cAAPA;cGqCEA,UAAUA;yBHvIFA;;gBGuIRA,UAAUA;2BHjCLA;;uBDhKPA;cCoK2CA,iEAAiBA;;;;cDkIvDA,4BCjIKA,OuB/LcA,QvB4LcA,qDAEhCA;;gBD1GDA;;gBC4GLA;gBAKEA,gBG6BAA,OAAUA,2BH7BUA;+BAChBA,gBAA0BA;6BAC5BA,cAD4BA;kBDlH3BE,WCuFLF,oBuB3KwBE,QvBwMRF,gHAGOA,8EAEWA;;;cA/BvBA,WAmCNA;;cAnCMA;;;MAQJA;IARIA,C;yBAsCTG;MrBzOqBA;sDoBuCvBA;MCuMEA;QDvMFA;QCyMIA,gBGrBFA,OAAUA;;MVpGLA;QO8H6BA;QAAhBA;6BAClBA;;UAC2BA;uCAAIA;qBAAJA;UACzBA;;;cAKuDA;cADnDA,gBGjCNA,OAAUA;cHmCJA,SAkBVA;;cAhBUA,gBGrCNA,OAAUA;cHsCJA;;;QAGNA,sBAAMA;;MAERA;;UAEIA,SAQNA;;UANMA,QAMNA;;UAJMA,QAINA;;UAFMA,QAENA;;K;mBAEUC;MACJA;MrBgiC2C5nB;MAnzCxB4nB,gDoBuCvBA;MCkPEA;QDlPFA;QCoPIA,gBGhEFA,OAAUA;;QH2DIA;;MD/OhBA,iCA6CKA,8BA7CLA;MA6CKA;QC2MkCA,oCDxPvCA,SA6CKA,8BA7CLA;;QwB8SwBA,+CvBnDwBA,oBwBxT9CA;MxB0UFA,OApOFA,oEA0OAA;K;kBAEKC;MuBrL0BA,oCvBwLMA,qCAAjBA;;MDtNbA;IC2NPA,C;oBAEaC;MAIXA;;IAsGFA,C;oCA1GaA;MAIXA
;;;8DAJWA;QAIXA;;;;;;;;;cAGiBA;cAFVA;;gBDpSPA,2BCuSmCA;kBDvSnCA,0BCySmCA;;;+BAxORA,WAAWA,WAyOnBA;;kBACbA;;;gBDzODA;;gBC4ODA;;;cAGaA;cAEfA;cGxHAA,UAAUA,qDHyH6BA,OAAOA,+CAA0CA,gCAA2BA;cAEnGA,mDAAqBA;gDAAkBA;oCACxCA;;+BAGTA,gBAA0BA;6BAC5BA,cAD4BA;kCAKTA;kCACNA;kCA9PTA;;kBDALJ,WCuFLI,oBuB3KwBJ,QvB8UNI;;;gBAUdA;;;8BApQMA;;cAwQcA,uFAA8BA;2BAGhBA;2BAA0BA;;8BArM5DA;;gBAEFA,oBsBjJuBC,8BtBiJ6BD;cAGtCA;;gBA0JoCA;crBucpDC;;oCqB7lB6BD;cAE7BA;cAEUA;;;crBwLVE;cA+XAF;;cAAqBA;cA/XrBE;cA+XAF;8BqBzXyBA;;cDnVzBA;;;cwBlBwBA,avBuWhBA,uFAG6BA;cACnBA;;cANCA;mCuBnQWA,kBxB4CzBA,iCC+NYA,2EAAsCA,OAAOA,4DAR3CA;;;cAULA;cGzKZA,UAAUA,iEH4KwCA,OAAOA,6BrBvYpCA,2DqBuY2FA;cY1VjFG;;cZ6V/BH,uCrB04BSA,oBqBz4B4BA;cACrCA,wBrB5YqBA;cqB6YrBA;cACAA,wBAA6BA;cAE7BA;6BAEIA,gBAA0BA;2BAC5BA,cAD4BA;gBDzS3BJ,ewBpFmBA,QvB+XRI,gHAGOA,4CACNA,6BA/SPA;;cGkHRA,UAAUA,wFHlHFA,qCAuTgEA,6CAAqBA,+CAAqCA,kCAA6BA,oCAA+BA,OAAOA,mCAA6BA,uBAAsBA,oBAAaA;;;;;;;;;cAtFvQA;cAwFEA,gBG3KAA,OAAUA,mDH2KkCA;6BACxCA,gBAA0BA;2BAC5BA,cAD4BA;gCAKTA;gCACNA;gCAhUPA;;gBDALJ,WCuFLI,oBuB3KwBJ,QvBgZRI,sKAODA;;;;;;;;;;;;;;cAtGJA;;;;;;MAIXA;IAJWA,C;oBA4GAI;MAIPA;;IA6NNA,C;oCAjOaA;MAIPA;;;8DAJOA;QAIPA;;;;;;;;;cAAWA;gBACXA;cAEJA;cGvNEA,UAAUA,2DHuNyCA,OAAOA;;gBAIxDA,8BAAkBA;cAEjBA,0CAEQA,OUhbKA;gBVibhBA;+BA1VyBA,WAAWA,WA2VrBA;;kBAAiCA;;;gBGjOhDA,UAAUA;gBJzHPA;gBIkHHA,UAAUA;;gBH4OVA;;;8BA/VyBA,WAAWA,WAkWvBA;;6BAEAA;uBAA2BA;;sBAApBA;kBACcA,gFACnBA,OAAOA,0BACPA,OAAOA;kBGpPtBA,UAAUA,wCHsPeA,0CAA8BA;kCAEnDA;kBU+CS9E;;0BN7hBf8E;sBrC+esBC,EqC/etBD;oBrC+esBC,EqC9etBD;0BAoBOA;4BACFA;wBrCydiBC;+BqCxd0BD;0BAAoBA;;;;wBQPlEA;;sBtC+DQE;;sB0BkaWF,kEAAwBA,OAAOA;;wBAAxBA;;;;;sBG3P1BA,UAAUA,mEH2PgBA;+DYzaKD;sBZ6azBC,oCrB0zBGA,oBqB1zByCA,gEACnBA,OAAOA;sBAChCA;sBG1PNA,UAAUA;sBJzHPA;;sBIkHHA,UAAUA;oBAAVA,UAAUA;oBJlHPA;;oBC2XCA;;;oBAEAA;;;;;8BA7XIA;;cAoYcA,uFAA8BA;cAExBA,4EAAwBA,OAAOA;cAC5CA;cACAA;2BACGA;2BACLA;;;gBAAOA;;;;;cADFA,+CACEA,0BAAgCA,OAAOA;cAE3CA,kBAAhBA,4BAAgBA;gBAChBA;cG3RAA,UAAUA,2EH8RgDA,kCAAwBA,OAAOA,6BAAsBA,oCAAwBA,gCAAoBA,0BAAcA;uDAQ3IA,WC9bVA;+BD+bdA,gBAA0BA;
6BAC5BA,cAD4BA;kCAKTA;kCACNA;kBD/ZdR,WCuFLQ,oBuB3KwBR,QvB+eNQ,2IA3ZRA;;;gBAsaNA;;;gBAEEA;;;;cA0EFA;mCAAMA,uDAANA;;;;;;;;;;;cAJFA;yBAMEA,cAAyBA;cACzBA;cGnYFA,UAAUA,4DHmY4CA;cACpDA;mCAAMA,qDAANA;;;;;;;;;;;;;qBAGEA;;gBACIA;gBAANA;;8BAKFA;gBCxgBFG;gBACAA;cE0HEH,UAAUA,iFHgZwDA,OAAOA,4BrB3mBpDA,mDqB2mBmGA;cY9jBzFD;;cZkkB/BC,uCrBqqBSA,oBqBpqB4BA;qBACrBA;gBAASA;cAAzBA,wBrBjnBqBA;cqBknBrBA;6BAEIA,gBAA0BA;2BAC5BA,cAD4BA;gBD3gB3BR,WCuFLQ,oBuB3KwBR,QvBimBRQ,gHAGOA,4CACNA,6BAjhBPA;;cGyHRA,UAAUA,2FHzHFA,8CAyhB6EA,8CAAoCA,kCAA6BA,oCAA+BA,OAAOA,mCAA6BA,uBAAsBA,0BAAmBA,yBAAaA;;;;;;;;;cAvJjRA;6BAyJMA,gBAA0BA;2BAC5BA,cAD4BA;gCAKTA;gCACNA;gCAjiBPA;;gBDALR,WCuFLQ,oBuB3KwBR,QvBinBRQ,sKAODA;;cAIbA;;;;;;;;;;;;;cA/NSA;;;;;;MAIPA;IAJOA,C;;;UAiGTI;MACEA;;;oDADFA;QACEA;;;;;;8BAAoBA;;;cD7exBA;8BCkfkCA;qBAASA;8BAAkBA;;;cuBpgBrCA,cvBigBVA,4EAEQA,uBACqBA;cACnBA;;;cANNA;mCuB7ZYA,kBxB4CzBA,yBCwXSA,cAAcA,gBACLA,8CAE6BA,qBAASA,wDAV3CA;;;cAaGA;;gBAbjBA;cAeAA;cGxUFA,UAAUA,mCxB3NWA,0DqBoiB8DA;qBAE7EA;;gBACFA,sBAAMA;cG5UVA,UAAUA,mCxB3NWA,mDqB0iB8DA;+BAC7EA,qBAAiBA;cAArBA;;;cGzUFA,UAAUA;cH4UNA;mCAAMA,yCACFA,kBAAeA,iCADnBA;;;;;qBAIEA;0BAA0BA,2BACAA,qBAC1BA;gBGzVNA,UAAUA,kFH2VmEA,0BAA2BA,kCAAyBA,sCAA8BA;gBG3V/JA,UAAUA;kBH+VNA,cAP4BA;uBAWTA;uBACNA;uBAtdTA;;gBDALZ,ewBpFmBA,QvBsiBNY;;;cAUhBA;;;MAjDEA;IAiDFA,C;;;;UAEAC;MACEA;;;oDADFA;QACEA;;;;;;;qBAAIA;8BAAgBA;qBAheGA;qBAAWA;qBAgeHA;;gBAE7BA,sBAAMA;;cAGWA;mCAAMA,eACrBA,cAAcA,aAAqBA,6BADpBA;;;;cAEDA;mCAAMA,kCACpBA,cAAcA,WAAuBA,6CADvBA;;;;qBAGRA;;cAANA;mCAAMA,+BA1ewBA,WA0esBA,6BAApDA;;;;;cAEJA;mCAAMA,yDAANA;;;;cACFA;;;MAbEA;IAaFA,C;;;;cC/lBGC;MACLA;6CAA+BA,4CAA+BA,mDAAsCA,gDAAwCA,qDAAmCA,4BACjLA;K;;;8BAYsBC;;kBAChBA;MACKA,MADcA;QACrBA,oCAgBJA;gBAdaA;;MACXA;QACSA,8DACGA;kBAINA;cSEYC;UTDdD;QAGFA;;MAEFA,WACFA;K;yBAEsBE;MACCA;kBAArBA;MAKAA,yBALAA,kDAGcA,wCAFJA,aAKZA;K;;;;uBA8CKC;;kBACCA,WAAWA;MAAfA;QACEA,MAQJA;;QAHIA,gBEmKAA,OAAUA,sCFnKeA;aACzBA;;IAEJA,C;eAamBC;MACbA;;;yDADaA;QACbA;;;;;;;;cAAkBA;sDAAqBA;;gBAEzCA;;;;;;;;cAGUA;mCsBakBA,kBxBYzBA,OA5FLA,yBEmEkBA,OFnElBA,i
BA4FKA,+EEzBOA;;;cAEEA;ctB5GSA;csB6GrBA;;;;;;;;;;;;cAJFA;cAMEA,gBEwIAA,OAAUA,oCFxIkBA;cAC5BA;;;;;;;;;;;;;;;;cAEJA;;;;;;MAbMA;IAaNA,C;gBAEmBC;MACbA;;;0DADaA;QACbA;;;;;;cAAkBA;sDAAqBA;;gBAEzCA;;;;;8BAE0CA,WAAWA;cAA1CA;mCAAMA,gEAANA;;;;cACKA;mCAAMA,+CAAwCA,mEAA9CA;;;cACFA;mCAAMA,iEAANA;;;;cAChBA;mCAAMA,6EAA6CA,kDAAnDA;;;cACAA;;;;;;cACFA;;;MATMA;IASNA,C;qBAEsBC;MAEhBA;;;+DAFgBA;QAEhBA;;;;;;;cAAcA;mCsBXYA,kBtBYzBA,aF5FLA,yBE2FwBA,OF3FxBA,uCwBgG6BA,0CC7J3BA,iBzB6DFA,iCyB7DEA,evB8JwCA,kCsBnHlBA,QtBmHlBA,0GANYA;;;cASlBA;;;;;;cACFA;;;MAVMA;IAUNA,C;eAEQC;;iBA/EWA;;kCAgFgBA;MAA1BA;8BAAaA;MAApBA,SAAOA,IACTA;K;qBAEaC;MACPA;;;+DADOA;QACPA;;;;;;;cF5GJA,8BE4GwBA,OF5GxBA;;cwBlBwBA,atB+HQA;;gBAAgCA;cAD9CA;mCsB5BYA,kBtB6BzBA,sDAC8CA,kCsBhI3BA,QtBgITA,0GAFGA;;;cAKLA;mCAAMA,mDAEjBA,WAAWA,wCAFAA;;;cAIbA;mCAAMA,uFAANA;;;yBA5DAA;cA8DFA,WA7DEA;;cA6DFA;;;MAXMA;IAWNA,C;YAZaC;;K;2BAcAC;MACJA;;;qEADIA;QACJA;;;;;;cAAPA,gBEwEEA,OAAUA;;gCF1KKA;;gBAoGGA,WAAlBA,oDAA2CA;;8BApG5BA;;cAsGjBA,2CAAcA;;cAChBA;;;MALSA;IAKTA,C;gBAIeC;MACTA;;;0DADSA;QACTA;;;;;;;cACmBA,oCADyCA,YFpIhEA,0ByB7DEA;czB6DFA,8BEwI0BA,OFxI1BA;cwBlBwBA;;gBtB4JOA;;csB5JPA,atB8JlBA;cAA2CA;;;;cAJ7BA;mCsBxDUA,kBtByDzBA,wDAKgCA,kCsBhKbA,QtBgKlBA,mHANcA;;;yEAUkBA;;cAAtCA;;;;cACFA;;;MAfMA;IAeNA,C;aAKkBC;MACZA;;;uDADYA;QACZA;;;;;;cAAmBA;;cFzJvBA,8BE4JmBA,OF5JnBA;cwBlBwBA;;gBtBgLSA;;cAFpBA;mCsB5EiBA,kBxB4CzBA,oFEgCQA;;;ctBnMUA;;csBuMvBA;;;;cACFA;;;MARMA;IAQNA,C;+CAzImBC;;K;;;qBGtFdC;MACCA;;QACFA,MAYJA;MrC6dwBpB,sCqCneGoB,0CAEuBA;QAE9CA;IAEJA,C;WASKC;UAEHA,2BADAA;UAEAA;IACFA,C;;EFxBgDC;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAwBvCC;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;;UAOrBC;MAChBA;MYPZC,0BZOmBD,2BAAsBA,MAAMA,qBAAgBA;IAC9DA,C;;;;UAMuBA;;;MAEFA;MADlBA;MC4NAA,UAAUA;MJzMZA;;MGdoCA,4BHcpCA;MGb8CA,0BsBhD5CA;MtBiD8DA,mCsBjD9DA;MtBkDkDA,6BsBlDlDA;MtBmD8CA,4BsBnD9CA;MtBoDkDA,6BsBpDlDA;MtBqD8DA,mCsBrD9DA;MtBuDkBA;MAElBA;QCoNAA,UAAUA;QDlNRA,MAaHA;;MATKA;MHFNA;;MGIEA;IAODA,C;;;;UAGiBA;MACdA;;;oDADcA;QACdA;;;;;;cAAuBA,oCqBzCAA,WxB0B3BA;cGgBcA;cACWA,qBAAbA;cACZ
A;cCgLEA,UAAUA,oCDhLgBA,4BAAgBA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAC5CA;;;cAGoBA;cAC2BA,4BAArBA;cAELA;cAAOA,eAAPA;kCvBotCVA,oBa3uCuBE,CATDC,2BUkCiBH,YAAvBA;cACSA,cAAPA;cACDA;cAA4BA;cACzBA,wEvB+sCpBA,oBa3uCuBE,CATDC,2BUuC6BH,YAA/BA;cAETA;cAAuBA;cAE/BA;4EAA2CA;cC8JvDA,UAAUA,wDD5JiCA;cnC84G7CA;;cgCp7GAA;;;cG0CMA,qFDjF6CI;cFuCnDJ,gBAkEKA,YwBpFmBA,QrB8DDA;;cAKjBA;;;;cAIyCA,4BAArBA;cC6IxBA,UAAUA;cD3INA;cHvDNA,8BAkEKA,YwBpFmBA,QrB0EDA;;cAMnBA;;;;cAG+BA,oBAAfA;cACeA,sBAAfA;oBAGVA;;c/B6SZK;cyBgDIL,wE3B7O4BA,8C2B6O5BA;gCM5VIA;;gBC4HJA,UAAUA,+BD3HqDA;2BFkC7DA,gBAA0BA;kBGgG5BM,UAAUA;yBH7FVN,cAAyBA;;gBGsFzBM,UAAUN,uCHpFmBA;uBAC/BA;;cD/GAA,8BAkEKA,YwBpFmBA,QrB4FDA;;cAOnBA;;;;;;cAIaA;cACcA,kBAAbA;cAC6BA,4BAArBA;cACNA;;cACuBA,kBAAtBA;cACsBA,kBAAtBA;cAC0BA,4BAArBA;cCuGxBA,UAAUA,gDDpGwBA,2BAAeA,2DAAkDA,mDAAwBA;cAErGA;;gBCgHtBA,UAAUA;gBJhNZA,kBAkEKA,YwBpFmBA,QrBqHCA;;gBAUjBA;;;cAG2CA;cAA/BA;cAGDA;cADbA;mCAAMA,kEAKEA,wEALRA;;;cHlHNA,kBAkEKA,YwBpFmBA,QrB4IDA;qBASTA,cFvBaA;;cEyBvBA;;;;cAG+BA,sBAAfA;cC0DlBA,UAAUA;cDxDNA;cH1INA,8BAkEKA,YwBpFmBA,QrB6JDA;;cAOnBA;;;;;;mCvB2lCOA,oBa3uCuBE,CATDC,2BU6J0BH,YAAXA;cACXA,oBAAhBA;cAC0BA,4BAArBA;cACFA;;gBCuDtBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrB8KCA;;gBAMjBA;;;8BAEmCA,mBACdA;;;cAAvBA;;;cC6BJA,UAAUA;cAOVO,UAAUD;yBF7MZN;cACAA,oCAAsBA;;cCwKhBA;;;;cAI2CA,4BAArBA;cCyB1BA,UAAUA;cDtBJA;mCAAMA,sDAEDA,gDAFLA;;;;;cH5KRA,8BAkEKA,YwBpFmBA,QrBmMDA,2EAEEA;;cAOrBA;;;;;;cAIiBA;cAC0BA,4BAArBA;cACqBA,4BAArBA;cACFA;;gBCetBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrBsNCA;;gBAMjBA;;;8BAEmCA,mBAEdA;;cAAvBA;;;cCZJA,UAAUA,2DDa+CA;cAE/CA;mCAAMA,oCAAkCA,aAAWA,oCAAnDA;;;;;cAHNA;;;;cCZJA,UAAUA,mFDkBsDA;cACjDA;mCAAMA,sDAEVA,aAAWA,oCAFPA;;;;;;cHrNjBA;cAkEKA,ewBpFmBA,QrB4ODA,0IuBlRJQ,CjC+CQC,gEiC/CQD;;cvB4R/BR;;;;cAGiBA;cACcA,sBAAfA;cCtClBA,UAAUA;oBDyCFA;;c/BuIZK;cyBgDIL,wE3B7O4BA,+C2B6O5BA;gCMtLIA;;gBC1CJA,UAAUA,kDD2CwCA;gBAC9BA;qBFrJlBA,gBAgB0BA;kBGgG5BU,UAAUJ;mBH7GVN,cAgByBA;;gBGsFzBU,UAAUV,wCHpGoBA;iBAChCA;;cD/FAA,8BAkEKA,YwBpFmBA,QrBmQDA;;cAOnBA;
;;;;;cAIiCA,oBAAhBA;cAC0BA,4BAArBA;cACqBA,4BAArBA;cACFA;;gBC/CtBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrBoRCA;;gBAMjBA;;;;uCAEmCA,mBAEdA;cAAvBA;;;cC1EJA,UAAUA;cD4EEA;mCAAMA,oCAAkCA,qCAAxCA;;;;;cAFRA;;;;cC1EJA,UAAUA;cDgFEA;mCAAMA,sDAEPA,qCAFCA;;;;;;cHlRdA;cAkEKA,ewBpFmBA,QrBwSDA,gJuB9UJQ,CjC+CQC,gEiC/CQD;;cvBuV/BR;;;;0CvB88BOA,oBa3uCuBE,CATDC,2BU0S2BH,YAAlBA;cACKA,4BAArBA;cACFA;;gBCrFtBA,UAAUA;gBJhNZA,8BAkEKA,YwBpFmBA,QrB0TCA;;gBAMjBA;;;yBD7SRA,mBAAmBA;cEiMjBA,UAAUA,wCD+G0BA;yBAClBA,6BAAdA;;gBChHJW,UAAUX,0CH9FYA,gDAAkCA;iBAnC/BW,WAAWA,WAoC3BX;;cDrGXA,8BAkEKA,YwBpFmBA,QrBwUDA;;cAMnBA;;;;cAG2BA,oBAAbA;cACiBA,sBAAfA;cC9HlBA,UAAUA;cDiIkBA,gDAApBA;;2BFzMNA,gBAjB0BA;kBGgG5BY,UAAUN;yBH5EVN,cAjByBA;;gBGsFzBY,UAAUZ,wCHnEoBA;uBAC3BA;;cDhILA,8BAkEKA,YwBpFmBA,QrBwVDA;;cAMnBA;;;;cAG+BA,sBAAfA;cC7IlBA,UAAUA;cDgJkBA,gDAApBA;;;;;uBAEMA,cAAyBA;gBHpVzCA,kBAkEKA,YwBpFmBA,QrBuWCA,0FAEUA;;gBHvVnCA,kBAkEKA,YwBpFmBA,QrB+WCA;;cAQrBA;;;;cCrJFA,UAAUA,8CDuJ8BA;;;;;cAE3CA;;;MA1VKA;IA0VLA,C;;;EApS4Ca;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAsKpBA;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAwFTA;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;EAepBA;UAAPA;MAAOA,gCAAEA,iBAAWA,QAAOA;K;;;;UAyB3Db;MACfA,0BAAcA;IACfA,C;;;;OwBxXac;MAAEA;oBAAwDA;MAAtCA,uCAAkBA,gBAAeA,MAAKA;K;gBAchEC;MAAYA,iBAAKA;K;cAGlBC;MAAcA,gBAAIA;K;;;cC1ClBC;MAAcA,iBAAIA,MAAMA,mBAAQA,yBAAaA,QAAQA;K;;;gBxBZjDC;MACuCA,aAA9CA;mCAAQA,KViXQC;iBUjXyCD;MAAzDA,sDAAqEA;K;aAgF/DE;MACFA;cAEFA;iBAGeA;UAAMA;QASlBA;;QAPYA,yBAAKA;UAAMA;QAOvBA;;MAAPA,qBACFA;K;SA+EKC;;qBuBnI4BC;MvB4GQD,2BuB5GOC;QvBmJ5CD;UAE0BA;UACkBA;;QAKfA;QpCqRT7C;mC4DpdS6C;QAHjCA;iBxBoMQA;UACFA;;UAEAA,qBAAKA;;IASXA,C;gBA0DkBE;cACkBA;qBACxBA;;Uc4DZC;Ud5DYD;;QAARA,Oc3SJE,2BAsH4BF,oBAtH5BE,+BdgTAF;;QAFIA,OAAOA,qBAAKA,cAEhBA;K;cAEKG;mBAA8BA;;0CczDvBA;QADLA;UAAcA,kBAAMA;QACzBA;;MdyDiCA,WAAwBA;K;mBAlQ9BC;;K;;;UAWEC;;uBAAoBA;MAc7CA;QACFA,kBAAMA;MAEJA;QACFA,kBAAMA;MAIIA;MAGZA;QAC2BA;;QAGhBA,0BAAOA;QACLA;;MAKfC,qCAH4DD;MAM1DE;UACEA,WAASA;;QAEDA;MAzCyBF,SAAmBA;K;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;mFrCU7CG,MACTA,6CADSA,A;6F8CAEC,MAAaA,oCAAbA,A;uG7CgyCiBC,MAC1BA,kCAAeA;;;;OADWA,A;mGAKAC,MAC1BA,kCAAeA;;;;OADWA,A;+FAKAC,MAC1BA,kCAAeA,4CADWA,A;6GAKAC,MAC1BA,kCAuNaA;;;;;;;KAQRA,GAhOqBA,A;yGAKAC,MAC1BA,kCAAeA,8CADWA,A;uHAKAC,MAC1BA,kCA4NaA;;;;;;;KAQRA,GArOqBA,A;uGAKAC,MAC1BA,kCAAeA,gDADWA,A;qHAKAC,MAC1BA,kCA+OaA;;;;;;KAORA,GAvPqBA,A;iHAKAC,MAC1BA,kCAAeA,kDADWA,A;+HAKAC,MAC1BA,kCAmPaA;;;;;;KAORA,GA3PqBA,A;qGcp3CRC,MAClBA,0CADkBA,A;6FW0iBCC,MbmkBnBC,cAnCSD,oBahiB+CA,4hBAArCA,A;gFAsLNE,MAAeA,oCAAfA,A;iDKhLTC,MNjiB8BA,kBMiiBDA,iBAA7BA,A;uEyBnhBYC;MAwLpBA,+BAFgBxF;MAEhBA;MAxLoBwF;K;2CMrChBC,MAASA,8BAATA,A;qDzB+RgBC,MAAOA,mBAAPA,A", + "x_org_dartlang_dart2js": { + "minified_names": { + "global": 
"$get$DART_CLOSURE_PROPERTY_NAME,794,$get$Logger_root,1171,$get$Random__secureRandom,1065,$get$TypeErrorDecoder_noSuchMethodPattern,1153,$get$TypeErrorDecoder_notClosurePattern,1154,$get$TypeErrorDecoder_nullCallPattern,1155,$get$TypeErrorDecoder_nullLiteralCallPattern,1156,$get$TypeErrorDecoder_nullLiteralPropertyPattern,1157,$get$TypeErrorDecoder_nullPropertyPattern,1158,$get$TypeErrorDecoder_undefinedCallPattern,1180,$get$TypeErrorDecoder_undefinedLiteralCallPattern,1181,$get$TypeErrorDecoder_undefinedLiteralPropertyPattern,1182,$get$TypeErrorDecoder_undefinedPropertyPattern,1183,$get$_AsyncRun__scheduleImmediateClosure,1061,$get$_Base64Decoder__emptyBuffer,1024,$get$_Base64Decoder__inverseAlphabet,1036,$get$_CopyingBytesBuilder__emptyList,1025,$get$_hashSeed,810,$get$logger,812,ArgumentError,312,ArgumentError$,814,ArgumentError$value,1185,ArrayIterator,815,AssertionError,311,AssertionError$,814,AsyncError,816,AsyncError_defaultStackTrace,1097,Base64Codec,817,Base64Decoder,818,Base64Encoder,819,BoundClosure,820,BoundClosure__computeFieldNamed,1016,BoundClosure__interceptorFieldNameCache,1035,BoundClosure__receiverFieldNameCache,1059,BoundClosure_evalRecipe,1103,BoundClosure_interceptorOf,1141,BoundClosure_receiverOf,1170,ByteBuffer,821,ByteData,822,BytesBuilder,737,Closure,823,Closure0Args,824,Closure2Args,825,Closure__computeSignatureFunctionNewRti,1017,Closure_cspForwardCall,1092,Closure_cspForwardInterceptedCall,1093,Closure_forwardCallTo,1112,Closure_forwardInterceptedCallTo,1113,Closure_fromTearOff,1116,Codec,826,ConcurrentModificationError,322,ConcurrentModificationError$,814,ConstantMap,827,ConstantMapView,828,ConstantStringMap,829,Converter,830,CryptorError,831,DART_CLOSURE_PROPERTY_NAME,794,DateTime,832,DateTime__fourDigits,1026,DateTime__threeDigits,1068,DateTime__twoDigits,1071,EfficientLengthIterable,833,EfficientLengthMappedIterable,834,Error,835,Error__throw,1069,Error_safeToString,1172,Error_throwWithStackTrace,1174,ExceptionAndStackTrace,836,Excep
tion_Exception,814,FixedLengthListMixin,837,Float32List,838,Float64List,839,FormatException,324,FormatException$,814,FrameCryptor,345,FrameCryptor_decodeFunction_decryptFrameInternal,840,FrameCryptor_decodeFunction_ratchedKeyInternal,841,FrameInfo,842,Function,843,Future,844,IndexError,845,IndexError$withLength,1188,Int16List,846,Int32List,847,Int8List,848,Interceptor,849,Invocation,850,Iterable,851,IterableExtension_firstWhereOrNull,852,Iterable_iterableToFullString,1143,Iterable_iterableToShortString,1144,Iterator,853,JSArray,854,JSArray_JSArray$fixed,1110,JSArray_JSArray$markFixed,1151,JSBool,855,JSInt,856,JSInvocationMirror,857,JSNull,858,JSNumNotInt,859,JSNumber,860,JSObject,861,JSString,862,JSUnmodifiableArray,863,JS_CONST,864,JavaScriptBigInt,865,JavaScriptFunction,866,JavaScriptIndexingBehavior,867,JavaScriptObject,868,JavaScriptSymbol,869,JsLinkedHashMap,870,JsNoSuchMethodError,52,JsNoSuchMethodError$,814,KeyOptions,871,KeyProvider,767,KeySet,872,LateError,873,LegacyJavaScriptObject,874,Level,875,LinkedHashMap,876,LinkedHashMapCell,877,LinkedHashMapKeyIterator,878,LinkedHashMapKeysIterable,879,LinkedHashMap_LinkedHashMap$_empty,1023,LinkedHashMap_LinkedHashMap$_literal,1041,List,880,ListBase,881,ListIterable,882,ListIterator,883,List_List$_of,1057,List_List$filled,1106,List_List$of,1162,LogRecord,884,LogRecord__nextNumber,1055,Logger,350,Logger_Logger,814,Logger_Logger_closure,885,Logger__loggers,1042,Logger_root,1171,Map,886,MapBase,887,MapBase_mapToString,1150,MapBase_mapToString_closure,888,MapView,889,MappedIterable,11,MappedIterable_MappedIterable,814,MappedIterator,890,MappedListIterable,891,NativeByteBuffer,892,NativeByteData,94,NativeByteData_NativeByteData,814,NativeFloat32List,893,NativeFloat64List,894,NativeInt16List,895,NativeInt32List,896,NativeInt8List,897,NativeTypedArray,898,NativeTypedArrayOfDouble,899,NativeTypedArrayOfInt,900,NativeTypedData,901,NativeUint16List,902,NativeUint32List,903,NativeUint8ClampedList,904,NativeUint8List,96,Native
Uint8List_NativeUint8List,814,NativeUint8List_NativeUint8List$view,1186,NoSuchMethodError,905,NoSuchMethodError_NoSuchMethodError$withInvocation,1187,NoSuchMethodError_toString_closure,906,Null,907,NullError,908,NullRejectionException,909,NullThrownFromJavaScriptException,910,Object,911,Object_hash,1136,OutOfMemoryError,912,ParticipantKeyHandler,343,ParticipantKeyHandler$,814,Pattern,913,PlainJavaScriptObject,914,Primitives__generalApplyFunction,1027,Primitives__identityHashCodeProperty,1032,Primitives__objectTypeNameNewRti,1056,Primitives_applyFunction,1077,Primitives_extractStackTrace,1105,Primitives_functionNoSuchMethod,1117,Primitives_functionNoSuchMethod_closure,915,Primitives_getDay,1118,Primitives_getHours,1119,Primitives_getMilliseconds,1124,Primitives_getMinutes,1125,Primitives_getMonth,1126,Primitives_getSeconds,1127,Primitives_getYear,1130,Primitives_lazyAsJsDate,1147,Primitives_objectHashCode,57,Primitives_objectTypeName,1161,Primitives_safeToString,1172,Primitives_stringFromNativeUint8List,1173,Primitives_trySetStackTrace,1179,Random__secureRandom,1065,RangeError,916,RangeError$range,1169,RangeError$value,1185,RangeError_checkNotNegative,1086,RangeError_checkValidRange,1087,Record,917,Rti,918,Rti__getCanonicalRecipe,1028,Rti__getFutureFromFutureOr,1029,Rti__getQuestionFromStar,1030,Rti__isUnionOfFunctionType,1038,RuntimeError,919,S,14,SentinelValue,920,SifGuard,921,StackOverflowError,922,StackTrace,923,StackTrace_current,1094,StateError,321,StateError$,814,StaticClosure,924,Stream,925,StreamController,926,StreamIterator_StreamIterator,814,StreamSubscription,927,Stream_length_closure,608,Stream_length_closure0,608,String,928,StringBuffer,929,StringBuffer__writeAll,1073,String_String$fromCharCodes,1114,String__stringFromUint8List,1067,Symbol,930,Symbol0,930,SystemHash_combine,1089,SystemHash_finish,1109,TearOffClosure,931,TrustedGetRuntimeType,932,TypeError,933,TypeErrorDecoder,934,TypeErrorDecoder_extractPattern,1104,TypeErrorDecoder_noSuchMethodPattern,
1153,TypeErrorDecoder_notClosurePattern,1154,TypeErrorDecoder_nullCallPattern,1155,TypeErrorDecoder_nullLiteralCallPattern,1156,TypeErrorDecoder_nullLiteralPropertyPattern,1157,TypeErrorDecoder_nullPropertyPattern,1158,TypeErrorDecoder_provokeCallErrorOn,1167,TypeErrorDecoder_provokePropertyErrorOn,1168,TypeErrorDecoder_undefinedCallPattern,1180,TypeErrorDecoder_undefinedLiteralCallPattern,1181,TypeErrorDecoder_undefinedLiteralPropertyPattern,1182,TypeErrorDecoder_undefinedPropertyPattern,1183,Uint16List,935,Uint32List,936,Uint8ClampedList,937,Uint8List,938,UnimplementedError,320,UnimplementedError$,814,UnknownJavaScriptObject,939,UnknownJsTypeError,940,UnmodifiableMapView,941,UnsupportedError,319,UnsupportedError$,814,WhereIterable,942,WhereIterator,943,Zone,944,Zone__current,1022,_AddStreamState,945,_AssertionError,946,_AsyncAwaitCompleter,947,_AsyncCallbackEntry,948,_AsyncCompleter,949,_AsyncRun__initializeScheduleImmediate,1033,_AsyncRun__initializeScheduleImmediate_closure,950,_AsyncRun__initializeScheduleImmediate_internalCallback,951,_AsyncRun__scheduleImmediateClosure,1061,_AsyncRun__scheduleImmediateJsOverride,1062,_AsyncRun__scheduleImmediateJsOverride_internalCallback,952,_AsyncRun__scheduleImmediateWithSetImmediate,1063,_AsyncRun__scheduleImmediateWithSetImmediate_internalCallback,953,_AsyncRun__scheduleImmediateWithTimer,1064,_Base64Decoder,954,_Base64Decoder__allocateBuffer,1010,_Base64Decoder__checkPadding,1015,_Base64Decoder__emptyBuffer,1024,_Base64Decoder__inverseAlphabet,1036,_Base64Decoder__trimPaddingChars,1070,_Base64Decoder_decodeChunk,1096,_Base64Encoder,955,_Base64Encoder_encodeChunk,1100,_BroadcastStream,956,_BroadcastStreamController,957,_BroadcastSubscription,556,_BufferingStreamSubscription,552,_BufferingStreamSubscription__registerErrorHandler,268,_Completer,958,_ControllerStream,959,_ControllerSubscription,960,_CopyingBytesBuilder,961,_CopyingBytesBuilder__emptyList,1025,_CyclicInitializationError,962,_DelayedData,963,_DelayedEvent,964
,_DoneStreamSubscription,551,_Enum,965,_Error,966,_Error_compose,1090,_EventDispatch,967,_Exception,968,_FunctionParameters,969,_Future,970,_FutureListener,971,_Future__addListener_closure,972,_Future__asyncCompleteError_closure,973,_Future__asyncCompleteWithValue_closure,974,_Future__chainCoreFuture,1014,_Future__chainCoreFuture_closure,975,_Future__chainForeignFuture_closure,976,_Future__chainForeignFuture_closure0,976,_Future__chainForeignFuture_closure1,976,_Future__prependListeners_closure,977,_Future__propagateToListeners,1058,_Future__propagateToListeners_handleError,978,_Future__propagateToListeners_handleValueCallback,979,_Future__propagateToListeners_handleWhenCompleteCallback,980,_Future__propagateToListeners_handleWhenCompleteCallback_closure,981,_Future__propagateToListeners_handleWhenCompleteCallback_closure0,981,_HashMap,982,_HashMapKeyIterable,983,_HashMapKeyIterator,984,_HashMap__getTableEntry,1031,_HashMap__newHashTable,1053,_HashMap__setTableEntry,1066,_IdentityHashMap,985,_JSSecureRandom,723,_JS_INTEROP_INTERCEPTOR_TAG,986,_KeysOrValues,987,_KeysOrValuesOrElementsIterator,988,_NativeTypedArrayOfDouble_NativeTypedArray_ListMixin,989,_NativeTypedArrayOfDouble_NativeTypedArray_ListMixin_FixedLengthListMixin,990,_NativeTypedArrayOfInt_NativeTypedArray_ListMixin,991,_NativeTypedArrayOfInt_NativeTypedArray_ListMixin_FixedLengthListMixin,992,_Parser_collectArray,1088,_Parser_create,1091,_Parser_handleArguments,1131,_Parser_handleDigit,1132,_Parser_handleExtendedOperations,1133,_Parser_handleIdentifier,1134,_Parser_handleTypeArguments,1135,_Parser_indexToType,1137,_Parser_parse,1163,_Parser_toType,1176,_Parser_toTypes,1177,_Parser_toTypesNamed,1178,_PendingEvents,993,_PendingEvents_schedule_closure,994,_Required,995,_RootZone,996,_RootZone_bindCallbackGuarded_closure,997,_StackTrace,998,_StreamControllerLifecycle,999,_StreamImpl,1000,_StreamIterator,1001,_StringStackTrace,1002,_SyncBroadcastStreamController,1003,_SyncBroadcastStreamController__sendData_c
losure,566,_TimerImpl,254,_TimerImpl$,814,_TimerImpl_internalCallback,1004,_Type,134,_TypeError,1005,_TypeError$fromMessage,1115,_TypeError__TypeError$forType,1111,_Universe__canonicalRecipeJoin,1012,_Universe__canonicalRecipeJoinNamed,1013,_Universe__createFutureOrRti,1018,_Universe__createGenericFunctionRti,1019,_Universe__createQuestionRti,1020,_Universe__createStarRti,1021,_Universe__installTypeTests,1034,_Universe__lookupBindingRti,1043,_Universe__lookupFunctionRti,1044,_Universe__lookupFutureOrRti,1045,_Universe__lookupGenericFunctionParameterRti,1046,_Universe__lookupGenericFunctionRti,1047,_Universe__lookupInterfaceRti,1048,_Universe__lookupQuestionRti,1049,_Universe__lookupRecordRti,1050,_Universe__lookupStarRti,1051,_Universe__lookupTerminalRti,1052,_Universe_addErasedTypes,1074,_Universe_addRules,1075,_Universe_bind,1084,_Universe_eval,1101,_Universe_evalInEnvironment,1102,_Universe_findErasedType,1107,_Universe_findRule,1108,_UnmodifiableMapMixin,1006,_UnmodifiableMapView_MapView__UnmodifiableMapMixin,1007,_UnmodifiableNativeByteBufferView,1008,_Utils_newArrayOrEmpty,1152,_Utils_objectAssign,1160,_Zone,1009,_areArgumentsSubtypes,245,_arrayInstanceType,123,_asBool,160,_asBoolQ,162,_asBoolS,161,_asDouble,163,_asDoubleQ,165,_asDoubleS,164,_asInt,167,_asIntQ,169,_asIntS,168,_asNum,171,_asNumQ,173,_asNumS,172,_asObject,155,_asString,175,_asStringQ,177,_asStringS,176,_asTop,157,_asyncAwait,258,_asyncRethrow,260,_asyncReturn,259,_asyncStartSync,257,_awaitOnObject,261,_awaitOnObject_closure,1011,_awaitOnObject_closure0,1011,_callDartFunctionFast,330,_callDartFunctionFast1,333,_checkValidIndex,98,_checkValidRange,99,_convertDartFunctionFast,329,_createRuntimeType,133,_diagnoseUnsupportedOperation,47,_ensureNativeList,95,_failedAsCheck,149,_finishIsFn,139,_functionRtiToString,180,_functionToJS1,332,_generalAsCheckImplementation,147,_generalIsTestImplementation,142,_generalNullableAsCheckImplementation,148,_generalNullableIsTestImplementation,143,_hashSeed,810,_ins
tallSpecializedAsCheck,140,_installSpecializedIsTest,136,_instanceType,124,_instanceTypeFromConstructor,125,_instanceTypeFromConstructorMiss,126,_interceptError,264,_interceptUserError,265,_invokeClosure,59,_isBool,159,_isFunctionSubtype,242,_isFutureOr,153,_isInCallbackLoop,1037,_isInt,166,_isInterfaceSubtype,243,_isListTestViaProperty,146,_isNever,158,_isNum,170,_isObject,154,_isRecordSubtype,246,_isString,174,_isSubtype,240,_isTestViaProperty,145,_isTop,156,_iterablePartsToStrings,327,_lastCallback,1039,_lastPriorityCallback,1040,_makeAsyncAwaitCompleter,255,_microtaskLoop,269,_nextCallback,1054,_noDartifyRequired,340,_noJsifyRequired,334,_nullDoneHandler,278,_nullErrorHandler,277,_nullIs,141,_recordRtiToString,179,_registerErrorHandler,268,_rootHandleError,279,_rootHandleError_closure,1060,_rootRun,280,_rootRunBinary,282,_rootRunUnary,281,_rootScheduleMicrotask,283,_rtiArrayToString,178,_rtiToString,182,_runGuarded,275,_scheduleAsyncCallback,271,_schedulePriorityAsyncCallback,272,_setArrayType,119,_startMicrotaskLoop,270,_structuralTypeOf,129,_substitute,108,_substituteArray,115,_substituteFunctionParameters,117,_substituteNamed,116,_unminifyOrTag,183,_unwrapNonDartException,55,_wrapJsFunctionForAsync,262,_wrapJsFunctionForAsync_closure,1072,allowInterop,331,alternateTagFunction,1076,applyHooksTransformer,91,argumentErrorValue,40,assertThrow,76,async__AsyncRun__scheduleImmediateJsOverride$closure,1078,async__AsyncRun__scheduleImmediateWithSetImmediate$closure,1079,async__AsyncRun__scheduleImmediateWithTimer$closure,1080,async___nullDoneHandler$closure,1081,async___nullErrorHandler$closure,1082,async___startMicrotaskLoop$closure,1083,bool,1085,boolConversionCheck,75,callMethod,336,checkNotNullable,9,closureFromTearOff,68,closureFunctionType,120,convertDartClosureToJS,60,convertDartClosureToJSUncached,61,createRecordTypePredicate,92,createRuntimeType,131,dartify,341,dartify_convert,1095,defineProperty,79,diagnoseIndexError,38,diagnoseRangeError,39,dispatchRecordsF
orInstanceTags,1098,double,1099,fillLiteralMap,58,findNALUIndices,342,findType,107,getAlgoOptions,355,getInterceptor$,1120,getInterceptor$asx,1121,getInterceptor$ax,1122,getInterceptor$x,1123,getIsolateAffinityTag,78,getNativeInterceptor,1,getRuntimeTypeOfDartObject,128,getTagFunction,1128,getTraceFromException,56,getTrackCryptor,344,getTrackCryptor_closure,1129,getTypeFromTypesTable,127,iae,36,initHooks,90,initHooks_closure,1138,initHooks_closure0,1138,initHooks_closure1,1138,initNativeDispatch,88,initNativeDispatchContinue,89,initNativeDispatchFlag,1139,initializeExceptionWrapper,42,instanceOrFunctionType,121,instanceType,122,int,1140,interceptorsForUncacheableTags,1142,ioore,37,isDefinitelyTopType,137,isJsIndexable,13,isNullable,247,isSoundTopType,248,isSubtype,239,isToStringVisiting,10,jsify,335,jsify__convert,1145,keyProviders,1146,logger,812,lookupAndCacheInterceptor,80,main,349,main__closure,1148,main__closure0,1148,main__closure1,1148,main__closure2,1148,main_closure,1149,main_closure0,1149,main_closure1,1149,main_closure2,1149,makeDefaultDispatchRecord,87,makeDispatchRecord,0,makeLeafDispatchRecord,86,num,1159,objectHashCode,57,participantCryptors,1164,patchInteriorProto,85,printString,351,promiseToFuture,337,promiseToFuture_closure,1165,promiseToFuture_closure0,1165,prototypeForTagFunction,1166,quoteStringForRegExp,93,saveStackTrace,54,scheduleMicrotask,273,throwConcurrentModificationError,48,throwCyclicInit,77,throwExpression,44,throwExpressionWithWrapper,45,throwLateFieldADI,353,throwLateFieldNI,352,throwUnsupportedOperation,46,toStringVisiting,1175,toStringWrapper,43,typeLiteral,135,unminifyOrTag,12,unsetCryptorParticipant,348,unsetCryptorParticipant_closure,1184,unwrapException,53,wrapException,41", + "instance": 
"$add,1189,$and,1190,$arguments,1216,$call,1252,$div,1191,$eq,1192,$ge,1193,$gt,1194,$index,1195,$indexSet,1196,$le,1197,$lt,1198,$mod,1199,$mul,1200,$negate,1201,$or,1202,$protected,1235,$shl,1203,$shr,1204,$sub,1205,$tdiv,1206,$this,1243,$xor,1207,T,1214,_,1208,_0,1209,_JSSecureRandom$0,814,_TimerImpl$2,814,__,1210,__0,1211,__FrameCryptor_kind_A,1673,__ParticipantKeyHandler_cryptoKeyRing_A,1683,___,1212,__internal$_current,1290,__internal$_index,1293,__internal$_iterable,1294,__internal$_length,1296,__internal$_name,1298,__js_helper$_addHashTableEntry,1300,__js_helper$_addHashTableEntry$3,1300,__js_helper$_current,1311,__js_helper$_getBucket$2,1317,__js_helper$_index,1320,__js_helper$_kind,1326,__js_helper$_length,1328,__js_helper$_message,1331,__js_helper$_name,1335,__js_helper$_rest,1345,__rti$_message,1375,_add,1384,_add$1,1384,_addAllFromArray,1274,_addAllFromArray$1,1274,_addEventError,1385,_addEventError$0,1385,_addHashTableEntry,1499,_addHashTableEntry$3,1499,_addListener,1386,_addListener$1,1386,_addPending,1387,_addPending$1,1387,_addStreamState,1388,_alphabet,1519,_arguments,1301,_argumentsExpr,1302,_as,1364,_async$_box_0,1392,_async$_controller,1432,_async$_hasValue,1447,_async$_next,1462,_async$_previous,1475,_asyncComplete,1389,_asyncComplete$1,1389,_asyncCompleteError,1390,_asyncCompleteError$2,1390,_asyncCompleteWithValue,1391,_asyncCompleteWithValue$1,1391,_bind,1365,_bind$1,1365,_bindCache,1366,_box_0,1303,_box_1,1393,_buffer,1289,_cachedRuntimeType,1367,_callOnCancel,1394,_callOnCancel$0,1394,_canFire,1395,_cancelFuture,1396,_canonicalRecipe,1368,_captured_T_1,1540,_captured__convertedObjects_0,1541,_captured_arguments_2,1304,_captured_bodyFunction_0,1397,_captured_callback_0,1398,_captured_callback_1,1399,_captured_completer_0,1542,_captured_data_1,1400,_captured_decryptFrameInternal_3,1676,_captured_dispatch_1,1401,_captured_div_1,1402,_captured_e_1,1403,_captured_error_0,1404,_captured_error_1,1405,_captured_f_1,1406,_captured_future_1,1407,_c
aptured_getTag_0,1305,_captured_getUnknownTag_0,1306,_captured_handleMessage_0,1686,_captured_hasError_2,1408,_captured_headerLength_5,1677,_captured_ivLength_6,1678,_captured_iv_3,1679,_captured_joinedResult_0,1409,_captured_listener_1,1410,_captured_name_0,1688,_captured_namedArgumentList_1,1307,_captured_originalSource_1,1411,_captured_protected_0,1412,_captured_prototypeForTag_0,1308,_captured_result_1,1501,_captured_s_2,1413,_captured_sb_1,1525,_captured_sourceResult_1,1414,_captured_span_2,1415,_captured_srcFrame_4,1680,_captured_stackTrace_1,1416,_captured_stackTrace_2,1417,_captured_target_1,1418,_captured_this_0,1419,_captured_this_1,1420,_captured_this_2,1681,_captured_trackId_0,1687,_captured_value_1,1421,_cell,1309,_chainForeignFuture,1422,_chainForeignFuture$1,1422,_chainFuture,1423,_chainFuture$1,1423,_chainSource,1424,_checkMutable$1,1354,_checkPosition,1355,_checkPosition$3,1355,_checkState,1425,_checkState$1,1425,_children,1689,_clearPendingComplete$0,1426,_cloneResult,1427,_cloneResult$1,1427,_codeUnitAt$1,1275,_collection$_box_0,1500,_collection$_current,1505,_collection$_keys,1509,_collection$_length,1510,_collection$_map,1511,_collection$_nums,1512,_collection$_removeHashTableEntry$2,1515,_collection$_rest,1516,_collection$_strings,1518,_complete$1,1428,_completeError,1429,_completeError$2,1429,_completeWithResultOf,1430,_completeWithResultOf$1,1430,_completeWithValue,1431,_completeWithValue$1,1431,_computeHashCode$1,1502,_computeKeys,1503,_computeKeys$0,1503,_containsKey,1504,_containsKey$1,1504,_containsTableEntry$2,1310,_contents,1526,_controller,1690,_convert$_state,1521,_convertedObjects,1215,_core$_arguments,1523,_core$_box_0,1524,_core$_memberName,1532,_core$_receiver,1536,_createSubscription$4,1433,_current,1276,_data,1356,_decryptionFailureCount,1684,_deleteTableEntry$2,1312,_doneFuture,1434,_dynamicCheckData,1369,_e2ee_cryptor$_box_0,1674,_e2ee_cryptor$_box_1,1675,_elements,1313,_enabled,1682,_encoder,1520,_enumToString,1527,_enumToStr
ing$0,1527,_error,1435,_errorExplanation,1528,_errorName,1529,_errorTest,1436,_eval,1370,_eval$1,1370,_evalCache,1371,_eventScheduled,1437,_eventState,1438,_exception,1314,_existingArgumentNames,1530,_expectsEvent$1,1439,_expr,1315,_f,1291,_findBucketIndex,1506,_findBucketIndex$2,1506,_first,1316,_firstSubscription,1440,_forEachListener,1441,_forEachListener$1,1441,_future,1442,_get,1507,_get$1,1507,_getBucket,1508,_getBucket$2,1508,_getRandomBytes$2,1544,_getStream,1691,_getStream$0,1691,_getTableBucket$2,1318,_getTableCell$2,1319,_getUint32$2,1357,_grow$1,1292,_handle,1443,_hasError,1444,_hasOneListener,1445,_hasPending,1446,_hasValidKey,1685,_hasValue,1531,_ignoreError,1448,_index,1277,_interceptor,1321,_internalName,1322,_invalidPosition,1358,_invalidPosition$3,1358,_irritant,1323,_is,1372,_isCanceled,1449,_isChained,1450,_isComplete,1451,_isEmpty,1452,_isFiring,1453,_isInputPaused,1454,_isInt32$1,1278,_isPaused,1455,_isSubtypeCache,1373,_isUnmodifiable$0,1359,_iterable,1279,_iterator,1295,_jsIndex,1324,_keys,1325,_kind,1374,_last,1327,_lastSubscription,1456,_length,1280,_level,1692,_levelChangedController,1693,_map,1329,_math$_buffer,1543,_mayAddEvent,1457,_mayAddListener,1458,_mayComplete,1459,_mayResumeInput,1460,_memberName,1330,_message,1297,_method,1332,_microsecond,1533,_modifications,1333,_modified,1334,_modified$0,1334,_name,1534,_named,1376,_namedArgumentNames,1336,_namedArguments,1535,_nativeBuffer,1360,_newFutureWithSameType$0,1461,_newHashTable,1337,_newHashTable$0,1337,_newLinkedCell,1338,_newLinkedCell$2,1338,_next,1339,_nextListener,1463,_nums,1340,_offset,1513,_onData,1464,_onDone,1465,_onError,1466,_onListen$1,1467,_onMicrotask,1468,_onMicrotask$0,1468,_onPause,1469,_onPause$0,1469,_onResume,1470,_onResume$0,1470,_onValue,1471,_once,1472,_optionalPositional,1377,_pattern,1341,_pending,1473,_precomputed1,1378,_prependListeners,1474,_prependListeners$1,1474,_previous,1342,_primary,1379,_publish,1694,_publish$1,1694,_receiver,1343,_recordPause$1,1
476,_recordResume$1,1477,_remove$1,1514,_removeAfterFiring,1478,_removeHashTableEntry,1344,_removeHashTableEntry$2,1344,_removeListener$1,1479,_removeListeners,1480,_removeListeners$0,1480,_requiredPositional,1380,_rest,1381,_resultOrListeners,1481,_reverseListeners,1482,_reverseListeners$1,1482,_rti,1382,_scheduleMicrotask,1483,_sendData,1484,_sendData$1,1484,_set$2,1517,_setChained$1,1485,_setError$2,1486,_setErrorObject,1487,_setErrorObject$1,1487,_setInt8,1361,_setInt8$2,1361,_setKeys$1,1346,_setPendingComplete$0,1488,_setRangeFast$4,1362,_setTableEntry$3,1347,_setUint32$3,1363,_setValue$1,1489,_shlPositive$1,1281,_shrBothPositive,1282,_shrBothPositive$1,1282,_shrOtherPositive,1283,_shrOtherPositive$1,1283,_shrReceiverPositive$1,1284,_source,1299,_specializedTestResource,1383,_stackTrace,1537,_state,1490,_stateData,1491,_strings,1348,_subscribe,1492,_subscribe$4,1492,_subscription,1493,_target,1349,_tdivFast,1285,_tdivFast$1,1285,_tdivSlow,1286,_tdivSlow$1,1286,_thenAwait,1494,_thenAwait$1$2,1494,_tick,1495,_toListFixed$0,1287,_toListGrowable$0,1288,_toggleEventId$0,1496,_trace,1350,_typeArgumentCount,1351,_unlinkCell,1352,_unlinkCell$1,1352,_urlSafe,1522,_value,1538,_values,1353,_whenCompleteAction,1497,_writeString$1,1539,_zone,1498,abs$0,1244,add,1245,add$1,1245,addAll,1246,addAll$1,1246,argumentCount,1247,asUint8List,1248,asUint8List$0,1248,asUint8List$2,1248,bindCallback$1$1,1249,bindCallbackGuarded,1250,bindCallbackGuarded$1,1250,bodyFunction,1217,buffer,1251,callback,1253,ceilToDouble$0,1254,checkGrowable$2,1255,children,1256,close$2,1257,code,1258,codeUnitAt$1,1259,codec,1260,comma,1261,complete,1262,complete$1,1262,completeError,1263,completeError$1,1263,completeError$2,1263,completer,1218,config$1,1264,consecutiveSifCount,1265,contains$1,1266,containsKey,1267,containsKey$1,1267,convert,1268,convert$1,1268,count,1269,createBuffer$1,1270,cryptoKeyRing,1271,current,1094,currentKeyIndex,1272,currentkeySet,1273,dartException,1545,data,1219,day,1546,decode,1
547,decode$1,1547,decode$3,1547,decodeFunction,1548,decodeFunction$2,1548,decodeFunction$body$FrameCryptor,1548,decoder,1549,decryptFrameInternal,1220,decrypted,1550,decryptionFailure,1551,decryptionFailure$0,1551,decryptionSuccess$0,1552,deriveKeys,1553,deriveKeys$2,1553,discardFrameWhenCryptorNotReady,1554,dispatch,1221,div,1222,e,1223,elementAt,1555,elementAt$1,1555,enabled,1556,encode,1557,encode$1,1557,encode$4,1557,encodeFunction,1558,encodeFunction$2,1558,encodeFunction$body$FrameCryptor,1558,encoder,1559,encryptionKey,1560,end,1561,endsWith,1562,endsWith$1,1562,enqueueFrame,1563,enqueueFrame$3,1563,error,1564,errorCallback,1565,errorCallback$2,1565,errorZone,1566,exportKey,1567,exportKey$1,1567,f,1224,failureTolerance,1568,fine$1,1569,finer$1,1570,first,1571,firstPendingEvent,1572,floorToDouble$0,1573,forEach,1574,forEach$1,1574,frameType,1575,fullName,1576,future,1577,get$$call,1252,get$_,1208,get$_0,1209,get$__,1210,get$__0,1211,get$___,1212,get$__js_helper$_addHashTableEntry,1300,get$__js_helper$_name,1335,get$_add,1384,get$_addAllFromArray,1274,get$_addEventError,1385,get$_addHashTableEntry,1499,get$_addListener,1386,get$_addPending,1387,get$_asyncComplete,1389,get$_asyncCompleteError,1390,get$_asyncCompleteWithValue,1391,get$_bind,1365,get$_callOnCancel,1394,get$_canFire,1395,get$_chainForeignFuture,1422,get$_chainFuture,1423,get$_chainSource,1424,get$_checkPosition,1355,get$_checkState,1425,get$_cloneResult,1427,get$_completeError,1429,get$_completeWithResultOf,1430,get$_completeWithValue,1431,get$_computeKeys,1503,get$_containsKey,1504,get$_core$_arguments,1523,get$_core$_memberName,1532,get$_core$_receiver,1536,get$_enumToString,1527,get$_error,1435,get$_errorExplanation,1528,get$_errorName,1529,get$_errorTest,1436,get$_eval,1370,get$_eventScheduled,1437,get$_existingArgumentNames,1530,get$_findBucketIndex,1506,get$_forEachListener,1441,get$_get,1507,get$_getBucket,1508,get$_getStream,1691,get$_hasError,1444,get$_hasOneListener,1445,get$_hasPending,1
446,get$_ignoreError,1448,get$_invalidPosition,1358,get$_isCanceled,1449,get$_isChained,1450,get$_isComplete,1451,get$_isEmpty,1452,get$_isFiring,1453,get$_isInputPaused,1454,get$_isPaused,1455,get$_keys,1325,get$_mayAddEvent,1457,get$_mayAddListener,1458,get$_mayComplete,1459,get$_mayResumeInput,1460,get$_modified,1334,get$_namedArguments,1535,get$_nativeBuffer,1360,get$_newHashTable,1337,get$_newLinkedCell,1338,get$_onError,1466,get$_onMicrotask,1468,get$_onPause,1469,get$_onResume,1470,get$_onValue,1471,get$_prependListeners,1474,get$_publish,1694,get$_removeAfterFiring,1478,get$_removeHashTableEntry,1344,get$_removeListeners,1480,get$_reverseListeners,1482,get$_scheduleMicrotask,1483,get$_sendData,1484,get$_setErrorObject,1487,get$_setInt8,1361,get$_shrBothPositive,1282,get$_shrOtherPositive,1283,get$_subscribe,1492,get$_target,1349,get$_tdivFast,1285,get$_tdivSlow,1286,get$_thenAwait,1494,get$_unlinkCell,1352,get$_whenCompleteAction,1497,get$_zone,1498,get$add,1245,get$addAll,1246,get$asUint8List,1248,get$bindCallbackGuarded,1250,get$buffer,1251,get$complete,1262,get$completeError,1263,get$containsKey,1267,get$convert,1268,get$cryptoKeyRing,1271,get$current,1094,get$day,1546,get$decode,1547,get$decodeFunction,1548,get$decoder,1549,get$decryptionFailure,1551,get$deriveKeys,1553,get$elementAt,1555,get$enabled,1556,get$encode,1557,get$encodeFunction,1558,get$encoder,1559,get$end,1561,get$endsWith,1562,get$enqueueFrame,1563,get$errorZone,1566,get$exportKey,1567,get$forEach,1574,get$fullName,1576,get$future,1577,get$getKeySet,1578,get$getParticipantKeyHandler,1579,get$getSharedKeyHandler,1581,get$getUnencryptedBytes,1583,get$handleError,1584,get$handlesComplete,1589,get$handlesError,1590,get$handlesValue,1591,get$hasErrorCallback,1592,get$hasErrorTest,1593,get$hasValidKey,1594,get$hashCode,1595,get$hour,1598,get$internalComputeHashCode,1606,get$internalFindBucketIndex,1608,get$internalGet,1609,get$invalidValue,1612,get$isAccessor,1613,get$isClosed,1614,get$isEmpty,1
615,get$isGetter,1616,get$isNotEmpty,1618,get$isScheduled,1619,get$iterator,1624,get$keyOptions,1627,get$keys,1630,get$kind,1631,get$lastIndexOf,1633,get$length,1636,get$lengthInBytes,1637,get$level,1638,get$listen,1639,get$log,1644,get$map,1647,get$matchTypeError,1649,get$matchesErrorTest,1650,get$memberName,1652,get$microsecond,1654,get$millisecond,1655,get$millisecondsSinceEpoch,1656,get$minute,1657,get$month,1659,get$moveNext,1660,get$namedArguments,1662,get$nextInt,1665,get$noSuchMethod,1666,get$offsetInBytes,1669,get$onRecord,1672,get$positionalArguments,1700,get$putIfAbsent,1702,get$ratchet,1703,get$ratchetKey,1705,get$ratchetMaterial,1706,get$readFrameInfo,1709,get$recordUserFrame,1711,get$registerBinaryCallback,1712,get$remove,1716,get$reset,1718,get$run,1721,get$runBinary,1722,get$runGuarded,1723,get$runUnary,1724,get$runUnaryGuarded,1725,get$runtimeType,1726,get$schedule,1727,get$second,1728,get$setKey,1733,get$setKeySetFromMaterial,1735,get$setRange,1737,get$setupTransform,1741,get$stackTrace,1750,get$start,1751,get$startsWith,1752,get$stream,1755,get$sublist,1756,get$substring,1757,get$then,1759,get$toBytes,1762,get$toInt,1763,get$toRadixString,1766,get$toString,1767,get$year,1780,getKeySet,1578,getKeySet$1,1578,getParticipantKeyHandler,1579,getParticipantKeyHandler$1,1579,getRange$2,1580,getSharedKeyHandler,1581,getSharedKeyHandler$0,1581,getTag,1225,getUint32$1,1582,getUnencryptedBytes,1583,getUnencryptedBytes$2,1583,getUnknownTag,1226,handleError,1584,handleError$1,1584,handleMessage,1227,handleNext$1,1585,handleUncaughtError$2,1586,handleValue$1,1587,handleWhenComplete$0,1588,handlesComplete,1589,handlesError,1590,handlesValue,1591,hasError,1228,hasErrorCallback,1592,hasErrorTest,1593,hasValidKey,1594,hashCode,1595,hashMapCellKey,1596,hashMapCellValue,1597,headerLength,1229,hour,1598,id,1599,inSameErrorZone$1,1600,index,1601,indexable,1602,info$1,1603,initialKeyIndex,1604,initialKeySet,1605,internalComputeHashCode,1606,internalComputeHashCode$1,1606
,internalContainsKey$1,1607,internalFindBucketIndex,1608,internalFindBucketIndex$2,1608,internalGet,1609,internalGet$1,1609,internalRemove$1,1610,internalSet$2,1611,invalidValue,1612,isAccessor,1613,isClosed,1614,isEmpty,1615,isGetter,1616,isLoggable$1,1617,isNotEmpty,1618,isScheduled,1619,isSifAllowed$0,1620,isSync,1621,isUndefined,1622,isUtc,1623,iterator,1624,iv,1231,ivLength,1230,join$1,1625,joinedResult,1232,keyHandler,1626,keyOptions,1627,keyProviderOptions,1628,keyRingSze,1629,keys,1630,kind,1631,lastError,1632,lastIndexOf,1633,lastIndexOf$1,1633,lastPendingEvent,1634,lastSifReceivedAt,1635,length,1636,lengthInBytes,1637,level,1638,listen,1639,listen$1,1639,listen$4$cancelOnError$onDone$onError,1639,listener,1640,listenerHasError,1641,listenerValueOrError,1642,listeners,1643,log,1644,log$4,1644,loggerName,1645,makeIv$2$synchronizationSource$timestamp,1646,map,1647,map$1$1,1647,matchAsPrefix$2,1648,matchTypeError,1649,matchTypeError$1,1649,matchesErrorTest,1650,matchesErrorTest$1,1650,material,1651,memberName,1652,message,1653,microsecond,1654,millisecond,1655,millisecondsSinceEpoch,1656,minute,1657,modifiedObject,1658,month,1659,moveNext,1660,moveNext$0,1660,name,1661,namedArgumentList,1233,namedArguments,1662,names,1663,next,1664,nextInt,1665,nextInt$1,1665,noSuchMethod,1666,noSuchMethod$1,1666,object,1667,offset,1668,offsetInBytes,1669,onCancel,1670,onListen,1671,onRecord,1672,originalSource,1234,padLeft$2,1695,parent,1696,participantIdentity,1697,participantKeys,1698,perform$1,1699,positionalArguments,1700,postMessage$1,1701,prototypeForTag,1236,putIfAbsent,1702,putIfAbsent$2,1702,ratchet,1703,ratchet$2,1703,ratchetCount,1704,ratchetKey,1705,ratchetKey$1,1705,ratchetMaterial,1706,ratchetMaterial$2,1706,ratchetSalt,1707,ratchetWindowSize,1708,readFrameInfo,1709,readFrameInfo$1,1709,recordSif$0,1710,recordUserFrame,1711,recordUserFrame$0,1711,registerBinaryCallback,1712,registerBinaryCallback$3$1,1712,registerCallback$1$1,1713,registerUnaryCallback$2$1,1714,
remainder$1,1715,remove,1716,remove$1,1716,removeLast$0,1717,reset,1718,reset$0,1718,resetKeyStatus$0,1719,result,1720,run,1721,run$1$1,1721,runBinary,1722,runBinary$3$3,1722,runGuarded,1723,runGuarded$1,1723,runUnary,1724,runUnary$2$2,1724,runUnaryGuarded,1725,runUnaryGuarded$1$2,1725,runtimeType,1726,s,1237,sb,1238,schedule,1727,schedule$1,1727,scheduleMicrotask$1,273,second,1728,sendCounts,1729,sequenceNumber,1730,set$__ParticipantKeyHandler_cryptoKeyRing_A,1683,set$__internal$_current,1290,set$__js_helper$_current,1311,set$_async$_next,1462,set$_async$_previous,1475,set$_collection$_current,1505,set$_controller,1690,set$_current,1276,set$_firstSubscription,1440,set$_lastSubscription,1456,set$_onDone,1465,set$_pending,1473,set$cryptoKeyRing,1271,set$kind,1631,set$length,1636,set$level,1638,setEnabled$1,1731,setInt8$2,1732,setKey,1733,setKey$1,1733,setKey$2$keyIndex,1733,setKeyIndex$1,1734,setKeySetFromMaterial,1735,setKeySetFromMaterial$2,1735,setParticipant$2,1736,setRange,1737,setRange$3,1737,setRange$4,1737,setSharedKey$2$keyIndex,1738,setSifTrailer$1,1739,setUint32$2,1740,setupTransform,1741,setupTransform$5$kind$operation$readable$trackId$writable,1741,setupTransform$6$codec$kind$operation$readable$trackId$writable,1741,setupTransform$body$FrameCryptor,1741,sharedKey,1742,sharedKeyHandler,1743,shouldChain$1,1744,sifGuard,1745,sifSequenceStartedAt,1746,skip$1,1747,source,1748,sourceResult,1239,span,1240,srcFrame,1241,ssrc,1749,stackTrace,1750,start,1751,startsWith,1752,startsWith$1,1752,state,1753,storedCallback,1754,stream,1755,sublist,1756,sublist$1,1756,sublist$2,1756,substring,1757,substring$1,1757,substring$2,1757,super$LegacyJavaScriptObject$toString,1767,super$_BroadcastStreamController$_addEventError,1213,take$1,1758,target,1242,then,1759,then$1$2$onError,1759,time,1760,timestamp,1761,toBytes,1762,toBytes$0,1762,toInt,1763,toInt$0,1763,toList$1$growable,1764,toLowerCase$0,1765,toRadixString,1766,toRadixString$1,1766,toString,1767,toString$0,1767,track
Id,1768,truncateToDouble$0,1769,uncryptedMagicBytes,1770,unsetParticipant$0,1771,updateCodec$1,1772,userFramesSinceSif,1773,value,1185,variableName,1774,warning$1,1775,where$1,1776,worker,1777,write$1,1778,writeAll$2,1779,year,1780,zone,1781" + }, + "frames": "8vTAqIe+7DmC;+HAKAA6C;4CAKCTY;4CACeDE;sKAIlBAE;oBAGOF8B;8OAaAj7DAA8CgBCgEANK2EwG,A,oB;sgBATrC1EAAmB0BDgEAVW2E8E,A,AAUvCCiD,A;6qOK0KW+tBsI;eAEF49BwG;ssDJ5RWmPyC;4LA6BLzEY;mrBAuJqBlJmG;yXA8JlByKuB;uCAAAA6B;uMAuBQ1B6C;+YAYV0B4C;mMAqBL0CAARFjCsB,A;6GAkBWayC;2kBA2OHzWgB;0sDAwH+BzCoC;yJAYjBjjDAA/rBxBgxB0B,A;mRAsuByCiyB+C;g1EAmGCGAQv8BzBHsC,A;gYRq9ByBGAQr9BzBHsC,A;utCR6/BZmY6C;4lBAAAA+C;iNAmBqB9WkC;09BAgDOnc4C;ghBAgCnBA2C;uDASAA6C;8LAyCAnX8F;k1DAqHdAkG;iuBA8NEA+S;u4BA4MAA2C;8xCA0DyBAkB;8oDAkCJAkB;4DAOpBAoE;wDAIiBockF;OAChBpc0B;sJAOC2sCc;4BAIgB3sCoE;sOASjBA0B;4NAiCmBA4B;6FAGtBA4C;ubAsEKopCe;qJAEDFsB;AACEAyB;wrEA0NJlpC+C;cAEAAgG;4rIAyPEA0F;m7DAqF6B8pCmK;AACHiCsK;wRA4HtBv6DAM/gETCkCA7C4Bk1De,A,sB;sPNklElB3mCoG;iEACK6qCiC;qbAyIhB7qCqC;iEAaAAmD;ocCloFOwrCa;8BACcp5DAAsE3BDAF1IAF+B,wG,A;aEoE2BGAAuEpBm2DE,A;8DAtEWiDa;kFAKKn5DAAzCJ03DkB,AAAZyBa,A;yLA+CMAoB;kCACkBp5DAAyD/BDAF1IAF+B,4G,A;aEiF+BGAA0DxBm2DE,A;sEAzDWiDoB;0FAGKn5DAApDJ03DkB,AAAZyB0D,A;0QA0EEp5DAA+BTDAF1IAF+B,wG,A;aE2GSGAAgCFm2DE,A;2NAvBEj2DAA2BTHAF/IAFsB,A,gCE+IAEoG,A;SA3BSGAA4BFi2DE,A;0LAfoCgCmC;oDAElCj4DAAYTHAF/IAFsB,A,gCE+IAEoG,A;SAZSGAAaFi2DE,A;4KAMPp2DAFtJAFiC,+B;2aEkK2Cs4DiC;wjBAsCjCR0B;6ZAaF53DAFrNRFiC,uL;iIEmO2Bu5D8P;o2BA+EX34D6E;gnJWsJsBysDuD;wMA6xBCOuB;mHAS/BNwC;AACAC8C;uvENhpCiBqIsB;6BAOjB7B6D;AAHF7DAAqKUyF2B,A;+DAzJO/DgB;AAFAgEsB;2BAGf7ByE;AAD0CjFAAmKlC6GoB,A;mEApFCtGAAzBsBqGc,A;2FA2BECU;qGA2JzBEiB;kEAyKMnB6B;gZAiFPrFAAhbwBqGc,A;8JAybbCqB;iRAUAAqB;qRAUAAqB;mSAUWxGkB;kRAc3BDAAlaM2G6C,A;uCAsaGpHAA1ZHkHqB,A;iGA4ZQnHkB;kUAgBHqBAAlaILiC,A;AAmaJIkB;+QAUIGAApZT4FqB,A;qHA2ZiC9GkB;iZAiB5BGiC;AACDuGmB;oGAODxGAA1aH4GqB,A;yTAsbI1GAA5aJyGqB,A;6EAgbUL2B;0VAmBNEmE;uEAGDIa;kXAiBCJmE;2EAImBF4B;AACEAiC;AACtBMiB;4YAyB0B7FqL;AASAP8D;0GASbDoC;0PAWiB6FAA9YRpnCkD,A;AA+YrBmmCk
E;AAIAJkE;AAIADkC;gVA4CF8BoB;iLAaZ1EsB;sMAuBFEiB;sCAIO4GmC;k4BAoFL9GiE;0EAQFyDmC;qKAiBcWe;uCAENzzD2BAtgBU8yDqC,A;2NAglBFvxDqCAlFlB8tDuD,A;uHAsFc6E0B;aAELiCmC;OAAwB/H4B;iFAOM3sDY;AAA9BsrD2B;uBAA8BtrDAAKrCgwDoD,A;6CAS0B6DsC;AADV77B0E;8CAGXtNAAoCTAAAAAAAACMslCsB,A,A,gB;6CAnC6BqB8C;AAE/BrxDkB;AADOsrD2B;iCACPtrDAAfAgwD8D,A;oEAwCqBzxDqBA1oBH8yD8B,A;uOA0sBlBhxDAAm7F6B8mDkH,A;mFAh7FzB6Ec;wNAiBYuFAAvoCY1EAA6KhByFoD,A,A;AA29BQtGAA9jCeqGc,A;ugBA0kCnBxGAAvgCJ2GmB,A;aAwgCM1GwD;AAiBdiFqD;qOAgBCzwDiDAiCmBksDAApjCZgGoB,A,AAqjCMjGiB,A;8NArBXoBO;AADP2CmB;gKAwCAjwDAA4zF6B8mDiG,A;gQAjzFtBqBO;AADPsHmB;kFAKW9DAA/qCwBqGiC,A;gNAorCCxFAAjlCxByF6B,A;oCAklC4B7GAAxkC5B6GwE,A;iLAmlCCjBe;2KAeN1DI;AADOtBAAtmCFiGqB,A;yJAgnCF1FiC;uBAKVkBiB;8QAsBO4GmC;gCACG9HiC;uBAKVkBiB;uPA4BWHiC;yMAaAAiC;iIAYT5CwF;+YAwCcxtBiC;wEAiBTowB+C;AADS4DAAh7ChBvFAA0EmCqGsB,A,AA1EPxFAA6KhByF2B,A,A;gBAowCQ7GAA1vCR6GyB,A;iCA4vCiBjB0B;AADzB1DW;08HAyOmBuES;wDAGDI4B;6JAYA9FAAvgDVgGsC,A;AAwgDKjGc;0HAMG2Fe;AACFgFyD;AACEhF4B;8KAOGI8B;+CAELEsB;sdAgBMNiB;ktBAgBFI8B;AACjBjyDAAy0EwB8mDAAO/B//CAAGa4kDAAt+HwBqGkB,A,A,4FAm+HhBpvDAAgBd2xDiD,A,qB,A;qNAv1EYlIAA1iDCP0C,A;AA2iDeX6C;AACQiByE;AAGPyF8C;AACOhGyE;AAGPgGiC;AACNjGkC;AACPiGe;oNAWVI4B;uNAaEA8B;uNAaFFqB;6EAKEEsC;AAIFEuB;8XA6BAxGAAlvDwBqGc,A;uRA2vDdxFAAxpDTyF0B,A;wDAuqDajGAAlqDbiGmB,A;6FAoqDStGAA5wDcqGgB,A;4JAqxDV5GAAxqDb6GgC,A;8DA6qDIzGAAvtDJ2GoB,A;gBAguDM1GgB;gWAgBOJAAvrDb4G8B,A;AAwrDG3GO;2CAUDCAAxrDIOsC,A;qPAgsDFsLyC;2JA2LPzMAAHKsMG,2B;iDAKPtMAALOsMG,c;6IAWDlEuB;0IAKOfyB;AACP/DmE;iYAiBOgJW;oGAqCAlEW;iEAeHiC8B;AADPhC2C;+CAGFhEkF;AACHiG2B;qIASS/JmB;8CAGV8E+B;AAEagFiC;+CAEThGoF;AACHiG8B;+IAKSpKmB;8CAGV6E6D;AAEuB/3BAA57Dfw6ByB,A;AA67DK6C0C;sHAGXrJAAtnE6BqG4B,A;AAunEdlHgC;AAKhBmKuC;6EAyCHxF8C;AACAQ0C;iFAyGe+EqC;AADPhCoB;+CAGsB3IAAIpBoHAAvmEPpnCsC,A,AAwmEH6lCwB,AACAN+B,yD;AANG1CAAtGA+HQ,AAAOjCwB,A;sFAmHKgC8B;AAFN9LAA3DKvxBAA3iEJw6BiD,A,A;AAumEFaoB;0HAGL9FAArHA+HQ,AAAOjCwB,A;oKAmIOrHAA9yEgBqGgB,A;qOAszEvBPAAxoEPpnCsC,A;AAyoEH6lCqB;AACAI4B;GACAV+B;oIAWeoF8B;AAFNhMAA/FKrxBAA7iEJw6BiD,A,A;AA6oEF
aoB;8HAGL9FAA3JA+HQ,AAAOjCwB,A;sLAyKOrHAAp1EgBqGmC,A;kMAy1EZ5GAA5uEX6GoB,A;wMAkvEazFAA5vEbyFqB,A;gBA6vEiBtGAAh2EMqGyC,A;AAk2Ed5GAArvET6GwB,A;+HA4vEARAA3rEPpnCsC,A;AA4rEH6lCqB;AACAI4B;GACAV+B;oIAWeoF8B;AAFNnMAAhJKlxBAA/iEJw6BiD,A,A;AAgsEFaoB;8HAGL9FAA9MA+HQ,AAAOjCwB,A;wJA4NOrHgB;wKAMVuC+D;oIAKGuDAApuEPpnCsC,A;AAquEH6lCqB;AACAI4B;GACAV+B;sIAOeoFqE;AADPhCoB;+CAMVhJAASYyHAA5vEPpnCsC,A,AA6vEH6lCsB,AACAI4B,AACAV+B,yD;AAfG1CAAnPA+HQ,AAAOjCwB,A;oHAwQMnBe;wFAEIIG;AACCt6BAAhwEXw6ByB,A;qIAywEMNe;uFAGmBFiC;AACZIiE;AAKPEO;AACKt6BAAnxEXw6BiC,A;mJA8yEDpJAAjBO8IqB,qE;AAmBDmDgB;AADPhCkB;+CAMV/IAAUYwHAAv0EPpnCsC,A,AAw0EH6lCqB,AACAI4B,AACAGyC,AACgBoBwB,AAEdxB2B,AAA6BsBc,AAE/B/B6B,yD;AArBG1CAA7TA+HQ,AAAOjCsB,A;oJA4WNrHAAvhF6BqGuC,A;AAwhFrBjHAAz8EFkHmB,A;AA08EULmB;AAChB9GkD;iEAIKpCaApBP/wBAAp1EQw6ByF,A,A;AA02EK6CgB;AADPhCoB;+CAMVlJAAUY2HAAj4EPpnCsC,A,AAk4EH6lCsB,AACAI8B,AACAGgC,AACAb+B,yD;AAjBG1CAAvXA+HQ,AAAOjCwB,A;4FA8ZD/J2H;AAEM+LQ;AADPhCoB;+CAMV7IAAUYsHAA56EPpnCsC,A,AA66EH6lCsB,AACAIsC,AACAG0B,AACAb+B,yD;AAjBG1CAAlaA+HQ,AAAOjCwB,A;6FAifDrKAArDbCoD,AADIjxB0D,AACJixBAAM6CwD2E,AAGPyFgD,AACOhG2E,AAGPgGmC,AACNjGoC,AACPiG4F,iX,AAjBtB3CgC,A;AAyDgB8FgB;AADPhCoB;+CAMVjJAAUY0HAA//EPpnCsC,A,AAggFH6lCsB,AACAIiC,AACAG8B,AACAb+B,yD;AAjBG1CAArfA+HQ,AAAOjCwB,A;iHA0hBDlKsCAZTnxBAAngFUw6B4F,A,A;AAihFK6CQ;AADPhCoB;qJAGL9FAA9hBA+HQ,AAAOjCwB,A;2PAmjBQnBiB;8HAICIwB;AACXtGAAnuFyBqGkE,A;mYA6vFvBPAA/kFPpnCsC,A;AAglFH6lCsB;AACAIuC;AACAGuB;GACAb+B;kTA0KoByGuB;AACJG0B;mCAGTlEmC;oeAcH4D8B;0CAIAA6B;0CAIAAQ;uBAESiBU;AAAkBrEI;AAAqB2C0B;0CAKhDSQ;AAEEaqD;AAA2BII;AAA3BJAA4YD/E0B,A;0CAvYDkEQ;AAAsBjIqC;AAAiBkJ4B;0CAIvCjBQ;AAAsBlIqC;AAAkBmJ4B;2CAIxCjBQ;AAAsB9HqC;AAAe+I4B;0CAIrChBAAsFRDc,AAAYRgC,AACe7DuB,A;gQA3EXsFkB;AACRjBQ;0DAIcpDI;AAAqB2CiB;AAC/BJoB;qDAMI8BkB;AACRjBQ;8DAIcpDI;AAAqB2CiB;AAC/BJoB;qDAMI8BkB;AACRjBQ;8DAIcpDI;AAAqB2CiB;AAC/BJoB;0CAMJa2B;AACACAAqCRDc,AAAYRgC,AACe7DuB,A;yIA9BnBsEAA6BRDc,AAAYRgC,AACe7DuB,A;0CA1BnBiCAAmMSv4BAA2CEu2BwB,AAAmB4DmB,wBACtByBU,AAAkBrE0B,AACPdmC,A,AA5C3BkE8B,AACAA2B,A;2CAjMQCAAqBRDc,A
AAYRgC,AACe7DuB,A;2CAlBnBgCAAiMStBAA4CETwB,AAAmB4DmB,6BACjByBU,AAAkBrE0B,AACZdmC,A,AA7C3BkE8B,AACAA2B,A;0CA/LYnCAAwMK2C4C,AAMjBRQ,AAAmB9QyC,AACnB8Q2B,AACACAAnMADc,AAAYRgC,AACe7DuB,A,2B;2GANhB4DuB;8BACG0BU;AAAkBrES;gKAWrBRmC;oGAIX4DyB;yNAaW5DmC;sNAIyCgCsD;yEAM7BlP2C;oCAKjB+RgC;AACArE2B;AAFQGAAt9BCtHAAroEsBqGkD,A,AAuoEjBjHAAxjENkHmB,A,6CA2jEazGAAvkEb2GU,A,AA0kEY4CkB,oI;AA48BxBmBW;AAEYjD8E;AAOZiD0B;qGAMqBiB+E;AAEZ1BmB;qCAGTSW;+GAE4BpD+B;AAChBnHAAhnGuBqGyC,A;AAknG/BkEW;kEAMIbkB;sCAMJaW;qJA+BKTmE;AAnBY0BuF;oFAwBI1B8C;sCAIbAiC;sCAIRS8B;oCAIJAwB;kEAKKT0B;2CAGIAiG;AAC0BcyD;AACbAgB;wCACczDqB;AACmBrBAA/6FlBpnC8D,A;AAg7FfmmC+D;AAIAJ+D;AAIAD2B;AACpB+FW;0GAWAAW;iCAIW/D6C;qMA0CLsD8B;0BAERSW;AAAsB/HqC;AAAgBgJuB;gDAItCjBW;AAAsBnIqC;AAAcoJuB;qKAOnBrF+B;AAAmB4DW;wBACtByBU;AAAkBrEkB;AACPd8B;iLAmBbuEK;8QAUM1Ee;8FAEAFU;gGAOAEe;iGAGAFU;mHAOLhGAA5zGsBqGc,A;wEA8zGRjHAA/uGfkHS,A;qCAgvGYnHmC;AACP+GiB;gDAEDIW;yDAIElHAAvvGNkHqC,A;AAwvGDtGAAv0GwBqGqB,A;gMA80GbvGwC;AACPoGiB;oBAEDIW;yLA0DDvGkG;AACGsJ8B;8BAETnH0E;AACFoHqC;oTAsELnOqF;6DAEY6EAAv9GuBqGc,A;uGA89GnCxrDAAihBEmlDc,A;6IA3gBesGE;AADH1GAA71GFyG6C,A;4BAk2GArGAAx+GuBqGiE,A;qFAm/G7B5GAAt4GM6GuB,A;iMAm5GN7GAAn5GM6GgC,A;oEA65GNzFAAv6GMyFgC,A;oGAq7GRzFAAr7GQyFgC,A;wNA+8GR7GAAr8GQ6G4C,A;2QA69GJjGAAl+GIiGgC,A;gFAq/GR7GAAh/GQ6GmC,A;mRAwgHJjGAA7gHIiGuB,A;uZAwiHI3G2B;AACAAiC;AACGuGwC;AACAAmB;sBAGkBD8D;AACAA8D;0DAGjBK+B;AACAAe;iNAShB5GAAxiHQ4GkB,A;AA0iHR5GAA1iHQ4GuB,A;s7BA8kHM5FAAxlHN4FkB,A;AAylHM5FAAzlHN4FiC,A;mCA8lHsB9G6B;AACAA+C;AAEQiB6D;AAGAA2E;AAGPyF8D;AACAAe;+MAKOhG6D;AAGAA2E;AAGPgG8D;AACAAe;iOAMAFa;+CACbMuC;4GAOaNa;+CACbMmE;8GAUfNuC;+CAEeMuC;gDAMOrGmC;AACAAoC;AACPiGoC;AACAAe;yFAIFMyB;iGAGEAoB;kGAIEJwB;qIAMcJwB;uEAENAwB;kCACbMkD;0GAQRFwB;0MAeIvGAAlvHH2G4B,A;AAmvHG3GAAnvHH2GwB,A;2CAiwHOxHAAt0DLsMG,iB;2FAy0DC9EwB;0CAIM2CiB;sEAEHjDiB;AACWtyBuDA0NIyxBa,AAAjBuFc,A;6GAxNOpEc;qEAIT1GoB;0DAcFAqB;AACAAoB;mIAyBIoGe;uEAUAIgB;AACAA6B;qIAgDA/F2B;AACAAgC;AACA2FqC;AACAAiB;yBAEF1FAAn2HFgGe,A;AAo2HEhGAAp2HFgGmB,A;4FAw2HMFkB;AACAA6B;8EASPtGAA38HwBqGwF,A;kGA+8HIxFA
A52H3ByF2B,A;iCA62H+B7GAAn2H/B6GkB,A;mEA+2HiBnLsG;4DAUlB6EAAt+HwBqGc,A;6IA0hI1BuDqE;AACE1De;kDAEEMa;8HAWoBnBa;AAAjBuFI;s/DQlsIZjMgC;sFA2BRjgC+C;kEAwGOA2BApCSugCAAAAvgC0D,A,sC;iJAmDC2vBQ;4yBA2DE3vBgF;AAAAsmCqE;geAiCP90B0C;+hBC9SIAoB;+NAYVo1BsC;2JAMJA4F;OAEgBhJmC;wMA8kBFoFyB;uBACIlEsD;0FAIA3nB6E;+QAehBquBiF;sOAfgBruBiB;6NAiCXh5B2C;QAAAAwB;odAmJkBqkDuE;iFAGYtCwD;AACxB9P6B;2pBAiCcKkB;iHACD0WyB;2JAGWjHkC;AACxB9P+B;uOAuFIGkB;woBAkBT2J8D;wHAMgBiJqC;AACF8BgL;AACZjGmI;gNAcIiGmK;0FAEVqBoI;4BAGAZoG;gYA4EGtNoB;qqBGhgCK8MoD;qGAUqBllC4E;oJAKrBklCoD;oYAoBkBllC2E;8mBA2D3B7hB4E;4GC8/ED6hB0D;OAAAAuD;geCz9EOo4B8C;+NAgXNhI2C;6uREXkCpwBuF;uEAQ9BAqF;6HCjbMAqD;8YAoBNy7BkB;8kGEuQHyR4N;cAAAAqK;cAAAAsJ;cAAAA4E;cAAAAoL;qDAAAA0E;uEAAAA6F;cAAAAqK;cAAAAuJ;cAAAA4E;cAAAAmM;cAAAA4E;cAAAAsI;inBAyTO3GkR;sqGAmFkBvmCAb+hBQ6/B4B,A;84Cavdf2G4D;s2EDzfehCAAL9BsGgBtB/OwB+B4C,A,A;uyBsBobjB5asC;AAEDgVc;uFAGFAc;0EAGEAkC;yJAsBOrJkC;s6BfrfX6IAeyLS8FAxB4NXjCsB,A,A;eSnZA/FiC;68DmB9D2BvkCyB;wvBCm0B1Bq8B8G;yPAwBcr8BkD;gGAGpBq8BiG;2LAMKZkB;wxFC9qBakOwE;i0FCvJNtCsD;myCChBUrnCuF;8JA4gBRAA8B8nBSAA/ChmCvBAAAtB0B2vBAAAA3vBuF,A,A,mE,A;+tBiBulBJAuF;kpGErjB1BAAAAAA6HAQEyc8E,oE,A;m7CC7EA4U2H;AAIUiYoE;AADAtpC6BFkHem5B4E,A;8DElHfn5BAF0H0BkzBqB,AAGlBiHAAAAn6B2B,A,A;8NEnHlB05B6DFuHErIO,A;AEpIFAyH;AAaAqIc;AAVU15BAF0H0BkzB0C,A;AEhHpCwG+H;8OASGmCiC;8eAMOrImN;qCAAAAW;kCACAkCkB;kFAIZrEwE;AAEI+a4D;AAAA9C8D;AACFjYU;AAHFAuF;AAIEiYiC;uEA+XFA4B;4kEI9aqCRwE;2FAYADuF;olBGiBvB7oCA9BgxCyB6/BkD,A;qpBb5+BPsL2C;uOAqB5B5ByD;6qDGlMF9tBwE;mKA4GAAiF;8oBA2COzbkE;QAAAAkE;sNA2ZqBwqCmD;mDAaFxqC+C;+BAAAA4B;gqBAqGJ3a6B;oEAAAAAI6Sd48C+B,A;sgCsC/gCqBtGAA6ESngB0C,AAAiB+T8B,A;0jBA8DhDgT4M;sBAAAAwD;yBAAAAqH;+lHlBoJE+JoC;iyCmBrSPhKuGA+BUsCqN,AAEI5kCAjC4rCqB6/B6C,A,qI;iSiC9qCpBkJAjCirCfzJe,oB;AiChrCUloBkB;6ezClGcpXmD;uBAAAAwC;4FA+IxBA2E;gBAAAA+E;m5BA6MwBAa;6GAAAA+B;29BA6DAA8C;8FAAAAqC;+CAIxB49ByB;iFAAAA8D;qvC6ChVO6FuB;4NAkBF4GI;gcAmBerqCoB;kBAAAAqC;wIAyCpBAa;qEAAAAgE;wvBjDiCsCitCoC;4LAmBpC/akC;2QAQW6YiB;kRAIXhZkC;yXAKM/xByE;kEAEeitCa;2FAGlBjtCgE;4yDA26CqBiyBwC;8j
DA85BCyUmB;AAAeAiB;8IAOQAiB;4DAOlCvCuC;AACAgHgC;8dQ1jFInrC8C;8IAAAAwC;0IAUbq/BAA+PiBgDa,A;+NAhOEA+C;+MAKAAe;8QAUf1BaAgLNyB6C,A;iyBA/JLvQ4LAQWuQgB,gV;gqCA0EaCa;6PAUAAa;uVAiBDriCwB;qhDAwHlBAa;4EAAAAyD;ulCInTgBoXAA0xCjBmoBoD,A;uLA1xCiBnoBAA2xCjBooBgD,A;4PAx9BGx/B+C;+kBAiEMkbwD;0hCAigBf6jB8F;AAEEmH2gB;uuFAyaK5Ge;mjBNzsCA7tD+B;0DAAAAAAia0Bk1D4B,A;iDA3ZDr4CqBAmaZq4CiC,A;itCQ7bhB/D6B;quCAsKA5iCoC;2oCoCpFAgyB8B;AACShyBoC9B2gBEwRgB,mDADjBxRAAAAAiGAKcm4B0C,A,A,A;+B8B7gBWn4BAAlLrBAA7B80BAAADjvBgBwR2H,A,A,A;gC8BqFKxRAAlLrBAA7B80BAAADjvBKmtCAAWKrIAA4BL1MmB,A,yEA1BK2M0EA+DL5MmC,A,A,A,A,uI8B/KTn4BAAAAA0G,A,A;AA+LE69ByZ;wJAgCa79BS;AADTgyB4B;mFAIOhyBkE;iOA6DPsjCe;4GAKAD2B;qMAaEjDsB;iPAMA4E8nB;wEAUF3BqC;uEAOArRqC;8rBAkB0CsRkB;wCAIjCtjCS;AADTsjC4B;kOASAD2B;sDACAZyD;sEAMEYuC;ilBnCrUMY6B;+HAEZ/E6B;iSAgBY+E6B;2NAgIPtT4B;8BACEwW2B;AAA6BhHAAxB7B6MmC,sB;yRAwCI7FqB;qdAUL1W+B;khBA2IkBjf2C;oTAalB4mBgB;uEAQkBp4BqE;oBACPi7BqH;wJASOj7ByE;oBACPysCsH;uXA2GbzIe;kOAQiBlFmF;AACLqEmC;2HAQdhlD0C;QAAAAwB;qSAQE6lDe;qeAiBiBlFmF;AACLqEmC;kMAQdhlD0C;QAAAAwB;2rCA+IFmoD2D;wJAMW9DyC;6bAYXiDuBApOoBzlC4C,A;2bAkSpB7hB0C;QAAAAwB;6YAwBAAyC;QAAAAwB;g8DA8EyBmyCmBA3nBlB6WsB,AAAUJAAhEViG6B,gE,A;0FA6rBkC9MS;qDAAAAoB;uDACDAE;gEAAAAiD;gEAEHlgCkF;2MAKiBmjCkD;AAC3BX8D;AACqBtCE;2DAAAAkC;mKAWrBmEAAiGzB9H2C,qC;6iCAjF6BlMgB;wBAAAAE;0BAAAAAAptBxB8W2B,AAA+BzCoBA1B/BsIM,2E,A;oJAgvB4BhtC6E;4RAOIkgC8C;OAAAAyD;6DAElBxPAAruBd+ToC,A;gPA0uBsBvEsC;OAAAAoC;mMAGQlgCkF;oPIqZXAyF;+jBAQtBi/Be;6BAAAA+BJtpBFqHuF,oD;4BM8J4BzVkD;yxBDtiB1BkSe;+HAIc/iC8D;kMAiDSA+B;uBAAAAuC;8DAC3B+a8M;AACK2nBe;2RAciBaoB;wQA6FlBb4C;6IAEoBwBwFAhM2BjS8E,A;gPA2M5BsRwC;8uBA+BcxD8G;qWA0HjC1Ne;keAWFlCQ;6BAAAAuNA6BFkHuC,A;wqCJqkCEjH8E;2aAYAA8E;wtCM/pDKpwBuC;kHAAAA2C;mNAU8B2iCa;4JAGHAa;ktDAqDhCwC6HAOShGAAoOeyKgE,A,8e;g9DAvGrBjHiE;gJAuEMxDAAgCeyKmC,A;6fAmEnB5pCa;mFAAAA2C;s0BqB/ZmBAyC;uDAAAA2B;yBAAAAoC;mLA2IUA2E;QAAAA2E;iDA6WjBwqCmD;+xBpBlafpOoD;AACAAAC0YJ8K8B,A;ADzYI9K2C;qWAiOiB9IoB;gCACMNAfpTLhzBa,A;oEeoTKgzBoBfpTLhzBwC,A;0CesTHy7B0B;mTE7FTxJe;uCACIjyB8F;8gBAoFDwcAA3B6BxcAby/
BL6/B6C,A,A;uXalxBZ7/BAbkxBY6/BiB,A;QajxBvB7/BwC;2EAEd2bgS;6UAqHyB3bAb0pBY6/BiB,A;ofYhtBxBvoCAAlkBM+xC2B,A;AAkkBfjNAAjGJ8KyB,A;AAkGI9KAAlGJ8K8B,A;gQjBtTExSiB;AAAgCAe;AAChCFuB;AAAqBAqB;uMoB6aA8H0C;sBACDzH2C;sBACAjHyC;sBACAoD6C;sBACE2D+C;sBACAuE8C;wBACCzEwD;AACbD4C;0zElB9GY1C6C;uTekEN9xBsJ;8LAKdo8BAAxFJ8K8B,A;uYAuGoB5vCAAxkBD+xC8F,A;uhJAwBW8BuC;yxDQ2f1BnrC8C;gcAmGK7FwCAxDK6uCAzBnkBNhCAARIFgP,A,A,A;OyBmoBH3sCAAxDK6uCyC,A;iQAqEVmBwI;2GAGmBe8D;0cASa5B6B;mJAUnBAsC;y1BuB1ZTtpCoH;6GAYR85BA3C0gBAiFkC,A;oD2C1gBAjFA3C0gBAiF4D,iC;oD2CxgBkBuMiC;2DAEhB1J8G;AAEa5RyC;sDAKAqI+B;ggDtB1EfhHkH;AACK4BkF;AAEH5BU;AAHFAiG;uDAMsBqQgC;sPAAA1D4B;OAGpBLQ;+HAIKOO;AADAA0C;AACAAuE;kEAELjCkC;+KAGE1EAA7BG2GW,oBAAoBPQ,A;imBA6CTziB8H;AAALouBuD;6BAEOA2C;gBAChBlaiF;AAIiBmM+D;wgBAQbnMO;AAZJA2G;8DAgBIAO;AAhBJAgF;uaAmCSpvBArB8/BwB6/ByB,A;AqBz/BZ3kBgD;AAALouB2D;kCAEKAgD;gBACrBlasF;iDAE6CkaiC;AAAdrL8B;AAAcqL+B;AAClCrL8D;oCACwBqLS;AAAdrL8B;AAAcqLoC;AAGsB9L+C;oBAA3CCwB;OAkBXz9BoE;kEAWmCs9BoC;2FAI/BYkC;6lCAWYoL2B;sCAEAA0B;0FACfzWsB;mKAGOqL6E;0KAMb/O+D;omBAcc8DoG;AALVsEAAnKC2GW,oBAAoBPQ,A;mYAiLrB1K4F;kQAEKe6FAlMkCiY8B,oH;6BAkMlCjYAA7LX8F2C,AACAA2C,AACAAsB,gS;AA+LERArB2XFyFmE,0E;AqB1XEzFuCrB0XFyFmE,yD;0DqBxXuCuKY;AAAPAiB;AAAOA0F;AAM7B3La;iVAIHDkB;AATAUiC;qRAWLhP2E;oCAC2FlU2D;oCACzElbAYnZXAyH,A;uCZsZY+oCoB;gHACQ7tB4E;4UAQzBqcAApNG2Ge,AAAoBPQ,A;yLAyNb1K8H;AAMZ7DU;AAzBAAwF;AA0B4D6DqC;mmBAE5DgJ0D;yRAQYhJgG;AALVsEAArOG2GW,oBAAoBPQ,A;g/DAyP3BxOqE;kNAQoB8CiC;yEAEdYsB;mKACJ1DU;AAXFAsE;AAYa+O8C;AACX9OiH;sFAIEyDsB;yWAMAzDU;AAVFAwC;+HAYMqMAUqCW+OoE,A;AVrCoB/OAUqCpB+OyH,A;AVpCbxSqDI9e4BiTAVgOd3GApBhJE8DE,A,A,wD8B/EO6CAV+NT3GApBhJE8DE,A,A,2D;A0B+ZZ9ViHIxdO2YAVyMG3GApBhJE8DgD,A,A,+L;A0BoaIpoC0D;AANpBg4BAI9e4BiTAVgOd3GgE,A,A;6VMkRZlVU;AAjBNAmE;iFAmBwBpvBAYpejBAiE,A;oCZqeyB+oCoB;oMAG1B5ZU;AApCRA4F;AAqCmB+O+E;AAEX9OU;AA1BNAuG;AA4BIAU;AA5BJA2F;AA6Be8OqF;0OAUXjL4F;i3BAWJ7DU;AAlDAA2E;4PA2DyCwBgD;qNAGrC2GAApUC2GW,oBAAoBPQ,A;2IAyUX1KiM;igCAqFZ7DU;AAxJFA4D;ovBAkKAlBAC9gBFuKkF,A;ADghBErJU;AApKAAiF;mCAqKwGlUmD;oCAEtFlbAYxnBXAyH,A;uCZ2nB
Y+oCoB;4KACM7tBoE;yMAKvBqcAAtbG2GW,oBAAoBPQ,A;yLA2bb1KwI;AAMZ9DU;AAxMFA2F;AAyMkE8D8C;mzBASpDAgG;AALVsEAAtcG2GW,oBAAoBPQ,A;+kDAoVW2LY;AAAPAiB;AAAOAwC;sKAMtB3Lc;iXAOHDkB;AAZAUyB;kZAcThP6C;AACiElU0D;6MAKjEkUU;AANAAmC;AAOiElUmD;+KAE/DiUkJ;uYASACU;AAlBFAkF;4JAoBEAU;AApBFA4K;oIA6BY6DgG;AALVsEAA3XC2Ge,AAAoBPQ,A;+nCAwYH9KoE;u3BAUyBAW;+2CCnkBjCTASIMHyB,A;0mBTiEpBgK6C;4uCAwBKyBkB;AADASO;AADyBmLO;AAAPAkB;OAAAAQ;AAAOAS;AACzBnL+E;gIAEMjjByE;gcAEX+gB2C;0/GA2BGyBkB;aARiC4LO;AAAPAkB;OAAAAQ;AAAOA+B;AAGnBhM0C;AACaGiB;AAAV6LiC;AAAU7Le;kCAEEEQ;qkBAO3BlhBmI;2pBAI+B6sBY;AAAPAkB;OAAAAQ;AAAOAyD;AACkB3La;gNAEnDDkB;wFADsCCQ;2nBAQ3ClF4C;WAAAAqE;8uBAIAvcoK;AAE+BO2H;uHAE/BAuH;86BAM6B6sB0B;AAAU7L6B;AAIC6LY;AAAPAkB;OAAAAQ;AAAOAwB;AAEjB3L8E;0EAEkBAa;0TAIpCDkB;0FAFwBCQ;8sCAcI2LY;AAAPAkB;OAAAAQ;AAAOAwB;AAER3L8E;6HACpBDkB;AAFAUoF;2EAGSljB2I;mnBGpNG+vBAVkNG3GApBhJE8DsC,A,A;6nB4BzCtBtrCA4B/BAyuC0B,A;+W5BuCEla4E;AACwBiYkF;4BAIEAiB;0BACP7Lc;mCACSAuB;6BACNAiB;4BACFAe;6BACEAiB;mCACMAuB;gGAK1BxBmG;8FASwBqNyC;AACAAyC;w6BAQX/LW;AAAL+LuB;kJAGZptB8C;ulFAQiC6sBoB;AACnBhsCAVxDuB8wBCA+BHE2B,A,A;6TU6BRgboB;AACRhsCAV7DmB8wBC,A;AUwDvB9wBAVxDuB8wBAA+BHE2B,A,A;oXUmC5B7RU;AAnBNAwD;6DAuBsBkwB4D;AAAA9C2G;qFAAZtpCADhFyCo3B2F,A;ACmF7CkSgB;AAAKpLY;AAIHPQ;wVAMFzhBU;AApCNAsF;yDAsCMotB8B;AAAKpLY;AAIHPQ;6eASEzBAjCgHHl8BmD,A;AiChHuBs7BwE;AAApBY8C;AAAoBZ2D;kMAEtBpfU;AArDRA+B;kDAsDQmdqDFkCNhIqE,iEAIFnVU,A;AE5FAAuC;AAsDQmd8I;AAEFiQ8B;AAAKpLY;AAKHPQ;kzBAcFzhBU;AA3ENAgD;sUAgFQ+fyG;AACAqNkB;AAAKpLY;AASHPQ;4xBAcJ2LkB;AAAKpLY;AAQHPQ;qQA1DAtEkE;8JAiEFndU;AAvHNA6E;kDAyHMotB8B;AAAKpLY;AAKHPQ;sXAMkBoLoB;AAAShsCAVnLE8wBC,A;AUwDvB9wBAVxDuB8wBAA+BHE2B,A,A;6SUyJ1BkOU;AAzDAA+F;AA0DAqN8B;AAAKpLY;AAKHPQ;0eAKFzhBU;AApJRA8E;AAqJQ0dAD5KRvIU,A;AC6EQgIAFkCNhI6E,A;AE6DMuI+I;2KAGA1dU;AAxJRA+G;kRA+JMotB8B;AAAKpLY;AAOHPQ;+sBAWA1BU;AAjGAA+F;AAkGAqN8B;AAAKpLY;AAKHPQ;wcAMFzhBU;AA7LRA2D;iaAiMQAU;AAjMRAmF;mWAwMMotBkD;AAAKpLe;AAQHPQ;0IAJ2BtgCAVtQCkxBCiChBjBEgE,kK,A;+MvBiSbvSU;AAvNNA4F;iFAyNUggBAjCtDHl8BmD,A;AiCsDuBs7BwE;AAApBY+C;AAAoBZ2D;oFAEtBpfU;AA3NRAkD;gFA4NQ
sdgB;AAtKAHqC;AAsKAGAFpJNnIU,A;AElBMgIAFkCNhIoH,A;AEoIMmIc;AAtKAHmD;AAsKAGAFhJRtdU,A;AE5EAAwC;AA4NQsdiJ;AAGF8P8B;AAAKpLY;AAKHPQ;4pBAWA1BU;AA/JAA+F;AAgKAqN8B;AAAKpLY;AAKHPQ;mdAMFzhBU;AA3PRAiF;wWA8PQAU;AA9PRAkH;0SAoQMotBkD;AAAKpLe;AAOHPQ;gJAH6BtgCAVlUDkxBC,A;AUsQDlxBAVtQCkxBAiChBjBEgE,A,A;AvBkVkBpxBAVlUDkxBiJ,A;2HU2UhBwaoB;AAAShsCAVhUQ8wBC,A;AUwDvB9wBAVxDuB8wBAA+BHE2B,A,A;sPUqS1BkOU;AArMAA+F;AAsMAqN8B;AAAKpLY;AAKHPQ;4SAGJ9DoE;AACA3dU;AA/RNAwC;0OAiSQ2dAFhNR3dU,A;AEjFAA0C;AAiSQ2d+FF9MRhHsB,iE;AEiNMyW8B;AAAKpLY;AAIHPQ;2ZAOFzhBU;AA/SNA0G;0LAkTM4fgB;AA5PEzCqC;AA4PFyCAFzMJzKU,A;AEnDMgIAFkCNhIgG,A;AE0NIyKc;AA5PEzCmD;AA4PFyCAFrMN5fU,A;AE7GAAwC;AAkTM4f0I;AAEAwN8B;AAAKpLY;AAIHPQ;8VAMFzhBU;AA9TNAgF;mVAmUQotBkB;AAAKpLY;AAMHPQ;4NAEF2LkB;AAAKpLY;AAKHPQ;sSAKN1BU;AArQIA8C;yhDCjJO7JAVmXOHgC,A;ueUjMlBEAA1B0BkLc,2BAAAAkB,A;2IA+CxBr9BAwBhMcirCAjCsMA3GApBhJE8D+C,A,A,sG;iQ6BoNIJAfjIpBhoCiH,A;mDekIC66BActLmB76B2B,oBAAAA+B,A;sMd4LO+amJ;8QAvPSqpB+lBAgC5BtBqC,yEAAAAAAGhBAAAAAAkI,A,A,A;i0+BpCuxCmB2IsK;CAAAAG;oWAUAC0K;CAAAAG;kWAUAC0G;CAAAAG;wXAUAC8G;CAAAAG;8PyB30BgC7CAbgiB/CzJc,oB;mtBkB1hBgBsKkB;yGyBnhBgB5pC+BAsLpB+9B2C,AAEhB/9B8B,A;" + } +} diff --git a/example/web/favicon.png b/example/web/favicon.png new file mode 100644 index 0000000000..8aaa46ac1a Binary files /dev/null and b/example/web/favicon.png differ diff --git a/example/web/icons/Icon-192.png b/example/web/icons/Icon-192.png new file mode 100644 index 0000000000..b749bfef07 Binary files /dev/null and b/example/web/icons/Icon-192.png differ diff --git a/example/web/icons/Icon-512.png b/example/web/icons/Icon-512.png new file mode 100644 index 0000000000..88cfd48dff Binary files /dev/null and b/example/web/icons/Icon-512.png differ diff --git a/example/web/icons/Icon-maskable-192.png b/example/web/icons/Icon-maskable-192.png new file mode 100644 index 0000000000..eb9b4d76e5 Binary files /dev/null and b/example/web/icons/Icon-maskable-192.png differ diff --git a/example/web/icons/Icon-maskable-512.png 
b/example/web/icons/Icon-maskable-512.png new file mode 100644 index 0000000000..d69c56691f Binary files /dev/null and b/example/web/icons/Icon-maskable-512.png differ diff --git a/example/web/index.html b/example/web/index.html new file mode 100644 index 0000000000..db7638342a --- /dev/null +++ b/example/web/index.html @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + webwasm + + + + + + + + \ No newline at end of file diff --git a/example/web/manifest.json b/example/web/manifest.json new file mode 100644 index 0000000000..90bcb36f36 --- /dev/null +++ b/example/web/manifest.json @@ -0,0 +1,35 @@ +{ + "name": "flutter_webrtc_example", + "short_name": "flutter_webrtc_example", + "start_url": ".", + "display": "standalone", + "background_color": "#0175C2", + "theme_color": "#0175C2", + "description": "A new Flutter project.", + "orientation": "portrait-primary", + "prefer_related_applications": false, + "icons": [ + { + "src": "icons/Icon-192.png", + "sizes": "192x192", + "type": "image/png" + }, + { + "src": "icons/Icon-512.png", + "sizes": "512x512", + "type": "image/png" + }, + { + "src": "icons/Icon-maskable-192.png", + "sizes": "192x192", + "type": "image/png", + "purpose": "maskable" + }, + { + "src": "icons/Icon-maskable-512.png", + "sizes": "512x512", + "type": "image/png", + "purpose": "maskable" + } + ] +} diff --git a/example/webrtc_example.iml b/example/webrtc_example.iml deleted file mode 100644 index 485a35d430..0000000000 --- a/example/webrtc_example.iml +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/example/webrtc_example_android.iml b/example/webrtc_example_android.iml deleted file mode 100644 index 0ca70ed93e..0000000000 --- a/example/webrtc_example_android.iml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/example/windows/.gitignore b/example/windows/.gitignore new file mode 100644 index 0000000000..d492d0d98c --- /dev/null +++ 
b/example/windows/.gitignore @@ -0,0 +1,17 @@ +flutter/ephemeral/ + +# Visual Studio user-specific files. +*.suo +*.user +*.userosscache +*.sln.docstates + +# Visual Studio build-related files. +x64/ +x86/ + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ diff --git a/example/windows/CMakeLists.txt b/example/windows/CMakeLists.txt new file mode 100644 index 0000000000..e5b4202e39 --- /dev/null +++ b/example/windows/CMakeLists.txt @@ -0,0 +1,102 @@ +# Project-level configuration. +cmake_minimum_required(VERSION 3.14) +project(flutter_webrtc_example LANGUAGES CXX) + +# The name of the executable created for the application. Change this to change +# the on-disk name of your application. +set(BINARY_NAME "flutter_webrtc_example") + +# Explicitly opt in to modern CMake behaviors to avoid warnings with recent +# versions of CMake. +cmake_policy(SET CMP0063 NEW) + +# Define build configuration option. +get_property(IS_MULTICONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) +if(IS_MULTICONFIG) + set(CMAKE_CONFIGURATION_TYPES "Debug;Profile;Release" + CACHE STRING "" FORCE) +else() + if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") + endif() +endif() +# Define settings for the Profile build mode. +set(CMAKE_EXE_LINKER_FLAGS_PROFILE "${CMAKE_EXE_LINKER_FLAGS_RELEASE}") +set(CMAKE_SHARED_LINKER_FLAGS_PROFILE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE}") +set(CMAKE_C_FLAGS_PROFILE "${CMAKE_C_FLAGS_RELEASE}") +set(CMAKE_CXX_FLAGS_PROFILE "${CMAKE_CXX_FLAGS_RELEASE}") + +# Use Unicode for all projects. +add_definitions(-DUNICODE -D_UNICODE) + +# Compilation settings that should be applied to most targets. +# +# Be cautious about adding new options here, as plugins use this function by +# default. 
In most cases, you should add new options to specific targets instead +# of modifying this function. +function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_17) + target_compile_options(${TARGET} PRIVATE /W4 /WX /wd"4100") + target_compile_options(${TARGET} PRIVATE /EHsc) + target_compile_definitions(${TARGET} PRIVATE "_HAS_EXCEPTIONS=0") + target_compile_definitions(${TARGET} PRIVATE "$<$:_DEBUG>") +endfunction() + +# Flutter library and tool build rules. +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# Application build; see runner/CMakeLists.txt. +add_subdirectory("runner") + + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + + +# === Installation === +# Support files are copied into place next to the executable, so that it can +# run in place. This is done instead of making a separate bundle (as on Linux) +# so that building and running from within Visual Studio will work. +set(BUILD_BUNDLE_DIR "$") +# Make the "install" step default, as it's required to run. +set(CMAKE_VS_INCLUDE_INSTALL_TO_DEFAULT_BUILD 1) +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." 
FORCE) +endif() + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +if(PLUGIN_BUNDLED_LIBRARIES) + install(FILES "${PLUGIN_BUNDLED_LIBRARIES}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + CONFIGURATIONS Profile;Release + COMPONENT Runtime) diff --git a/example/windows/flutter/CMakeLists.txt b/example/windows/flutter/CMakeLists.txt new file mode 100644 index 0000000000..903f4899d6 --- /dev/null +++ b/example/windows/flutter/CMakeLists.txt @@ -0,0 +1,109 @@ +# This file controls Flutter-level build steps. It should not be edited. +cmake_minimum_required(VERSION 3.14) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +# TODO: Move the rest of this into files in ephemeral. See +# https://github.com/flutter/flutter/issues/57146. +set(WRAPPER_ROOT "${EPHEMERAL_DIR}/cpp_client_wrapper") + +# Set fallback configurations for older versions of the flutter tool. 
+if (NOT DEFINED FLUTTER_TARGET_PLATFORM) + set(FLUTTER_TARGET_PLATFORM "windows-x64") +endif() + +# === Flutter Library === +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/flutter_windows.dll") + +# Published to parent scope for install step. +set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/" PARENT_SCOPE) +set(AOT_LIBRARY "${PROJECT_DIR}/build/windows/app.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "flutter_export.h" + "flutter_windows.h" + "flutter_messenger.h" + "flutter_plugin_registrar.h" + "flutter_texture_registrar.h" +) +list(TRANSFORM FLUTTER_LIBRARY_HEADERS PREPEND "${EPHEMERAL_DIR}/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}.lib") +add_dependencies(flutter flutter_assemble) + +# === Wrapper === +list(APPEND CPP_WRAPPER_SOURCES_CORE + "core_implementations.cc" + "standard_codec.cc" +) +list(TRANSFORM CPP_WRAPPER_SOURCES_CORE PREPEND "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_PLUGIN + "plugin_registrar.cc" +) +list(TRANSFORM CPP_WRAPPER_SOURCES_PLUGIN PREPEND "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_APP + "flutter_engine.cc" + "flutter_view_controller.cc" +) +list(TRANSFORM CPP_WRAPPER_SOURCES_APP PREPEND "${WRAPPER_ROOT}/") + +# Wrapper sources needed for a plugin. 
+add_library(flutter_wrapper_plugin STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} +) +apply_standard_settings(flutter_wrapper_plugin) +set_target_properties(flutter_wrapper_plugin PROPERTIES + POSITION_INDEPENDENT_CODE ON) +set_target_properties(flutter_wrapper_plugin PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_link_libraries(flutter_wrapper_plugin PUBLIC flutter) +target_include_directories(flutter_wrapper_plugin PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_plugin flutter_assemble) + +# Wrapper sources needed for the runner. +add_library(flutter_wrapper_app STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_APP} +) +apply_standard_settings(flutter_wrapper_app) +target_link_libraries(flutter_wrapper_app PUBLIC flutter) +target_include_directories(flutter_wrapper_app PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_app flutter_assemble) + +# === Flutter tool backend === +# _phony_ is a non-existent file to force this command to run every time, +# since currently there's no way to get a full input/output list from the +# flutter tool. 
+set(PHONY_OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/_phony_") +set_source_files_properties("${PHONY_OUTPUT}" PROPERTIES SYMBOLIC TRUE) +add_custom_command( + OUTPUT ${FLUTTER_LIBRARY} ${FLUTTER_LIBRARY_HEADERS} + ${CPP_WRAPPER_SOURCES_CORE} ${CPP_WRAPPER_SOURCES_PLUGIN} + ${CPP_WRAPPER_SOURCES_APP} + ${PHONY_OUTPUT} + COMMAND ${CMAKE_COMMAND} -E env + ${FLUTTER_TOOL_ENVIRONMENT} + "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.bat" + ${FLUTTER_TARGET_PLATFORM} $ + VERBATIM +) +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} + ${CPP_WRAPPER_SOURCES_APP} +) diff --git a/example/windows/flutter/generated_plugin_registrant.cc b/example/windows/flutter/generated_plugin_registrant.cc new file mode 100644 index 0000000000..d5acadb305 --- /dev/null +++ b/example/windows/flutter/generated_plugin_registrant.cc @@ -0,0 +1,17 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#include "generated_plugin_registrant.h" + +#include +#include + +void RegisterPlugins(flutter::PluginRegistry* registry) { + FlutterWebRTCPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("FlutterWebRTCPlugin")); + PermissionHandlerWindowsPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("PermissionHandlerWindowsPlugin")); +} diff --git a/example/windows/flutter/generated_plugin_registrant.h b/example/windows/flutter/generated_plugin_registrant.h new file mode 100644 index 0000000000..dc139d85a9 --- /dev/null +++ b/example/windows/flutter/generated_plugin_registrant.h @@ -0,0 +1,15 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#ifndef GENERATED_PLUGIN_REGISTRANT_ +#define GENERATED_PLUGIN_REGISTRANT_ + +#include + +// Registers Flutter plugins. 
+void RegisterPlugins(flutter::PluginRegistry* registry); + +#endif // GENERATED_PLUGIN_REGISTRANT_ diff --git a/example/windows/flutter/generated_plugins.cmake b/example/windows/flutter/generated_plugins.cmake new file mode 100644 index 0000000000..cb004cdc57 --- /dev/null +++ b/example/windows/flutter/generated_plugins.cmake @@ -0,0 +1,25 @@ +# +# Generated file, do not edit. +# + +list(APPEND FLUTTER_PLUGIN_LIST + flutter_webrtc + permission_handler_windows +) + +list(APPEND FLUTTER_FFI_PLUGIN_LIST +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/windows plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) + +foreach(ffi_plugin ${FLUTTER_FFI_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${ffi_plugin}/windows plugins/${ffi_plugin}) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${ffi_plugin}_bundled_libraries}) +endforeach(ffi_plugin) diff --git a/example/windows/runner/CMakeLists.txt b/example/windows/runner/CMakeLists.txt new file mode 100644 index 0000000000..394917c053 --- /dev/null +++ b/example/windows/runner/CMakeLists.txt @@ -0,0 +1,40 @@ +cmake_minimum_required(VERSION 3.14) +project(runner LANGUAGES CXX) + +# Define the application target. To change its name, change BINARY_NAME in the +# top-level CMakeLists.txt, not the value here, or `flutter run` will no longer +# work. +# +# Any new source files that you add to the application should be added here. +add_executable(${BINARY_NAME} WIN32 + "flutter_window.cpp" + "main.cpp" + "utils.cpp" + "win32_window.cpp" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" + "Runner.rc" + "runner.exe.manifest" +) + +# Apply the standard set of build settings. This can be removed for applications +# that need different build settings. 
+apply_standard_settings(${BINARY_NAME}) + +# Add preprocessor definitions for the build version. +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION=\"${FLUTTER_VERSION}\"") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_MAJOR=${FLUTTER_VERSION_MAJOR}") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_MINOR=${FLUTTER_VERSION_MINOR}") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_PATCH=${FLUTTER_VERSION_PATCH}") +target_compile_definitions(${BINARY_NAME} PRIVATE "FLUTTER_VERSION_BUILD=${FLUTTER_VERSION_BUILD}") + +# Disable Windows macros that collide with C++ standard library functions. +target_compile_definitions(${BINARY_NAME} PRIVATE "NOMINMAX") + +# Add dependency libraries and include directories. Add any application-specific +# dependencies here. +target_link_libraries(${BINARY_NAME} PRIVATE flutter flutter_wrapper_app) +target_link_libraries(${BINARY_NAME} PRIVATE "dwmapi.lib") +target_include_directories(${BINARY_NAME} PRIVATE "${CMAKE_SOURCE_DIR}") + +# Run the Flutter tool portions of the build. This must not be removed. +add_dependencies(${BINARY_NAME} flutter_assemble) diff --git a/example/windows/runner/Runner.rc b/example/windows/runner/Runner.rc new file mode 100644 index 0000000000..b82215f0e6 --- /dev/null +++ b/example/windows/runner/Runner.rc @@ -0,0 +1,121 @@ +// Microsoft Visual C++ generated resource script. +// +#pragma code_page(65001) +#include "resource.h" + +#define APSTUDIO_READONLY_SYMBOLS +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 2 resource. 
+// +#include "winres.h" + +///////////////////////////////////////////////////////////////////////////// +#undef APSTUDIO_READONLY_SYMBOLS + +///////////////////////////////////////////////////////////////////////////// +// English (United States) resources + +#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU) +LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US + +#ifdef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// TEXTINCLUDE +// + +1 TEXTINCLUDE +BEGIN + "resource.h\0" +END + +2 TEXTINCLUDE +BEGIN + "#include ""winres.h""\r\n" + "\0" +END + +3 TEXTINCLUDE +BEGIN + "\r\n" + "\0" +END + +#endif // APSTUDIO_INVOKED + + +///////////////////////////////////////////////////////////////////////////// +// +// Icon +// + +// Icon with lowest ID value placed first to ensure application icon +// remains consistent on all systems. +IDI_APP_ICON ICON "resources\\app_icon.ico" + + +///////////////////////////////////////////////////////////////////////////// +// +// Version +// + +#if defined(FLUTTER_VERSION_MAJOR) && defined(FLUTTER_VERSION_MINOR) && defined(FLUTTER_VERSION_PATCH) && defined(FLUTTER_VERSION_BUILD) +#define VERSION_AS_NUMBER FLUTTER_VERSION_MAJOR,FLUTTER_VERSION_MINOR,FLUTTER_VERSION_PATCH,FLUTTER_VERSION_BUILD +#else +#define VERSION_AS_NUMBER 1,0,0,0 +#endif + +#if defined(FLUTTER_VERSION) +#define VERSION_AS_STRING FLUTTER_VERSION +#else +#define VERSION_AS_STRING "1.0.0" +#endif + +VS_VERSION_INFO VERSIONINFO + FILEVERSION VERSION_AS_NUMBER + PRODUCTVERSION VERSION_AS_NUMBER + FILEFLAGSMASK VS_FFI_FILEFLAGSMASK +#ifdef _DEBUG + FILEFLAGS VS_FF_DEBUG +#else + FILEFLAGS 0x0L +#endif + FILEOS VOS__WINDOWS32 + FILETYPE VFT_APP + FILESUBTYPE 0x0L +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "040904e4" + BEGIN + VALUE "CompanyName", "com.cloudwebrtc.flutter-flutter-example" "\0" + VALUE "FileDescription", "flutter_webrtc_example" "\0" + VALUE "FileVersion", VERSION_AS_STRING "\0" + VALUE "InternalName", 
"flutter_webrtc_example" "\0" + VALUE "LegalCopyright", "Copyright (C) 2023 com.cloudwebrtc.flutter-flutter-example. All rights reserved." "\0" + VALUE "OriginalFilename", "flutter_webrtc_example.exe" "\0" + VALUE "ProductName", "flutter_webrtc_example" "\0" + VALUE "ProductVersion", VERSION_AS_STRING "\0" + END + END + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x409, 1252 + END +END + +#endif // English (United States) resources +///////////////////////////////////////////////////////////////////////////// + + + +#ifndef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 3 resource. +// + + +///////////////////////////////////////////////////////////////////////////// +#endif // not APSTUDIO_INVOKED diff --git a/example/windows/runner/flutter_window.cpp b/example/windows/runner/flutter_window.cpp new file mode 100644 index 0000000000..955ee3038f --- /dev/null +++ b/example/windows/runner/flutter_window.cpp @@ -0,0 +1,71 @@ +#include "flutter_window.h" + +#include + +#include "flutter/generated_plugin_registrant.h" + +FlutterWindow::FlutterWindow(const flutter::DartProject& project) + : project_(project) {} + +FlutterWindow::~FlutterWindow() {} + +bool FlutterWindow::OnCreate() { + if (!Win32Window::OnCreate()) { + return false; + } + + RECT frame = GetClientArea(); + + // The size here must match the window dimensions to avoid unnecessary surface + // creation / destruction in the startup path. + flutter_controller_ = std::make_unique( + frame.right - frame.left, frame.bottom - frame.top, project_); + // Ensure that basic setup of the controller was successful. 
+ if (!flutter_controller_->engine() || !flutter_controller_->view()) { + return false; + } + RegisterPlugins(flutter_controller_->engine()); + SetChildContent(flutter_controller_->view()->GetNativeWindow()); + + flutter_controller_->engine()->SetNextFrameCallback([&]() { + this->Show(); + }); + + // Flutter can complete the first frame before the "show window" callback is + // registered. The following call ensures a frame is pending to ensure the + // window is shown. It is a no-op if the first frame hasn't completed yet. + flutter_controller_->ForceRedraw(); + + return true; +} + +void FlutterWindow::OnDestroy() { + if (flutter_controller_) { + flutter_controller_ = nullptr; + } + + Win32Window::OnDestroy(); +} + +LRESULT +FlutterWindow::MessageHandler(HWND hwnd, UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + // Give Flutter, including plugins, an opportunity to handle window messages. + if (flutter_controller_) { + std::optional result = + flutter_controller_->HandleTopLevelWindowProc(hwnd, message, wparam, + lparam); + if (result) { + return *result; + } + } + + switch (message) { + case WM_FONTCHANGE: + flutter_controller_->engine()->ReloadSystemFonts(); + break; + } + + return Win32Window::MessageHandler(hwnd, message, wparam, lparam); +} diff --git a/example/windows/runner/flutter_window.h b/example/windows/runner/flutter_window.h new file mode 100644 index 0000000000..6da0652f05 --- /dev/null +++ b/example/windows/runner/flutter_window.h @@ -0,0 +1,33 @@ +#ifndef RUNNER_FLUTTER_WINDOW_H_ +#define RUNNER_FLUTTER_WINDOW_H_ + +#include +#include + +#include + +#include "win32_window.h" + +// A window that does nothing but host a Flutter view. +class FlutterWindow : public Win32Window { + public: + // Creates a new FlutterWindow hosting a Flutter view running |project|. 
+ explicit FlutterWindow(const flutter::DartProject& project); + virtual ~FlutterWindow(); + + protected: + // Win32Window: + bool OnCreate() override; + void OnDestroy() override; + LRESULT MessageHandler(HWND window, UINT const message, WPARAM const wparam, + LPARAM const lparam) noexcept override; + + private: + // The project to run. + flutter::DartProject project_; + + // The Flutter instance hosted by this window. + std::unique_ptr flutter_controller_; +}; + +#endif // RUNNER_FLUTTER_WINDOW_H_ diff --git a/example/windows/runner/main.cpp b/example/windows/runner/main.cpp new file mode 100644 index 0000000000..3ca3d9fb3b --- /dev/null +++ b/example/windows/runner/main.cpp @@ -0,0 +1,43 @@ +#include +#include +#include + +#include "flutter_window.h" +#include "utils.h" + +int APIENTRY wWinMain(_In_ HINSTANCE instance, _In_opt_ HINSTANCE prev, + _In_ wchar_t *command_line, _In_ int show_command) { + // Attach to console when present (e.g., 'flutter run') or create a + // new console when running with a debugger. + if (!::AttachConsole(ATTACH_PARENT_PROCESS) && ::IsDebuggerPresent()) { + CreateAndAttachConsole(); + } + + // Initialize COM, so that it is available for use in the library and/or + // plugins. 
+ ::CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED); + + flutter::DartProject project(L"data"); + + std::vector command_line_arguments = + GetCommandLineArguments(); + + project.set_dart_entrypoint_arguments(std::move(command_line_arguments)); + + FlutterWindow window(project); + Win32Window::Point origin(10, 10); + Win32Window::Size size(1280, 720); + if (!window.Create(L"flutter_webrtc_example", origin, size)) { + return EXIT_FAILURE; + } + window.SetQuitOnClose(true); + + ::MSG msg; + while (::GetMessage(&msg, nullptr, 0, 0)) { + ::TranslateMessage(&msg); + ::DispatchMessage(&msg); + } + + ::CoUninitialize(); + return EXIT_SUCCESS; +} diff --git a/example/windows/runner/resource.h b/example/windows/runner/resource.h new file mode 100644 index 0000000000..66a65d1e4a --- /dev/null +++ b/example/windows/runner/resource.h @@ -0,0 +1,16 @@ +//{{NO_DEPENDENCIES}} +// Microsoft Visual C++ generated include file. +// Used by Runner.rc +// +#define IDI_APP_ICON 101 + +// Next default values for new objects +// +#ifdef APSTUDIO_INVOKED +#ifndef APSTUDIO_READONLY_SYMBOLS +#define _APS_NEXT_RESOURCE_VALUE 102 +#define _APS_NEXT_COMMAND_VALUE 40001 +#define _APS_NEXT_CONTROL_VALUE 1001 +#define _APS_NEXT_SYMED_VALUE 101 +#endif +#endif diff --git a/example/windows/runner/resources/app_icon.ico b/example/windows/runner/resources/app_icon.ico new file mode 100644 index 0000000000..c04e20caf6 Binary files /dev/null and b/example/windows/runner/resources/app_icon.ico differ diff --git a/example/windows/runner/runner.exe.manifest b/example/windows/runner/runner.exe.manifest new file mode 100644 index 0000000000..a42ea7687c --- /dev/null +++ b/example/windows/runner/runner.exe.manifest @@ -0,0 +1,20 @@ + + + + + PerMonitorV2 + + + + + + + + + + + + + + + diff --git a/example/windows/runner/utils.cpp b/example/windows/runner/utils.cpp new file mode 100644 index 0000000000..b2b08734db --- /dev/null +++ b/example/windows/runner/utils.cpp @@ -0,0 +1,65 @@ +#include "utils.h" + 
+#include +#include +#include +#include + +#include + +void CreateAndAttachConsole() { + if (::AllocConsole()) { + FILE *unused; + if (freopen_s(&unused, "CONOUT$", "w", stdout)) { + _dup2(_fileno(stdout), 1); + } + if (freopen_s(&unused, "CONOUT$", "w", stderr)) { + _dup2(_fileno(stdout), 2); + } + std::ios::sync_with_stdio(); + FlutterDesktopResyncOutputStreams(); + } +} + +std::vector GetCommandLineArguments() { + // Convert the UTF-16 command line arguments to UTF-8 for the Engine to use. + int argc; + wchar_t** argv = ::CommandLineToArgvW(::GetCommandLineW(), &argc); + if (argv == nullptr) { + return std::vector(); + } + + std::vector command_line_arguments; + + // Skip the first argument as it's the binary name. + for (int i = 1; i < argc; i++) { + command_line_arguments.push_back(Utf8FromUtf16(argv[i])); + } + + ::LocalFree(argv); + + return command_line_arguments; +} + +std::string Utf8FromUtf16(const wchar_t* utf16_string) { + if (utf16_string == nullptr) { + return std::string(); + } + int target_length = ::WideCharToMultiByte( + CP_UTF8, WC_ERR_INVALID_CHARS, utf16_string, + -1, nullptr, 0, nullptr, nullptr) + -1; // remove the trailing null character + int input_length = (int)wcslen(utf16_string); + std::string utf8_string; + if (target_length <= 0 || target_length > utf8_string.max_size()) { + return utf8_string; + } + utf8_string.resize(target_length); + int converted_length = ::WideCharToMultiByte( + CP_UTF8, WC_ERR_INVALID_CHARS, utf16_string, + input_length, utf8_string.data(), target_length, nullptr, nullptr); + if (converted_length == 0) { + return std::string(); + } + return utf8_string; +} diff --git a/example/windows/runner/utils.h b/example/windows/runner/utils.h new file mode 100644 index 0000000000..3879d54755 --- /dev/null +++ b/example/windows/runner/utils.h @@ -0,0 +1,19 @@ +#ifndef RUNNER_UTILS_H_ +#define RUNNER_UTILS_H_ + +#include +#include + +// Creates a console for the process, and redirects stdout and stderr to +// it for both 
the runner and the Flutter library. +void CreateAndAttachConsole(); + +// Takes a null-terminated wchar_t* encoded in UTF-16 and returns a std::string +// encoded in UTF-8. Returns an empty std::string on failure. +std::string Utf8FromUtf16(const wchar_t* utf16_string); + +// Gets the command line arguments passed in as a std::vector, +// encoded in UTF-8. Returns an empty std::vector on failure. +std::vector GetCommandLineArguments(); + +#endif // RUNNER_UTILS_H_ diff --git a/example/windows/runner/win32_window.cpp b/example/windows/runner/win32_window.cpp new file mode 100644 index 0000000000..60608d0fe5 --- /dev/null +++ b/example/windows/runner/win32_window.cpp @@ -0,0 +1,288 @@ +#include "win32_window.h" + +#include +#include + +#include "resource.h" + +namespace { + +/// Window attribute that enables dark mode window decorations. +/// +/// Redefined in case the developer's machine has a Windows SDK older than +/// version 10.0.22000.0. +/// See: https://docs.microsoft.com/windows/win32/api/dwmapi/ne-dwmapi-dwmwindowattribute +#ifndef DWMWA_USE_IMMERSIVE_DARK_MODE +#define DWMWA_USE_IMMERSIVE_DARK_MODE 20 +#endif + +constexpr const wchar_t kWindowClassName[] = L"FLUTTER_RUNNER_WIN32_WINDOW"; + +/// Registry key for app theme preference. +/// +/// A value of 0 indicates apps should use dark mode. A non-zero or missing +/// value indicates apps should use light mode. +constexpr const wchar_t kGetPreferredBrightnessRegKey[] = + L"Software\\Microsoft\\Windows\\CurrentVersion\\Themes\\Personalize"; +constexpr const wchar_t kGetPreferredBrightnessRegValue[] = L"AppsUseLightTheme"; + +// The number of Win32Window objects that currently exist. 
+static int g_active_window_count = 0; + +using EnableNonClientDpiScaling = BOOL __stdcall(HWND hwnd); + +// Scale helper to convert logical scaler values to physical using passed in +// scale factor +int Scale(int source, double scale_factor) { + return static_cast(source * scale_factor); +} + +// Dynamically loads the |EnableNonClientDpiScaling| from the User32 module. +// This API is only needed for PerMonitor V1 awareness mode. +void EnableFullDpiSupportIfAvailable(HWND hwnd) { + HMODULE user32_module = LoadLibraryA("User32.dll"); + if (!user32_module) { + return; + } + auto enable_non_client_dpi_scaling = + reinterpret_cast( + GetProcAddress(user32_module, "EnableNonClientDpiScaling")); + if (enable_non_client_dpi_scaling != nullptr) { + enable_non_client_dpi_scaling(hwnd); + } + FreeLibrary(user32_module); +} + +} // namespace + +// Manages the Win32Window's window class registration. +class WindowClassRegistrar { + public: + ~WindowClassRegistrar() = default; + + // Returns the singleton registrar instance. + static WindowClassRegistrar* GetInstance() { + if (!instance_) { + instance_ = new WindowClassRegistrar(); + } + return instance_; + } + + // Returns the name of the window class, registering the class if it hasn't + // previously been registered. + const wchar_t* GetWindowClass(); + + // Unregisters the window class. Should only be called if there are no + // instances of the window. 
+ void UnregisterWindowClass(); + + private: + WindowClassRegistrar() = default; + + static WindowClassRegistrar* instance_; + + bool class_registered_ = false; +}; + +WindowClassRegistrar* WindowClassRegistrar::instance_ = nullptr; + +const wchar_t* WindowClassRegistrar::GetWindowClass() { + if (!class_registered_) { + WNDCLASS window_class{}; + window_class.hCursor = LoadCursor(nullptr, IDC_ARROW); + window_class.lpszClassName = kWindowClassName; + window_class.style = CS_HREDRAW | CS_VREDRAW; + window_class.cbClsExtra = 0; + window_class.cbWndExtra = 0; + window_class.hInstance = GetModuleHandle(nullptr); + window_class.hIcon = + LoadIcon(window_class.hInstance, MAKEINTRESOURCE(IDI_APP_ICON)); + window_class.hbrBackground = 0; + window_class.lpszMenuName = nullptr; + window_class.lpfnWndProc = Win32Window::WndProc; + RegisterClass(&window_class); + class_registered_ = true; + } + return kWindowClassName; +} + +void WindowClassRegistrar::UnregisterWindowClass() { + UnregisterClass(kWindowClassName, nullptr); + class_registered_ = false; +} + +Win32Window::Win32Window() { + ++g_active_window_count; +} + +Win32Window::~Win32Window() { + --g_active_window_count; + Destroy(); +} + +bool Win32Window::Create(const std::wstring& title, + const Point& origin, + const Size& size) { + Destroy(); + + const wchar_t* window_class = + WindowClassRegistrar::GetInstance()->GetWindowClass(); + + const POINT target_point = {static_cast(origin.x), + static_cast(origin.y)}; + HMONITOR monitor = MonitorFromPoint(target_point, MONITOR_DEFAULTTONEAREST); + UINT dpi = FlutterDesktopGetDpiForMonitor(monitor); + double scale_factor = dpi / 96.0; + + HWND window = CreateWindow( + window_class, title.c_str(), WS_OVERLAPPEDWINDOW, + Scale(origin.x, scale_factor), Scale(origin.y, scale_factor), + Scale(size.width, scale_factor), Scale(size.height, scale_factor), + nullptr, nullptr, GetModuleHandle(nullptr), this); + + if (!window) { + return false; + } + + UpdateTheme(window); + + return 
OnCreate(); +} + +bool Win32Window::Show() { + return ShowWindow(window_handle_, SW_SHOWNORMAL); +} + +// static +LRESULT CALLBACK Win32Window::WndProc(HWND const window, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + if (message == WM_NCCREATE) { + auto window_struct = reinterpret_cast(lparam); + SetWindowLongPtr(window, GWLP_USERDATA, + reinterpret_cast(window_struct->lpCreateParams)); + + auto that = static_cast(window_struct->lpCreateParams); + EnableFullDpiSupportIfAvailable(window); + that->window_handle_ = window; + } else if (Win32Window* that = GetThisFromHandle(window)) { + return that->MessageHandler(window, message, wparam, lparam); + } + + return DefWindowProc(window, message, wparam, lparam); +} + +LRESULT +Win32Window::MessageHandler(HWND hwnd, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + switch (message) { + case WM_DESTROY: + window_handle_ = nullptr; + Destroy(); + if (quit_on_close_) { + PostQuitMessage(0); + } + return 0; + + case WM_DPICHANGED: { + auto newRectSize = reinterpret_cast(lparam); + LONG newWidth = newRectSize->right - newRectSize->left; + LONG newHeight = newRectSize->bottom - newRectSize->top; + + SetWindowPos(hwnd, nullptr, newRectSize->left, newRectSize->top, newWidth, + newHeight, SWP_NOZORDER | SWP_NOACTIVATE); + + return 0; + } + case WM_SIZE: { + RECT rect = GetClientArea(); + if (child_content_ != nullptr) { + // Size and position the child window. 
+ MoveWindow(child_content_, rect.left, rect.top, rect.right - rect.left, + rect.bottom - rect.top, TRUE); + } + return 0; + } + + case WM_ACTIVATE: + if (child_content_ != nullptr) { + SetFocus(child_content_); + } + return 0; + + case WM_DWMCOLORIZATIONCOLORCHANGED: + UpdateTheme(hwnd); + return 0; + } + + return DefWindowProc(window_handle_, message, wparam, lparam); +} + +void Win32Window::Destroy() { + OnDestroy(); + + if (window_handle_) { + DestroyWindow(window_handle_); + window_handle_ = nullptr; + } + if (g_active_window_count == 0) { + WindowClassRegistrar::GetInstance()->UnregisterWindowClass(); + } +} + +Win32Window* Win32Window::GetThisFromHandle(HWND const window) noexcept { + return reinterpret_cast( + GetWindowLongPtr(window, GWLP_USERDATA)); +} + +void Win32Window::SetChildContent(HWND content) { + child_content_ = content; + SetParent(content, window_handle_); + RECT frame = GetClientArea(); + + MoveWindow(content, frame.left, frame.top, frame.right - frame.left, + frame.bottom - frame.top, true); + + SetFocus(child_content_); +} + +RECT Win32Window::GetClientArea() { + RECT frame; + GetClientRect(window_handle_, &frame); + return frame; +} + +HWND Win32Window::GetHandle() { + return window_handle_; +} + +void Win32Window::SetQuitOnClose(bool quit_on_close) { + quit_on_close_ = quit_on_close; +} + +bool Win32Window::OnCreate() { + // No-op; provided for subclasses. + return true; +} + +void Win32Window::OnDestroy() { + // No-op; provided for subclasses. 
+} + +void Win32Window::UpdateTheme(HWND const window) { + DWORD light_mode; + DWORD light_mode_size = sizeof(light_mode); + LSTATUS result = RegGetValue(HKEY_CURRENT_USER, kGetPreferredBrightnessRegKey, + kGetPreferredBrightnessRegValue, + RRF_RT_REG_DWORD, nullptr, &light_mode, + &light_mode_size); + + if (result == ERROR_SUCCESS) { + BOOL enable_dark_mode = light_mode == 0; + DwmSetWindowAttribute(window, DWMWA_USE_IMMERSIVE_DARK_MODE, + &enable_dark_mode, sizeof(enable_dark_mode)); + } +} diff --git a/example/windows/runner/win32_window.h b/example/windows/runner/win32_window.h new file mode 100644 index 0000000000..e901dde684 --- /dev/null +++ b/example/windows/runner/win32_window.h @@ -0,0 +1,102 @@ +#ifndef RUNNER_WIN32_WINDOW_H_ +#define RUNNER_WIN32_WINDOW_H_ + +#include + +#include +#include +#include + +// A class abstraction for a high DPI-aware Win32 Window. Intended to be +// inherited from by classes that wish to specialize with custom +// rendering and input handling +class Win32Window { + public: + struct Point { + unsigned int x; + unsigned int y; + Point(unsigned int x, unsigned int y) : x(x), y(y) {} + }; + + struct Size { + unsigned int width; + unsigned int height; + Size(unsigned int width, unsigned int height) + : width(width), height(height) {} + }; + + Win32Window(); + virtual ~Win32Window(); + + // Creates a win32 window with |title| that is positioned and sized using + // |origin| and |size|. New windows are created on the default monitor. Window + // sizes are specified to the OS in physical pixels, hence to ensure a + // consistent size this function will scale the inputted width and height as + // as appropriate for the default monitor. The window is invisible until + // |Show| is called. Returns true if the window was created successfully. + bool Create(const std::wstring& title, const Point& origin, const Size& size); + + // Show the current window. Returns true if the window was successfully shown. 
+ bool Show(); + + // Release OS resources associated with window. + void Destroy(); + + // Inserts |content| into the window tree. + void SetChildContent(HWND content); + + // Returns the backing Window handle to enable clients to set icon and other + // window properties. Returns nullptr if the window has been destroyed. + HWND GetHandle(); + + // If true, closing this window will quit the application. + void SetQuitOnClose(bool quit_on_close); + + // Return a RECT representing the bounds of the current client area. + RECT GetClientArea(); + + protected: + // Processes and route salient window messages for mouse handling, + // size change and DPI. Delegates handling of these to member overloads that + // inheriting classes can handle. + virtual LRESULT MessageHandler(HWND window, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept; + + // Called when CreateAndShow is called, allowing subclass window-related + // setup. Subclasses should return false if setup fails. + virtual bool OnCreate(); + + // Called when Destroy is called. + virtual void OnDestroy(); + + private: + friend class WindowClassRegistrar; + + // OS callback called by message pump. Handles the WM_NCCREATE message which + // is passed when the non-client area is being created and enables automatic + // non-client DPI scaling so that the non-client area automatically + // responds to changes in DPI. All other messages are handled by + // MessageHandler. + static LRESULT CALLBACK WndProc(HWND const window, + UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept; + + // Retrieves a class instance pointer for |window| + static Win32Window* GetThisFromHandle(HWND const window) noexcept; + + // Update the window frame's theme to match the system theme. + static void UpdateTheme(HWND const window); + + bool quit_on_close_ = false; + + // window handle for top level window. + HWND window_handle_ = nullptr; + + // window handle for hosted content. 
+ HWND child_content_ = nullptr; +}; + +#endif // RUNNER_WIN32_WINDOW_H_ diff --git a/flutter-webrtc.code-workspace b/flutter-webrtc.code-workspace index f6ffe6a247..1ceb7bda28 100644 --- a/flutter-webrtc.code-workspace +++ b/flutter-webrtc.code-workspace @@ -8,5 +8,11 @@ "name": "example" } ], - "settings": {} + "settings": { + "java.configuration.updateBuildConfiguration": "disabled", + "clang-format.executable": "/usr/bin/clang-format", + "files.associations": { + "*.tcc": "cpp" + } + } } \ No newline at end of file diff --git a/format.sh b/format.sh new file mode 100755 index 0000000000..4b04029574 --- /dev/null +++ b/format.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +find . -type f -name "*.cc" -o -type f -name "*.h" -o -type f -name "*.m" -o -type f -name "*.mm" | xargs clang-format -style=file -i diff --git a/ios/Classes/ARDVideoDecoderFactory.h b/ios/Classes/ARDVideoDecoderFactory.h deleted file mode 100644 index bc8be99214..0000000000 --- a/ios/Classes/ARDVideoDecoderFactory.h +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import "WebRTC/RTCVideoCodecFactory.h" - -@interface ARDVideoDecoderFactory : NSObject - -@end diff --git a/ios/Classes/ARDVideoDecoderFactory.m b/ios/Classes/ARDVideoDecoderFactory.m deleted file mode 100644 index 172635ebb4..0000000000 --- a/ios/Classes/ARDVideoDecoderFactory.m +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "ARDVideoDecoderFactory.h" - -#import "WebRTC/RTCVideoCodecH264.h" -#import "WebRTC/RTCVideoDecoderVP8.h" -#import "WebRTC/RTCVideoDecoderVP9.h" - -@implementation ARDVideoDecoderFactory - -- (id)createDecoder:(RTCVideoCodecInfo *)info { - if ([info.name isEqualToString:@"H264"]) { - return [[RTCVideoDecoderH264 alloc] init]; - } else if ([info.name isEqualToString:@"VP8"]) { - return [RTCVideoDecoderVP8 vp8Decoder]; - } else if ([info.name isEqualToString:@"VP9"]) { - return [RTCVideoDecoderVP9 vp9Decoder]; - } - - return nil; -} - -- (NSArray *)supportedCodecs { - return @[ - [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil], - [[RTCVideoCodecInfo alloc] initWithName:@"VP8" parameters:nil], - [[RTCVideoCodecInfo alloc] initWithName:@"VP9" parameters:nil] - ]; -} - -@end diff --git a/ios/Classes/ARDVideoEncoderFactory.h b/ios/Classes/ARDVideoEncoderFactory.h deleted file mode 100644 index 9927ce4161..0000000000 --- a/ios/Classes/ARDVideoEncoderFactory.h +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import -#import "WebRTC/RTCVideoCodecFactory.h" - -@interface ARDVideoEncoderFactory : NSObject - -@end diff --git a/ios/Classes/ARDVideoEncoderFactory.m b/ios/Classes/ARDVideoEncoderFactory.m deleted file mode 100644 index a895011a72..0000000000 --- a/ios/Classes/ARDVideoEncoderFactory.m +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "ARDVideoEncoderFactory.h" - -#import "WebRTC/RTCVideoCodecH264.h" -#import "WebRTC/RTCVideoEncoderVP8.h" -#import "WebRTC/RTCVideoEncoderVP9.h" - -static NSString *kLevel31ConstrainedHigh = @"640c1f"; -static NSString *kLevel31ConstrainedBaseline = @"42e01f"; - -@implementation ARDVideoEncoderFactory - -- (id)createEncoder:(RTCVideoCodecInfo *)info { - if ([info.name isEqualToString:@"H264"]) { - return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info]; - } else if ([info.name isEqualToString:@"VP8"]) { - return [RTCVideoEncoderVP8 vp8Encoder]; - } else if ([info.name isEqualToString:@"VP9"]) { - return [RTCVideoEncoderVP9 vp9Encoder]; - } - - return nil; -} - -- (NSArray *)supportedCodecs { - NSMutableArray *codecs = [NSMutableArray array]; - - NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kLevel31ConstrainedHigh, - @"level-asymmetry-allowed" : @"1", - @"packetization-mode" : @"1", - }; - RTCVideoCodecInfo *constrainedHighInfo = - [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:constrainedHighParams]; - [codecs addObject:constrainedHighInfo]; - - NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kLevel31ConstrainedBaseline, - @"level-asymmetry-allowed" : @"1", - 
@"packetization-mode" : @"1", - }; - RTCVideoCodecInfo *constrainedBaselineInfo = - [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:constrainedBaselineParams]; - [codecs addObject:constrainedBaselineInfo]; - - RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:@"VP8" parameters:nil]; - [codecs addObject:vp8Info]; - - RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:@"VP9" parameters:nil]; - [codecs addObject:vp9Info]; - - return [codecs copy]; -} - -@end diff --git a/ios/Classes/AudioManager.h b/ios/Classes/AudioManager.h new file mode 120000 index 0000000000..1c21c3e586 --- /dev/null +++ b/ios/Classes/AudioManager.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.h \ No newline at end of file diff --git a/ios/Classes/AudioManager.m b/ios/Classes/AudioManager.m new file mode 120000 index 0000000000..bbcfb519ed --- /dev/null +++ b/ios/Classes/AudioManager.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.m \ No newline at end of file diff --git a/ios/Classes/AudioProcessingAdapter.h b/ios/Classes/AudioProcessingAdapter.h new file mode 120000 index 0000000000..f3048db2f1 --- /dev/null +++ b/ios/Classes/AudioProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.h \ No newline at end of file diff --git a/ios/Classes/AudioProcessingAdapter.m b/ios/Classes/AudioProcessingAdapter.m new file mode 120000 index 0000000000..803efdda50 --- /dev/null +++ b/ios/Classes/AudioProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.m \ No newline at end of file diff --git a/ios/Classes/AudioUtils.h b/ios/Classes/AudioUtils.h new file mode 120000 index 0000000000..efc6c758c8 --- /dev/null +++ b/ios/Classes/AudioUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioUtils.h \ No newline at end of file diff --git a/ios/Classes/AudioUtils.m b/ios/Classes/AudioUtils.m new file mode 120000 index 0000000000..5023efe9fd --- /dev/null +++ b/ios/Classes/AudioUtils.m 
@@ -0,0 +1 @@ +../../common/darwin/Classes/AudioUtils.m \ No newline at end of file diff --git a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h new file mode 100644 index 0000000000..cefdbfcb40 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h @@ -0,0 +1,25 @@ +// +// FlutterBroadcastScreenCapturer.h +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. +// + +#import +#import +NS_ASSUME_NONNULL_BEGIN + +extern NSString* const kRTCScreensharingSocketFD; +extern NSString* const kRTCAppGroupIdentifier; +extern NSString* const kRTCScreenSharingExtension; + +@class FlutterSocketConnectionFrameReader; + +@interface FlutterBroadcastScreenCapturer : RTCVideoCapturer +- (void)startCapture; +- (void)stopCapture; +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m new file mode 100644 index 0000000000..147207a1eb --- /dev/null +++ b/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m @@ -0,0 +1,69 @@ +// +// FlutterBroadcastScreenCapturer.m +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. 
+// + +#import "FlutterBroadcastScreenCapturer.h" +#import "FlutterSocketConnection.h" +#import "FlutterSocketConnectionFrameReader.h" + +NSString* const kRTCScreensharingSocketFD = @"rtc_SSFD"; +NSString* const kRTCAppGroupIdentifier = @"RTCAppGroupIdentifier"; +NSString* const kRTCScreenSharingExtension = @"RTCScreenSharingExtension"; + +@interface FlutterBroadcastScreenCapturer () + +@property(nonatomic, retain) FlutterSocketConnectionFrameReader* capturer; + +@end + +@interface FlutterBroadcastScreenCapturer (Private) + +@property(nonatomic, readonly) NSString* appGroupIdentifier; + +@end + +@implementation FlutterBroadcastScreenCapturer + +- (void)startCapture { + if (!self.appGroupIdentifier) { + return; + } + + NSString* socketFilePath = [self filePathForApplicationGroupIdentifier:self.appGroupIdentifier]; + FlutterSocketConnectionFrameReader* frameReader = + [[FlutterSocketConnectionFrameReader alloc] initWithDelegate:self.delegate]; + FlutterSocketConnection* connection = + [[FlutterSocketConnection alloc] initWithFilePath:socketFilePath]; + self.capturer = frameReader; + [self.capturer startCaptureWithConnection:connection]; +} + +- (void)stopCapture { + [self.capturer stopCapture]; +} +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if (completionHandler != nil) { + completionHandler(); + } +} +// MARK: Private Methods + +- (NSString*)appGroupIdentifier { + NSDictionary* infoDictionary = [[NSBundle mainBundle] infoDictionary]; + return infoDictionary[kRTCAppGroupIdentifier]; +} + +- (NSString*)filePathForApplicationGroupIdentifier:(nonnull NSString*)identifier { + NSURL* sharedContainer = + [[NSFileManager defaultManager] containerURLForSecurityApplicationGroupIdentifier:identifier]; + NSString* socketFilePath = + [[sharedContainer URLByAppendingPathComponent:kRTCScreensharingSocketFD] path]; + + return socketFilePath; +} + +@end diff --git a/ios/Classes/Broadcast/FlutterSocketConnection.h 
b/ios/Classes/Broadcast/FlutterSocketConnection.h new file mode 100644 index 0000000000..879e4e3202 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnection.h @@ -0,0 +1,20 @@ +// +// FlutterSocketConnection.h +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 08/01/2021. +// + +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface FlutterSocketConnection : NSObject + +- (instancetype)initWithFilePath:(nonnull NSString*)filePath; +- (void)openWithStreamDelegate:(id)streamDelegate; +- (void)close; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/Classes/Broadcast/FlutterSocketConnection.m b/ios/Classes/Broadcast/FlutterSocketConnection.m new file mode 100644 index 0000000000..4a0cbe3797 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnection.m @@ -0,0 +1,157 @@ +// +// FlutterSocketConnection.m +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 08/01/2021. +// + +#include +#include + +#import "FlutterSocketConnection.h" + +@interface FlutterSocketConnection () + +@property(nonatomic, assign) int serverSocket; +@property(nonatomic, strong) dispatch_source_t listeningSource; + +@property(nonatomic, strong) NSThread* networkThread; + +@property(nonatomic, strong) NSInputStream* inputStream; +@property(nonatomic, strong) NSOutputStream* outputStream; + +@end + +@implementation FlutterSocketConnection + +- (instancetype)initWithFilePath:(nonnull NSString*)filePath { + self = [super init]; + + [self setupNetworkThread]; + + self.serverSocket = socket(AF_UNIX, SOCK_STREAM, 0); + if (self.serverSocket < 0) { + NSLog(@"failure creating socket"); + return nil; + } + + if (![self setupSocketWithFileAtPath:filePath]) { + close(self.serverSocket); + return nil; + } + + return self; +} + +- (void)openWithStreamDelegate:(id)streamDelegate { + int status = listen(self.serverSocket, 10); + if (status < 0) { + NSLog(@"failure: socket listening"); + return; + } + + dispatch_source_t listeningSource = + dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, self.serverSocket, 
0, NULL); + dispatch_source_set_event_handler(listeningSource, ^{ + int clientSocket = accept(self.serverSocket, NULL, NULL); + if (clientSocket < 0) { + NSLog(@"failure accepting connection"); + return; + } + + CFReadStreamRef readStream; + CFWriteStreamRef writeStream; + + CFStreamCreatePairWithSocket(kCFAllocatorDefault, clientSocket, &readStream, &writeStream); + + self.inputStream = (__bridge_transfer NSInputStream*)readStream; + self.inputStream.delegate = streamDelegate; + [self.inputStream setProperty:@"kCFBooleanTrue" + forKey:@"kCFStreamPropertyShouldCloseNativeSocket"]; + + self.outputStream = (__bridge_transfer NSOutputStream*)writeStream; + [self.outputStream setProperty:@"kCFBooleanTrue" + forKey:@"kCFStreamPropertyShouldCloseNativeSocket"]; + + [self.networkThread start]; + [self performSelector:@selector(scheduleStreams) + onThread:self.networkThread + withObject:nil + waitUntilDone:true]; + + [self.inputStream open]; + [self.outputStream open]; + }); + + self.listeningSource = listeningSource; + dispatch_resume(listeningSource); +} + +- (void)close { + if (![self.networkThread isExecuting]) { + return; + } + + [self performSelector:@selector(unscheduleStreams) + onThread:self.networkThread + withObject:nil + waitUntilDone:true]; + + self.inputStream.delegate = nil; + self.outputStream.delegate = nil; + + [self.inputStream close]; + [self.outputStream close]; + + [self.networkThread cancel]; + + dispatch_source_cancel(self.listeningSource); + close(self.serverSocket); +} + +// MARK: - Private Methods + +- (void)setupNetworkThread { + self.networkThread = [[NSThread alloc] initWithBlock:^{ + do { + @autoreleasepool { + [[NSRunLoop currentRunLoop] run]; + } + } while (![NSThread currentThread].isCancelled); + }]; + self.networkThread.qualityOfService = NSQualityOfServiceUserInitiated; +} + +- (BOOL)setupSocketWithFileAtPath:(NSString*)filePath { + struct sockaddr_un addr; + memset(&addr, 0, sizeof(addr)); + addr.sun_family = AF_UNIX; + + if 
(filePath.length > sizeof(addr.sun_path)) { + NSLog(@"failure: path too long"); + return false; + } + + unlink(filePath.UTF8String); + strncpy(addr.sun_path, filePath.UTF8String, sizeof(addr.sun_path) - 1); + + int status = bind(self.serverSocket, (struct sockaddr*)&addr, sizeof(addr)); + if (status < 0) { + NSLog(@"failure: socket binding"); + return false; + } + + return true; +} + +- (void)scheduleStreams { + [self.inputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; + [self.outputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; +} + +- (void)unscheduleStreams { + [self.inputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; + [self.outputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes]; +} + +@end diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h new file mode 100644 index 0000000000..230616e207 --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h @@ -0,0 +1,23 @@ +// +// FlutterSocketConnectionFrameReader.h +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. +// + +#import +#import + +NS_ASSUME_NONNULL_BEGIN + +@class FlutterSocketConnection; + +@interface FlutterSocketConnectionFrameReader : RTCVideoCapturer + +- (instancetype)initWithDelegate:(__weak id)delegate; +- (void)startCaptureWithConnection:(nonnull FlutterSocketConnection*)connection; +- (void)stopCapture; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m new file mode 100644 index 0000000000..7485a3492a --- /dev/null +++ b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m @@ -0,0 +1,259 @@ +// +// FlutterSocketConnectionFrameReader.m +// RCTWebRTC +// +// Created by Alex-Dan Bumbu on 06/01/2021. 
+// + +#include + +#import +#import +#import + +#import "FlutterSocketConnection.h" +#import "FlutterSocketConnectionFrameReader.h" + +const NSUInteger kMaxReadLength = 10 * 1024; + +@interface Message : NSObject + +@property(nonatomic, assign, readonly) CVImageBufferRef imageBuffer; +@property(nonatomic, copy, nullable) void (^didComplete)(BOOL succes, Message* message); + +- (NSInteger)appendBytes:(UInt8*)buffer length:(NSUInteger)length; + +@end + +@interface Message () + +@property(nonatomic, assign) CVImageBufferRef imageBuffer; +@property(nonatomic, assign) int imageOrientation; +@property(nonatomic, assign) CFHTTPMessageRef framedMessage; + +@end + +@implementation Message + +- (instancetype)init { + self = [super init]; + if (self) { + self.imageBuffer = NULL; + } + + return self; +} + +- (void)dealloc { + CVPixelBufferRelease(_imageBuffer); +} + +/** Returns the amount of missing bytes to complete the message, or -1 when not enough bytes were + * provided to compute the message length */ +- (NSInteger)appendBytes:(UInt8*)buffer length:(NSUInteger)length { + if (!_framedMessage) { + _framedMessage = CFHTTPMessageCreateEmpty(kCFAllocatorDefault, false); + } + + CFHTTPMessageAppendBytes(_framedMessage, buffer, length); + if (!CFHTTPMessageIsHeaderComplete(_framedMessage)) { + return -1; + } + + NSInteger contentLength = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Content-Length")) integerValue]; + NSInteger bodyLength = + (NSInteger)[CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage)) length]; + + NSInteger missingBytesCount = contentLength - bodyLength; + if (missingBytesCount == 0) { + BOOL success = [self unwrapMessage:self.framedMessage]; + self.didComplete(success, self); + + CFRelease(self.framedMessage); + self.framedMessage = NULL; + } + + return missingBytesCount; +} + +// MARK: Private Methods + +- (CIContext*)imageContext { + // Initializing a CIContext object is costly, so we use a 
singleton instead + static CIContext* imageContext = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + imageContext = [[CIContext alloc] initWithOptions:nil]; + }); + + return imageContext; +} + +- (BOOL)unwrapMessage:(CFHTTPMessageRef)framedMessage { + size_t width = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Buffer-Width")) integerValue]; + size_t height = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Buffer-Height")) integerValue]; + _imageOrientation = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue( + _framedMessage, (__bridge CFStringRef) @"Buffer-Orientation")) intValue]; + + NSData* messageData = CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage)); + + // Copy the pixel buffer + CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, + kCVPixelFormatType_32BGRA, NULL, &_imageBuffer); + if (status != kCVReturnSuccess) { + NSLog(@"CVPixelBufferCreate failed"); + return false; + } + + [self copyImageData:messageData toPixelBuffer:&_imageBuffer]; + + return true; +} + +- (void)copyImageData:(NSData*)data toPixelBuffer:(CVPixelBufferRef*)pixelBuffer { + CVPixelBufferLockBaseAddress(*pixelBuffer, 0); + + CIImage* image = [CIImage imageWithData:data]; + [self.imageContext render:image toCVPixelBuffer:*pixelBuffer]; + + CVPixelBufferUnlockBaseAddress(*pixelBuffer, 0); +} + +@end + +// MARK: - + +@interface FlutterSocketConnectionFrameReader () + +@property(nonatomic, strong) FlutterSocketConnection* connection; +@property(nonatomic, strong) Message* message; + +@end + +@implementation FlutterSocketConnectionFrameReader { + mach_timebase_info_data_t _timebaseInfo; + NSInteger _readLength; + int64_t _startTimeStampNs; +} + +- (instancetype)initWithDelegate:(__weak id)delegate { + self = [super initWithDelegate:delegate]; + if (self) { + mach_timebase_info(&_timebaseInfo); + } + + return self; +} + +- 
(void)startCaptureWithConnection:(FlutterSocketConnection*)connection { + _startTimeStampNs = -1; + + self.connection = connection; + self.message = nil; + + [self.connection openWithStreamDelegate:self]; +} + +- (void)stopCapture { + [self.connection close]; +} + +// MARK: Private Methods + +- (void)readBytesFromStream:(NSInputStream*)stream { + if (!stream.hasBytesAvailable) { + return; + } + + if (!self.message) { + self.message = [[Message alloc] init]; + _readLength = kMaxReadLength; + + __weak __typeof__(self) weakSelf = self; + self.message.didComplete = ^(BOOL success, Message* message) { + if (success) { + [weakSelf didCaptureVideoFrame:message.imageBuffer + withOrientation:message.imageOrientation]; + } + + weakSelf.message = nil; + }; + } + + uint8_t buffer[_readLength]; + NSInteger numberOfBytesRead = [stream read:buffer maxLength:_readLength]; + if (numberOfBytesRead < 0) { + NSLog(@"error reading bytes from stream"); + return; + } + + _readLength = [self.message appendBytes:buffer length:numberOfBytesRead]; + if (_readLength == -1 || _readLength > kMaxReadLength) { + _readLength = kMaxReadLength; + } +} + +- (void)didCaptureVideoFrame:(CVPixelBufferRef)pixelBuffer + withOrientation:(CGImagePropertyOrientation)orientation { + int64_t currentTime = mach_absolute_time(); + int64_t currentTimeStampNs = currentTime * _timebaseInfo.numer / _timebaseInfo.denom; + + if (_startTimeStampNs < 0) { + _startTimeStampNs = currentTimeStampNs; + } + + RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + int64_t frameTimeStampNs = currentTimeStampNs - _startTimeStampNs; + + RTCVideoRotation rotation; + switch (orientation) { + case kCGImagePropertyOrientationLeft: + rotation = RTCVideoRotation_90; + break; + case kCGImagePropertyOrientationDown: + rotation = RTCVideoRotation_180; + break; + case kCGImagePropertyOrientationRight: + rotation = RTCVideoRotation_270; + break; + default: + rotation = RTCVideoRotation_0; + break; 
+ } + + RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:[rtcPixelBuffer toI420] + rotation:rotation + timeStampNs:frameTimeStampNs]; + + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end + +@implementation FlutterSocketConnectionFrameReader (NSStreamDelegate) + +- (void)stream:(NSStream*)aStream handleEvent:(NSStreamEvent)eventCode { + switch (eventCode) { + case NSStreamEventOpenCompleted: + NSLog(@"server stream open completed"); + break; + case NSStreamEventHasBytesAvailable: + [self readBytesFromStream:(NSInputStream*)aStream]; + break; + case NSStreamEventEndEncountered: + NSLog(@"server stream end encountered"); + [self stopCapture]; + break; + case NSStreamEventErrorOccurred: + NSLog(@"server stream error encountered: %@", aStream.streamError.localizedDescription); + break; + + default: + break; + } +} + +@end diff --git a/ios/Classes/CameraUtils.h b/ios/Classes/CameraUtils.h new file mode 120000 index 0000000000..a31c2baab2 --- /dev/null +++ b/ios/Classes/CameraUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.h \ No newline at end of file diff --git a/ios/Classes/CameraUtils.m b/ios/Classes/CameraUtils.m new file mode 120000 index 0000000000..336e1ea963 --- /dev/null +++ b/ios/Classes/CameraUtils.m @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.m \ No newline at end of file diff --git a/ios/Classes/FlutterRPScreenRecorder.h b/ios/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 0000000000..a34a3193c9 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of file diff --git a/ios/Classes/FlutterRPScreenRecorder.m b/ios/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git 
a/ios/Classes/FlutterRTCAudioSink-Interface.h b/ios/Classes/FlutterRTCAudioSink-Interface.h new file mode 120000 index 0000000000..940c06d646 --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink-Interface.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink-Interface.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCAudioSink.h b/ios/Classes/FlutterRTCAudioSink.h new file mode 120000 index 0000000000..5242de9e22 --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCAudioSink.mm b/ios/Classes/FlutterRTCAudioSink.mm new file mode 120000 index 0000000000..c15372c4ed --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink.mm @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink.mm \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h deleted file mode 100755 index ad78304a71..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.h +++ /dev/null @@ -1,27 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import - -@interface RTCDataChannel (Flutter) -@property (nonatomic, strong) NSString *peerConnectionId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCDataChannel) - - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(nonnull NSString *)label - config:(nonnull RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger; - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId; - - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(nonnull NSString *)data - type:(nonnull NSString *)type; - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h new file 
mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m deleted file mode 100755 index 033b1e16d3..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.m +++ /dev/null @@ -1,152 +0,0 @@ -#import -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCPeerConnection.h" -#import - -@implementation RTCDataChannel (Flutter) - -- (NSString *)peerConnectionId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setPeerConnectionId:(NSString *)peerConnectionId -{ - objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink )eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (RTCDataChannel) - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(NSString *)label - config:(RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel 
*dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; - - if (nil != dataChannel && -1 != dataChannel.channelId) { - dataChannel.peerConnectionId = peerConnectionId; - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - peerConnection.dataChannels[dataChannelId] = dataChannel; - dataChannel.delegate = self; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"cloudwebrtc.com/WebRTC/dataChannelEvent%d", dataChannel.channelId] - binaryMessenger:messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - } -} - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - RTCDataChannel *dataChannel = dataChannels[dataChannelId]; - FlutterEventChannel *eventChannel = dataChannel.eventChannel; - [eventChannel setStreamHandler:nil]; - dataChannel.eventChannel = nil; - [dataChannel close]; - [dataChannels removeObjectForKey:dataChannelId]; -} - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(NSString *)data - type:(NSString *)type -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; - NSData *bytes = [type isEqualToString:@"binary"] ? 
- [[NSData alloc] initWithBase64EncodedString:data options:0] : - [data dataUsingEncoding:NSUTF8StringEncoding]; - RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; - [dataChannel sendData:buffer]; -} - -- (NSString *)stringForDataChannelState:(RTCDataChannelState)state -{ - switch (state) { - case RTCDataChannelStateConnecting: return @"connecting"; - case RTCDataChannelStateOpen: return @"open"; - case RTCDataChannelStateClosing: return @"closing"; - case RTCDataChannelStateClosed: return @"closed"; - } - return nil; -} - -#pragma mark - RTCDataChannelDelegate methods - -// Called when the data channel state has changed. -- (void)dataChannelDidChangeState:(RTCDataChannel*)channel -{ - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelStateChanged", - @"id": @(channel.channelId), - @"state": [self stringForDataChannelState:channel.readyState]}); - } -} - -// Called when a data buffer was successfully received. -- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer -{ - NSString *type; - NSString *data; - if (buffer.isBinary) { - type = @"binary"; - data = [buffer.data base64EncodedStringWithOptions:0]; - } else { - type = @"text"; - data = [[NSString alloc] initWithData:buffer.data - encoding:NSUTF8StringEncoding]; - } - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelReceiveMessage", - @"id": @(channel.channelId), - @"type": type, - @"data": (data ? 
data : [NSNull null])}); - } -} - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDesktopCapturer.h b/ios/Classes/FlutterRTCDesktopCapturer.h new file mode 120000 index 0000000000..eff4773160 --- /dev/null +++ b/ios/Classes/FlutterRTCDesktopCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDesktopCapturer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDesktopCapturer.m b/ios/Classes/FlutterRTCDesktopCapturer.m new file mode 120000 index 0000000000..5388e628f4 --- /dev/null +++ b/ios/Classes/FlutterRTCDesktopCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDesktopCapturer.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.h b/ios/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.m b/ios/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCryptor.h b/ios/Classes/FlutterRTCFrameCryptor.h new file mode 120000 index 0000000000..ad3e0de33e --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCryptor.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCryptor.m b/ios/Classes/FlutterRTCFrameCryptor.m new file mode 120000 index 0000000000..bd62d3db56 --- /dev/null +++ 
b/ios/Classes/FlutterRTCFrameCryptor.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaRecorder.h b/ios/Classes/FlutterRTCMediaRecorder.h new file mode 120000 index 0000000000..31ca7e3b5f --- /dev/null +++ b/ios/Classes/FlutterRTCMediaRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaRecorder.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaRecorder.m b/ios/Classes/FlutterRTCMediaRecorder.m new file mode 120000 index 0000000000..1c2b1bf1a8 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaRecorder.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h deleted file mode 100644 index 6eea7fcf50..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.h +++ /dev/null @@ -1,12 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" - -@interface FlutterWebRTCPlugin (RTCMediaStream) - --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getSources:(FlutterResult)result; -@end - - diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m deleted file mode 100755 index 09bd40b660..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.m +++ /dev/null @@ -1,448 +0,0 @@ -#import - -#import -#import -#import -#import -#import -#import -#import - -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCPeerConnection.h" - -@implementation AVCaptureDevice (Flutter) - -- (NSString*)positionString { - switch (self.position) { - case AVCaptureDevicePositionUnspecified: return @"unspecified"; - 
case AVCaptureDevicePositionBack: return @"back"; - case AVCaptureDevicePositionFront: return @"front"; - } - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCMediaStream) - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} - */ -typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} - */ -typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); - -- (RTCMediaConstraints *)defaultMediaStreamConstraints { - NSDictionary *mandatoryConstraints - = @{ kRTCMediaConstraintsMinWidth : @"1280", - kRTCMediaConstraintsMinHeight : @"720", - kRTCMediaConstraintsMinFrameRate : @"30" }; - RTCMediaConstraints* constraints = - [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; -} - -/** - * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the audio-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCAudioTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCAudioTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. 
- */ -- (void)getUserAudio:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - NSString *trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack *audioTrack - = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; - - [mediaStream addAudioTrack:audioTrack]; - - successCallback(mediaStream); -} - -// TODO: Use RCTConvert for constraints ... --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult) result { - // Initialize RTCMediaStream with a unique label in order to allow multiple - // RTCMediaStream instances initialized by multiple getUserMedia calls to be - // added to 1 RTCPeerConnection instance. As suggested by - // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good - // practice, use a UUID (conforming to RFC4122). - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream - = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - [self - getUserMedia:constraints - successCallback:^ (RTCMediaStream *mediaStream) { - NSString *mediaStreamId = mediaStream.streamId; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in mediaStream.audioTracks) { - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); - } - errorCallback:^ (NSString 
*errorType, NSString *errorMessage) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] - message:errorMessage - details:nil]); - } - mediaStream:mediaStream]; -} - -/** - * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which - * satisfies specific constraints and adds it to a specific - * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track - * of the respective media type and the specified {@code constraints} specify - * that a track of the respective media type is required; otherwise, reports - * success for the specified {@code mediaStream} to a specific - * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media - * type-specific iteration of or successfully concludes the - * {@code getUserMedia()} algorithm. The method will be recursively invoked to - * conclude the whole {@code getUserMedia()} algorithm either with (successful) - * satisfaction of the specified {@code constraints} or with failure. - * - * @param constraints The {@code MediaStreamConstraints} which specifies the - * requested media types and which the new {@code RTCAudioTrack} or - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm. 
- */ -- (void)getUserMedia:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // If mediaStream contains no audioTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local audio content. - if (mediaStream.audioTracks.count == 0) { - // constraints.audio - id audioConstraints = constraints[@"audio"]; - BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; - if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { - [self requestAccessForMediaType:AVMediaTypeAudio - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } - } - - // If mediaStream contains no videoTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local video content. - if (mediaStream.videoTracks.count == 0) { - // constraints.video - id videoConstraints = constraints[@"video"]; - if (videoConstraints) { - BOOL requestAccessForVideo - = [videoConstraints isKindOfClass:[NSNumber class]] - ? [videoConstraints boolValue] - : [videoConstraints isKindOfClass:[NSDictionary class]]; -#if !TARGET_IPHONE_SIMULATOR - if (requestAccessForVideo) { - [self requestAccessForMediaType:AVMediaTypeVideo - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } -#endif - } - } - - // There are audioTracks and/or videoTracks in mediaStream as requested by - // constraints so the getUserMedia() is to conclude with success. 
- successCallback(mediaStream); -} - -/** - * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the video-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCVideoTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserVideo:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - id videoConstraints = constraints[@"video"]; - AVCaptureDevice *videoDevice; - if ([videoConstraints isKindOfClass:[NSDictionary class]]) { - // constraints.video.optional - id optionalVideoConstraints = videoConstraints[@"optional"]; - if (optionalVideoConstraints - && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { - NSArray *options = optionalVideoConstraints; - for (id item in options) { - if ([item isKindOfClass:[NSDictionary class]]) { - NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; - if (sourceId) { - videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; - if (videoDevice) { - break; - } - } - } - } - } - if (!videoDevice) { - // constraints.video.facingMode - // - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - id facingMode = 
videoConstraints[@"facingMode"]; - if (facingMode && [facingMode isKindOfClass:[NSString class]]) { - AVCaptureDevicePosition position; - if ([facingMode isEqualToString:@"environment"]) { - position = AVCaptureDevicePositionBack; - } else if ([facingMode isEqualToString:@"user"]) { - position = AVCaptureDevicePositionFront; - } else { - // If the specified facingMode value is not supported, fall back to - // the default video device. - position = AVCaptureDevicePositionUnspecified; - } - if (AVCaptureDevicePositionUnspecified != position) { - for (AVCaptureDevice *aVideoDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { - if (aVideoDevice.position == position) { - videoDevice = aVideoDevice; - break; - } - } - } - } - } - if (!videoDevice) { - videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - } - - if (videoDevice) { - RTCMediaConstraints* finalConstraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:videoConstraints[@"mandatory"] optionalConstraints:nil]; - RTCAVFoundationVideoSource *videoSource = [self.peerConnectionFactory avFoundationVideoSourceWithConstraints:finalConstraints]; - // FIXME The effort above to find a videoDevice value which satisfies the - // specified constraints was pretty much wasted. Salvage facingMode for - // starters because it is kind of a common and hence important feature on - // a mobile device. 
- switch (videoDevice.position) { - case AVCaptureDevicePositionBack: - if (videoSource.canUseBackCamera) { - videoSource.useBackCamera = YES; - } - break; - case AVCaptureDevicePositionFront: - videoSource.useBackCamera = NO; - break; - } - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - successCallback(mediaStream); - } else { - // According to step 6.2.3 of the getUserMedia() algorithm, if there is no - // source, fail with a new OverconstrainedError. - errorCallback(@"OverconstrainedError", /* errorMessage */ nil); - } -} - --(void)mediaStreamRelease:(RTCMediaStream *)stream -{ - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:stream.streamId]; - } -} - - -/** - * Obtains local media content of a specific type. Requests access for the - * specified {@code mediaType} if necessary. In other words, implements a media - * type-specific iteration of the {@code getUserMedia()} algorithm. - * - * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} - * which specifies the type of the local media content to obtain. - * @param constraints The {@code MediaStreamConstraints} which are to be - * satisfied by the obtained local media content. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is to collect the - * obtained local media content of the specified {@code mediaType}. 
- */ -- (void)requestAccessForMediaType:(NSString *)mediaType - constraints:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // According to step 6.2.1 of the getUserMedia() algorithm, if there is no - // source, fail "with a new DOMException object whose name attribute has the - // value NotFoundError." - // XXX The following approach does not work for audio in Simulator. That is - // because audio capture is done using AVAudioSession which does not use - // AVCaptureDevice there. Anyway, Simulator will not (visually) request access - // for audio. - if (mediaType == AVMediaTypeVideo - && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { - // Since successCallback and errorCallback are asynchronously invoked - // elsewhere, make sure that the invocation here is consistent. - dispatch_async(dispatch_get_main_queue(), ^ { - errorCallback(@"DOMException", @"NotFoundError"); - }); - return; - } - - [AVCaptureDevice - requestAccessForMediaType:mediaType - completionHandler:^ (BOOL granted) { - dispatch_async(dispatch_get_main_queue(), ^ { - if (granted) { - NavigatorUserMediaSuccessCallback scb - = ^ (RTCMediaStream *mediaStream) { - [self getUserMedia:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - }; - - if (mediaType == AVMediaTypeAudio) { - [self getUserAudio:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } else if (mediaType == AVMediaTypeVideo) { - [self getUserVideo:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } - } else { - // According to step 10 Permission Failure of the getUserMedia() - // algorithm, if the user has denied permission, fail "with a new - // DOMException object whose name attribute has the value - // NotAllowedError." 
- errorCallback(@"DOMException", @"NotAllowedError"); - } - }); - }]; -} - --(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"audioinput", - }]; - } - result(@{@"sources": sources}); -} - --(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track -{ - // what's different to mediaStreamTrackStop? only call mediaStream explicitly? - if (mediaStream && track) { - track.isEnabled = NO; - // FIXME this is called when track is removed from the MediaStream, - // but it doesn't mean it can not be added back using MediaStream.addTrack - //TODO: [self.localTracks removeObjectForKey:trackID]; - if ([track.kind isEqualToString:@"audio"]) { - [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; - } else if([track.kind isEqualToString:@"video"]) { - [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; - } - } -} - --(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled -{ - if (track && track.isEnabled != enabled) { - track.isEnabled = enabled; - } -} - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track -{ - if (track) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - if ([source isKindOfClass:[RTCAVFoundationVideoSource class]]) { - RTCAVFoundationVideoSource *avSource = (RTCAVFoundationVideoSource *)source; - avSource.useBackCamera = !avSource.useBackCamera; - } - 
} -} - --(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track -{ - if (track) { - track.isEnabled = NO; - [self.localTracks removeObjectForKey:track.trackId]; - } -} - -@end diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m new file mode 120000 index 0000000000..2e988ad614 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h deleted file mode 100755 index b0ef92f349..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.h +++ /dev/null @@ -1,40 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface RTCPeerConnection (Flutter) -@property (nonatomic, strong) NSMutableDictionary *dataChannels; -@property (nonatomic, strong) NSMutableDictionary *remoteStreams; -@property (nonatomic, strong) NSMutableDictionary *remoteTracks; -@property (nonatomic, strong) NSString *flutterId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) 
peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result; - -- (RTCMediaConstraints *)parseMediaConstraints:(nonnull NSDictionary *)constraints; - -@end diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m deleted file mode 100755 index 55acff1721..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.m +++ /dev/null @@ -1,503 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCDataChannel.h" - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import - -@implementation RTCPeerConnection (Flutter) - -@dynamic eventSink; - -- (NSString *)flutterId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterId:(NSString *)flutterId -{ - objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink)eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)dataChannels -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setDataChannels:(NSMutableDictionary *)dataChannels -{ - 
objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteStreams -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams -{ - objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteTracks -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks -{ - objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection -{ - [peerConnection setConfiguration:configuration]; -} - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result -{ - [peerConnection - offerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateOfferFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - 
[peerConnection - answerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateAnswerFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection addIceCandidate:candidate]; - result(nil); - //NSLog(@"addICECandidateresult: %@", candidate); -} - --(void) peerConnectionClose:(RTCPeerConnection *)peerConnection -{ - [peerConnection close]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. 
- NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSString *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; -} - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result -{ - RTCMediaStreamTrack *track = nil; - if (!trackID - || !trackID.length - || (track = self.localTracks[trackID]) - || (track = peerConnection.remoteTracks[trackID])) { - [peerConnection statsForTrack:track - statsOutputLevel:RTCStatsOutputLevelStandard - completionHandler:^(NSArray *reports) { - - NSMutableArray *stats = [NSMutableArray array]; - - for (RTCLegacyStatsReport *report in reports) { - [stats addObject:@{@"id": report.reportId, - @"type": report.type, - @"timestamp": @(report.timestamp), - @"values": report.values - }]; - } - - result(@{@"stats": stats}); - }]; - }else{ - result([FlutterError errorWithCode:@"GetStatsFailed" - message:[NSString stringWithFormat:@"Error %@", @""] - details:nil]); - } -} - -- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { - switch (state) { - case RTCIceConnectionStateNew: return @"new"; - case RTCIceConnectionStateChecking: return @"checking"; - case RTCIceConnectionStateConnected: return @"connected"; - case RTCIceConnectionStateCompleted: return @"completed"; - case RTCIceConnectionStateFailed: return @"failed"; - case RTCIceConnectionStateDisconnected: return @"disconnected"; - case RTCIceConnectionStateClosed: return @"closed"; - case RTCIceConnectionStateCount: return @"count"; - } - return nil; -} - -- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { - switch (state) { - case RTCIceGatheringStateNew: return @"new"; - case RTCIceGatheringStateGathering: return @"gathering"; 
- case RTCIceGatheringStateComplete: return @"complete"; - } - return nil; -} - -- (NSString *)stringForSignalingState:(RTCSignalingState)state { - switch (state) { - case RTCSignalingStateStable: return @"stable"; - case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; - case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; - case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; - case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; - case RTCSignalingStateClosed: return @"closed"; - } - return nil; -} - - -/** - * Parses the constraint keys and values of a specific JavaScript object into - * a specific NSMutableDictionary in a format suitable for the - * initialization of a RTCMediaConstraints instance. - * - * @param src The JavaScript object which defines constraint keys and values and - * which is to be parsed into the specified dst. - * @param dst The NSMutableDictionary into which the constraint keys - * and values defined by src are to be written in a format suitable for - * the initialization of a RTCMediaConstraints instance. - */ -- (void)parseJavaScriptConstraints:(NSDictionary *)src - intoWebRTCConstraints:(NSMutableDictionary *)dst { - for (id srcKey in src) { - id srcValue = src[srcKey]; - NSString *dstValue; - - if ([srcValue isKindOfClass:[NSNumber class]]) { - dstValue = [srcValue boolValue] ? @"true" : @"false"; - } else { - dstValue = [srcValue description]; - } - dst[[srcKey description]] = dstValue; - } -} - -/** - * Parses a JavaScript object into a new RTCMediaConstraints instance. - * - * @param constraints The JavaScript object to parse into a new - * RTCMediaConstraints instance. - * @returns A new RTCMediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. 
- */ -- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { - id mandatory = constraints[@"mandatory"]; - NSMutableDictionary *mandatory_ - = [NSMutableDictionary new]; - - if ([mandatory isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)mandatory - intoWebRTCConstraints:mandatory_]; - } - - id optional = constraints[@"optional"]; - NSMutableDictionary *optional_ - = [NSMutableDictionary new]; - - if ([optional isKindOfClass:[NSArray class]]) { - for (id o in (NSArray *)optional) { - if ([o isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)o - intoWebRTCConstraints:optional_]; - } - } - } - - return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ - optionalConstraints:optional_]; -} - -#pragma mark - RTCPeerConnectionDelegate methods - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"signalingState", - @"state" : [self stringForSignalingState:newState]}); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{ - - peerConnection.remoteTracks[track.trackId] = track; - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ - [peerConnection.remoteTracks 
removeObjectForKey:track.trackId]; - NSString *streamId = stream.streamId; - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in stream.audioTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in stream.videoTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddStream", - @"streamId": streamId, - @"audioTracks": audioTracks, - @"videoTracks": videoTracks, - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream { - NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; - // We assume there can be only one object for 1 key - if (keysArray.count > 1) { - NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); - } - NSString *streamId = stream.streamId; - - for (RTCVideoTrack *track in 
stream.videoTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - [peerConnection.remoteStreams removeObjectForKey:streamId]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveStream", - @"streamId": streamId, - }); - } -} - -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{@"event" : @"onRenegotiationNeeded",}); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceConnectionState", - @"state" : [self stringForICEConnectionState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceGatheringState", - @"state" : [self stringForICEGatheringState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onCandidate", - @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { - if (-1 == dataChannel.channelId) { - return; - } - - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - dataChannel.peerConnectionId = peerConnection.flutterId; - dataChannel.delegate = self; - 
peerConnection.dataChannels[dataChannelId] = dataChannel; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"cloudwebrtc.com/WebRTC/dataChannelEvent%d", dataChannel.channelId] - binaryMessenger:self.messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"didOpenDataChannel", - @"id": dataChannelId, - @"label": dataChannel.label - }); - } -} - -@end - diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoPlatformView.h b/ios/Classes/FlutterRTCVideoPlatformView.h new file mode 100644 index 0000000000..01e1215ea8 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformView.h @@ -0,0 +1,17 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import + +@interface FlutterRTCVideoPlatformView : UIView + +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +- (instancetype _Nonnull)initWithFrame:(CGRect)frame; + +- (void)setSize:(CGSize)size; + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformView.m b/ios/Classes/FlutterRTCVideoPlatformView.m new file mode 100644 index 0000000000..2f44ea47d2 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformView.m @@ -0,0 +1,135 @@ +#import "FlutterRTCVideoPlatformView.h" + +@implementation FlutterRTCVideoPlatformView { + CGSize _videoSize; + AVSampleBufferDisplayLayer* _videoLayer; + CGSize _remoteVideoSize; + CATransform3D _bufferTransform; + RTCVideoRotation _lastVideoRotation; +} + +- (instancetype)initWithFrame:(CGRect)frame { + if (self = [super initWithFrame:frame]) { + _videoLayer = 
[[AVSampleBufferDisplayLayer alloc] init]; + _videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; + _videoLayer.frame = CGRectZero; + _bufferTransform = CATransform3DIdentity; + _lastVideoRotation = RTCVideoRotation_0; + [self.layer addSublayer:_videoLayer]; + self.opaque = NO; + } + return self; +} + +- (void)layoutSubviews { + _videoLayer.frame = self.bounds; + [_videoLayer removeAllAnimations]; +} + +- (void)setSize:(CGSize)size { + _remoteVideoSize = size; +} + +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + + CVPixelBufferRef pixelBuffer = nil; + if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBuffer = ((RTCCVPixelBuffer*)frame.buffer).pixelBuffer; + CFRetain(pixelBuffer); + } else if ([frame.buffer isKindOfClass:[RTCI420Buffer class]]) { + pixelBuffer = [self toCVPixelBuffer:frame]; + } + + if (_lastVideoRotation != frame.rotation) { + _bufferTransform = [self fromFrameRotation:frame.rotation]; + _videoLayer.transform = _bufferTransform; + [_videoLayer layoutIfNeeded]; + _lastVideoRotation = frame.rotation; + } + + CMSampleBufferRef sampleBuffer = [self sampleBufferFromPixelBuffer:pixelBuffer]; + if (sampleBuffer) { + if (@available(iOS 14.0, *)) { + if([_videoLayer requiresFlushToResumeDecoding]) { + [_videoLayer flushAndRemoveImage]; + } + } else { + // Fallback on earlier versions + } + [_videoLayer enqueueSampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + } + + CFRelease(pixelBuffer); +} + +- (CVPixelBufferRef)toCVPixelBuffer:(RTCVideoFrame*)frame { + CVPixelBufferRef outputPixelBuffer; + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, frame.width, frame.height, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + (__bridge CFDictionaryRef)(pixelAttributes), &outputPixelBuffer); + id i420Buffer = (RTCI420Buffer*)frame.buffer; + + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + // NV12 + uint8_t* dstY = 
CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); + return outputPixelBuffer; +} + +- (CMSampleBufferRef)sampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer { + CMSampleBufferRef sampleBuffer = NULL; + OSStatus err = noErr; + CMVideoFormatDescriptionRef formatDesc = NULL; + err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc); + if (err != noErr) { + return nil; + } + CMSampleTimingInfo sampleTimingInfo = kCMTimingInfoInvalid; + err = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDesc, + &sampleTimingInfo, &sampleBuffer); + if (sampleBuffer) { + CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); + CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0); + CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); + } + if (err != noErr) { + return nil; + } + formatDesc = nil; + return sampleBuffer; +} + +- (CATransform3D)fromFrameRotation:(RTCVideoRotation)rotation { + switch (rotation) { + case RTCVideoRotation_0: + return CATransform3DIdentity; + case RTCVideoRotation_90: + return CATransform3DMakeRotation(M_PI / 2.0, 0, 0, 1); + case RTCVideoRotation_180: + return CATransform3DMakeRotation(M_PI, 0, 0, 1); + case RTCVideoRotation_270: + return 
CATransform3DMakeRotation(-M_PI / 2.0, 0, 0, 1); + } + return CATransform3DIdentity; +} + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.h b/ios/Classes/FlutterRTCVideoPlatformViewController.h new file mode 100644 index 0000000000..cb15ed7b35 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewController.h @@ -0,0 +1,23 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import + +@interface FlutterRTCVideoPlatformViewController + : NSObject + +@property(nonatomic, strong) NSObject* _Nonnull messenger; +@property(nonatomic, strong) FlutterEventSink _Nonnull eventSink; +@property(nonatomic) int64_t viewId; +@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack; + +- (instancetype _Nullable)initWithMessenger:(NSObject* _Nonnull)messenger + viewIdentifier:(int64_t)viewId + frame:(CGRect)frame; + +- (UIView* _Nonnull)view; + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.m b/ios/Classes/FlutterRTCVideoPlatformViewController.m new file mode 100644 index 0000000000..1f227ee1d5 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewController.m @@ -0,0 +1,116 @@ +#import "FlutterRTCVideoPlatformViewController.h" +#import "FlutterRTCVideoPlatformView.h" +#import "FlutterWebRTCPlugin.h" + +@implementation FlutterRTCVideoPlatformViewController { + FlutterRTCVideoPlatformView* _videoView; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; + CGSize _renderSize; + RTCVideoRotation _rotation; +} + +@synthesize messenger = _messenger; +@synthesize eventSink = _eventSink; +@synthesize viewId = _viewId; + +- (instancetype)initWithMessenger:(NSObject*)messenger + viewIdentifier:(int64_t)viewId + frame:(CGRect)frame { + self = [super init]; + if (self) { + _isFirstFrameRendered = false; + _renderSize = CGSizeZero; + _rotation = -1; + _messenger = messenger; + _videoView = [[FlutterRTCVideoPlatformView alloc] initWithFrame:frame]; + _viewId = viewId; + /*Create Event Channel.*/ + 
_eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/PlatformViewId%lld", viewId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + + return self; +} + +- (UIView*)view { + return _videoView; +} + +- (void)setVideoTrack:(RTCVideoTrack*)videoTrack { + RTCVideoTrack* oldValue = self.videoTrack; + if (oldValue == videoTrack) { + return; + } + _videoTrack = videoTrack; + _isFirstFrameRendered = false; + if (oldValue) { + [oldValue removeRenderer:(id)self]; + _videoView.frame = CGRectZero; + } + if (videoTrack) { + [videoTrack addRenderer:(id)self]; + } +} + +#pragma mark - RTCVideoRenderer methods +- (void)renderFrame:(RTCVideoFrame*)frame { + if (_renderSize.width != frame.width || _renderSize.height != frame.height || + !_isFirstFrameRendered) { + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"didPlatformViewChangeVideoSize", + @"id" : @(self.viewId), + @"width" : @(frame.width), + @"height" : @(frame.height), + }); + } + _renderSize = CGSizeMake(frame.width, frame.height); + } + + if (frame.rotation != _rotation || !_isFirstFrameRendered) { + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"didPlatformViewChangeRotation", + @"id" : @(self.viewId), + @"rotation" : @(frame.rotation), + }); + } + _rotation = frame.rotation; + } + + if (!_isFirstFrameRendered) { + if (self.eventSink) { + postEvent(self.eventSink, @{@"event" : @"didFirstFrameRendered"}); + } + self->_isFirstFrameRendered = true; + } + + [_videoView renderFrame:frame]; +} + +/** + * Sets the size of the video frame to render. + * + * @param size The size of the video frame to render. 
+ */ +- (void)setSize:(CGSize)size { + [_videoView setSize:size]; +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.h b/ios/Classes/FlutterRTCVideoPlatformViewFactory.h new file mode 100644 index 0000000000..55e43b944c --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewFactory.h @@ -0,0 +1,19 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#define FLutterRTCVideoPlatformViewFactoryID @"rtc_video_platform_view" + +@class FlutterRTCVideoPlatformViewController; + +@interface FLutterRTCVideoPlatformViewFactory : NSObject + +@property(nonatomic, strong) NSObject* _Nonnull messenger; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable renders; + +- (_Nonnull instancetype)initWithMessenger:(NSObject* _Nonnull)messenger; + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.m b/ios/Classes/FlutterRTCVideoPlatformViewFactory.m new file mode 100644 index 0000000000..c70bd68ab4 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoPlatformViewFactory.m @@ -0,0 +1,34 @@ +#import "FlutterRTCVideoPlatformViewFactory.h" +#import "FlutterRTCVideoPlatformViewController.h" + +@implementation FLutterRTCVideoPlatformViewFactory { +} + +@synthesize messenger = _messenger; + +- (instancetype)initWithMessenger:(NSObject*)messenger { + self = [super init]; + if (self) { + _messenger = messenger; + self.renders = [NSMutableDictionary new]; + } + + return self; +} + +- (NSObject*)createArgsCodec { + return [FlutterStandardMessageCodec sharedInstance]; +} + +- (NSObject*)createWithFrame:(CGRect)frame + viewIdentifier:(int64_t)viewId + arguments:(id _Nullable)args { + 
FlutterRTCVideoPlatformViewController* render = + [[FlutterRTCVideoPlatformViewController alloc] initWithMessenger:_messenger + viewIdentifier:viewId + frame:frame]; + self.renders[@(viewId)] = render; + return render; +} + +@end diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h deleted file mode 100755 index 1625e3d90e..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.h +++ /dev/null @@ -1,33 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -#import -#import -#import -#import - -@interface FlutterRTCVideoRenderer : NSObject - -/** - * The {@link RTCVideoTrack}, if any, which this instance renders. - */ -@property (nonatomic, strong) RTCVideoTrack *videoTrack; -@property (nonatomic) int64_t textureId; -@property (nonatomic, weak) id registry; -@property (nonatomic, strong) FlutterEventSink eventSink; - -- (instancetype)initWithSize:(CGSize)renderSize; - -- (void)dispose; - -@end - - -@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithSize:(CGSize)size - withTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view; - -@end diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m deleted file mode 100755 index 01ccee64fe..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.m +++ /dev/null @@ -1,177 +0,0 @@ -#import "FlutterRTCVideoRenderer.h" - -#import -#import -#import -#import - -#import "FlutterWebRTCPlugin.h" - -@implementation FlutterRTCVideoRenderer { - CGSize _renderSize; - CGSize _frameSize; - CVPixelBufferRef _pixelBufferRef; - RTCVideoRotation _rotation; - 
FlutterEventChannel* _eventChannel; -} - -@synthesize textureId = _textureId; -@synthesize registry = _registry; -@synthesize eventSink = _eventSink; - -- (instancetype)initWithSize:(CGSize)renderSize - withTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - self = [super init]; - if (self){ - _renderSize = renderSize; - _registry = registry; - _pixelBufferRef = nil; - _eventSink = nil; - _rotation = -1; - _textureId = [registry registerTexture:self]; - /*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"cloudwebrtc.com/WebRTC/Texture%lld", _textureId] - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - return self; -} - --(void)dealloc { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } -} - -- (CVPixelBufferRef)copyPixelBuffer { - if(_pixelBufferRef != nil){ - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; - } - return nil; -} - --(void)dispose{ - [_registry unregisterTexture:_textureId]; -} - -- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { - RTCVideoTrack *oldValue = self.videoTrack; - - if (oldValue != videoTrack) { - if (oldValue) { - [oldValue removeRenderer:self]; - } - _videoTrack = videoTrack; - if (videoTrack) { - [videoTrack addRenderer:self]; - } - } -} - -#pragma mark - RTCVideoRenderer methods -- (void)renderFrame:(RTCVideoFrame *)frame { - - //TODO: got a frame => scale to _renderSize => convert to BGRA32 pixelBufferRef - - [frame CopyI420BufferToCVPixelBuffer:_pixelBufferRef]; - - __weak FlutterRTCVideoRenderer *weakSelf = self; - - if(frame.rotation != _rotation){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeRotation", - @"id": @(strongSelf.textureId), - @"rotation": @(frame.rotation), - }); - } - }); - - _rotation = frame.rotation; - } - - //Notify the Flutter new pixelBufferRef to 
be ready. - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; - }); -} - -/** - * Sets the size of the video frame to render. - * - * @param size The size of the video frame to render. - */ -- (void)setSize:(CGSize)size { - if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) - { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } - CVPixelBufferCreate(kCFAllocatorDefault, - size.width, size.height, - kCVPixelFormatType_32BGRA, - NULL, &_pixelBufferRef); - } - - __weak FlutterRTCVideoRenderer *weakSelf = self; - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeVideoSize", - @"id": @(strongSelf.textureId), - @"width": @(size.width), - @"height": @(size.height), - }); - } - }); - _frameSize = size; -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithSize:(CGSize)size - withTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - return [[FlutterRTCVideoRenderer alloc] initWithSize:size withTextureRegistry:registry messenger:messenger]; -} - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view { - - RTCVideoTrack *videoTrack; - RTCMediaStream *stream = [self streamForId:streamId]; - if(stream){ - NSArray *videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - if (!videoTrack) { - NSLog(@"No video stream for react tag: %@", streamId); - } - } else { - videoTrack = nil; - } - - view.videoTrack = videoTrack; -} - -@end - diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h deleted file mode 100644 index 0c5e648937..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.h +++ /dev/null @@ -1,24 +0,0 @@ -#import -#import - -#import -#import -#import -#import -#import - -@class FlutterRTCVideoRenderer; - -@interface FlutterWebRTCPlugin : NSObject - -@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property (nonatomic, strong) NSMutableDictionary *peerConnections; -@property (nonatomic, strong) NSMutableDictionary *localStreams; -@property (nonatomic, strong) NSMutableDictionary *localTracks; -@property (nonatomic, strong) NSMutableDictionary *renders; -@property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */ -@property (nonatomic, strong) NSObject* messenger; - -- (RTCMediaStream*)streamForId:(NSString*)streamId; - -@end diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m deleted file mode 100644 index d9121aa688..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.m +++ /dev/null @@ -1,511 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCMediaStream.h" -#import 
"FlutterRTCDataChannel.h" -#import "FlutterRTCVideoRenderer.h" -#import "ARDVideoDecoderFactory.h" -#import "ARDVideoEncoderFactory.h" - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import - - - -@implementation FlutterWebRTCPlugin { - FlutterMethodChannel *_methodChannel; - id _registry; - id _messenger; - id _textures; -} - -@synthesize messenger = _messenger; - -+ (void)registerWithRegistrar:(NSObject*)registrar { - - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"cloudwebrtc.com/WebRTC.Method" - binaryMessenger:[registrar messenger]]; - UIViewController *viewController = (UIViewController *)registrar.messenger; - FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel - registrar:registrar - messenger:[registrar messenger] - viewController:viewController - withTextures:[registrar textures]]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel - registrar:(NSObject*)registrar - messenger:(NSObject*)messenger - viewController:(UIViewController *)viewController - withTextures:(NSObject *)textures{ - - self = [super init]; - - if (self) { - _methodChannel = channel; - _registry = registrar; - _textures = textures; - _messenger = messenger; - self.viewController = viewController; - } - - ARDVideoDecoderFactory *decoderFactory = [[ARDVideoDecoderFactory alloc] init]; - ARDVideoEncoderFactory *encoderFactory = [[ARDVideoEncoderFactory alloc] init]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] - initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; - - self.peerConnections = [NSMutableDictionary new]; - self.localStreams = [NSMutableDictionary new]; - self.localTracks = [NSMutableDictionary new]; - self.renders = [[NSMutableDictionary alloc] init]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result 
{ - - if ([@"createPeerConnection" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* configuration = argsMap[@"configuration"]; - NSDictionary* constraints = argsMap[@"constraints"]; - - RTCPeerConnection *peerConnection = [self.peerConnectionFactory - peerConnectionWithConfiguration:[self RTCConfiguration:configuration] - constraints:[self parseMediaConstraints:constraints] - delegate:self]; - - peerConnection.remoteStreams = [NSMutableDictionary new]; - peerConnection.remoteTracks = [NSMutableDictionary new]; - peerConnection.dataChannels = [NSMutableDictionary new]; - - NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; - peerConnection.flutterId = peerConnectionId; - - /*Create Event Channel.*/ - peerConnection.eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"cloudwebrtc.com/WebRTC/peerConnectoinEvent%@", peerConnectionId] - binaryMessenger:_messenger]; - [peerConnection.eventChannel setStreamHandler:peerConnection]; - - self.peerConnections[peerConnectionId] = peerConnection; - result(@{ @"peerConnectionId" : peerConnectionId}); - } else if ([@"getUserMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getUserMedia:constraints result:result]; - } else if ([@"getSources" isEqualToString:call.method]) { - [self getSources:result]; - }else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - [self mediaStreamGetTracks:streamId result:result]; - } else if ([@"createOffer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self 
peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createAnswer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary * constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateAnswer:constraints - peerConnection:peerConnection - result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection addStream:stream]; - result(@""); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"removeStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection removeStream:stream]; - result(nil); - 
}else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"setLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - if(peerConnection) - { - [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - - if(peerConnection) - { - [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addCandidate" isEqualToString:call.method]) { - NSDictionary* 
argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* candMap = argsMap[@"candidate"]; - NSString *sdp = candMap[@"candidate"]; - int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; - NSString *sdpMid = candMap[@"sdpMid"]; - - RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection) - { - [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getStats" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; - result(nil); - } else if([@"createDataChannel" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* label = argsMap[@"label"]; - NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; - [self createDataChannel:peerConnectionId - label:label - config:[self RTCDataChannelConfiguration:dataChannelDict] - messenger:_messenger]; - result(nil); - }else if([@"dataChannelSend" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - NSString* data = argsMap[@"data"]; - NSString* type = argsMap[@"type"]; - [self dataChannelSend:peerConnectionId - 
dataChannelId:dataChannelId - data:data - type:type]; - result(nil); - }else if([@"dataChannelClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - [self dataChannelClose:peerConnectionId - dataChannelId:dataChannelId]; - result(nil); - }else if([@"streamDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - if ([source isKindOfClass:[RTCAVFoundationVideoSource class]]) { - RTCAVFoundationVideoSource *avSource = (RTCAVFoundationVideoSource *)source; - [avSource Stop]; - } - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:streamId]; - } - result(nil); - }else if([@"trackDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - [self.localTracks removeObjectForKey:trackId]; - result(nil); - }else if([@"peerConnectionClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if (!peerConnection) { - return; - } - [peerConnection close]; - [self.peerConnections removeObjectForKey:peerConnectionId]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. 
- NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSNumber *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; - result(nil); - }else if([@"createVideoRenderer" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - CGFloat width = [argsMap[@"width"] floatValue]; - CGFloat height = [argsMap[@"height"] floatValue]; - FlutterRTCVideoRenderer* render = [self createWithSize:CGSizeMake(width, height) - withTextureRegistry:_textures - messenger:_messenger]; - self.renders[@(render.textureId)] = render; - result(@{@"textureId": @(render.textureId)}); - }else if([@"videoRendererDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; - result(nil); - }else if([@"videoRendererSetSrcObject" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - NSString *streamId = argsMap[@"streamId"]; - if(render){ - [self setStreamId:streamId view:render]; - } - result(nil); - }else{ - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc -{ - [_localTracks removeAllObjects]; - _localTracks = nil; - [_localStreams removeAllObjects]; - _localStreams = nil; - - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - peerConnection.delegate = nil; - [peerConnection close]; - } - [_peerConnections removeAllObjects]; - _peerConnectionFactory = nil; -} - - --(void)mediaStreamGetTracks:(NSString*)streamId - 
result:(FlutterResult)result { - RTCMediaStream* stream = [self streamForId:streamId]; - if(stream){ - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack *track in stream.audioTracks) { - NSString *trackId = track.trackId; - [audioTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - for (RTCMediaStreamTrack *track in stream.videoTracks) { - NSString *trackId = track.trackId; - [videoTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - result(@{@"audioTracks": audioTracks, @"videoTracks" : videoTracks }); - }else{ - result(nil); - } -} - -- (RTCMediaStream*)streamForId:(NSString*)streamId -{ - RTCMediaStream *stream = _localStreams[streamId]; - if (!stream) { - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } - } - } - return stream; -} - -- (RTCIceServer *)RTCIceServer:(id)json -{ - if (!json) { - NSLog(@"a valid iceServer value"); - return nil; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return nil; - } - - NSArray *urls; - if ([json[@"url"] isKindOfClass:[NSString class]]) { - // TODO: 'url' is non-standard - urls = @[json[@"url"]]; - } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { - urls = @[json[@"urls"]]; - } else { - urls = (NSArray*)json[@"urls"]; - } - - if (json[@"username"] != nil || json[@"credential"] != nil) { - return [[RTCIceServer alloc]initWithURLStrings:urls - username:json[@"username"] - credential:json[@"credential"]]; - } - - return [[RTCIceServer alloc] initWithURLStrings:urls]; -} - - -- (nonnull 
RTCConfiguration *)RTCConfiguration:(id)json -{ - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - - if (!json) { - return config; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return config; - } - - if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { - NSMutableArray *iceServers = [NSMutableArray new]; - for (id server in json[@"iceServers"]) { - RTCIceServer *convert = [self RTCIceServer:server]; - if (convert != nil) { - [iceServers addObject:convert]; - } - } - config.iceServers = iceServers; - } - // TODO: Implement the rest of the RTCConfigure options ... - return config; -} - -- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json -{ - if (!json) { - return nil; - } - if ([json isKindOfClass:[NSDictionary class]]) { - RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; - - if (json[@"id"]) { - [init setChannelId:(int)[json[@"id"] integerValue]]; - } - if (json[@"ordered"]) { - init.isOrdered = [json[@"ordered"] boolValue]; - } - if (json[@"maxRetransmitTime"]) { - init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; - } - if (json[@"maxRetransmits"]) { - init.maxRetransmits = [json[@"maxRetransmits"] intValue]; - } - if (json[@"negotiated"]) { - init.isNegotiated = [json[@"negotiated"] boolValue]; - } - if (json[@"protocol"]) { - init.protocol = json[@"protocol"]; - } - return init; - } - return nil; -} - -- (CGRect)parseRect:(NSDictionary *)rect { - return CGRectMake([[rect valueForKey:@"left"] doubleValue], - [[rect valueForKey:@"top"] doubleValue], - [[rect valueForKey:@"width"] doubleValue], - [[rect valueForKey:@"height"] doubleValue]); -} - -@end - diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m new file mode 120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline 
at end of file diff --git a/ios/Classes/LocalAudioTrack.h b/ios/Classes/LocalAudioTrack.h new file mode 120000 index 0000000000..421b56b2af --- /dev/null +++ b/ios/Classes/LocalAudioTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.h \ No newline at end of file diff --git a/ios/Classes/LocalAudioTrack.m b/ios/Classes/LocalAudioTrack.m new file mode 120000 index 0000000000..71fa724d15 --- /dev/null +++ b/ios/Classes/LocalAudioTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.m \ No newline at end of file diff --git a/ios/Classes/LocalTrack.h b/ios/Classes/LocalTrack.h new file mode 120000 index 0000000000..7d41789949 --- /dev/null +++ b/ios/Classes/LocalTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalTrack.h \ No newline at end of file diff --git a/ios/Classes/LocalVideoTrack.h b/ios/Classes/LocalVideoTrack.h new file mode 120000 index 0000000000..5069f7dd17 --- /dev/null +++ b/ios/Classes/LocalVideoTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.h \ No newline at end of file diff --git a/ios/Classes/LocalVideoTrack.m b/ios/Classes/LocalVideoTrack.m new file mode 120000 index 0000000000..182490a4fb --- /dev/null +++ b/ios/Classes/LocalVideoTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.m \ No newline at end of file diff --git a/ios/Classes/RTCAudioSource+Private.h b/ios/Classes/RTCAudioSource+Private.h new file mode 120000 index 0000000000..7ce3b77fd6 --- /dev/null +++ b/ios/Classes/RTCAudioSource+Private.h @@ -0,0 +1 @@ +../../common/darwin/Classes/RTCAudioSource+Private.h \ No newline at end of file diff --git a/ios/Classes/VideoProcessingAdapter.h b/ios/Classes/VideoProcessingAdapter.h new file mode 120000 index 0000000000..d93141230c --- /dev/null +++ b/ios/Classes/VideoProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.h \ No newline at end of file diff --git a/ios/Classes/VideoProcessingAdapter.m b/ios/Classes/VideoProcessingAdapter.m new file 
mode 120000 index 0000000000..c80ad1ca73 --- /dev/null +++ b/ios/Classes/VideoProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.m \ No newline at end of file diff --git a/ios/Classes/audio_sink_bridge.cpp b/ios/Classes/audio_sink_bridge.cpp new file mode 120000 index 0000000000..13215e8454 --- /dev/null +++ b/ios/Classes/audio_sink_bridge.cpp @@ -0,0 +1 @@ +../../common/darwin/Classes/audio_sink_bridge.cpp \ No newline at end of file diff --git a/ios/Classes/media_stream_interface.h b/ios/Classes/media_stream_interface.h new file mode 120000 index 0000000000..5810a86316 --- /dev/null +++ b/ios/Classes/media_stream_interface.h @@ -0,0 +1 @@ +../../common/darwin/Classes/media_stream_interface.h \ No newline at end of file diff --git a/ios/WebRTC.framework/Headers/RTCAVFoundationVideoSource.h b/ios/WebRTC.framework/Headers/RTCAVFoundationVideoSource.h deleted file mode 100644 index f2f0b154f5..0000000000 --- a/ios/WebRTC.framework/Headers/RTCAVFoundationVideoSource.h +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -@class AVCaptureSession; -@class RTCMediaConstraints; -@class RTCPeerConnectionFactory; - -NS_ASSUME_NONNULL_BEGIN - -/** - * DEPRECATED Use RTCCameraVideoCapturer instead. - * - * RTCAVFoundationVideoSource is a video source that uses - * webrtc::AVFoundationVideoCapturer. We do not currently provide a wrapper for - * that capturer because cricket::VideoCapturer is not ref counted and we cannot - * guarantee its lifetime. Instead, we expose its properties through the ref - * counted video source interface. 
- */ -RTC_EXPORT -@interface RTCAVFoundationVideoSource : RTCVideoSource - -- (instancetype)init NS_UNAVAILABLE; - -- (BOOL)IsRunning; - -- (void)Stop; - -/** - * Calling this function will cause frames to be scaled down to the - * requested resolution. Also, frames will be cropped to match the - * requested aspect ratio, and frames will be dropped to match the - * requested fps. The requested aspect ratio is orientation agnostic and - * will be adjusted to maintain the input orientation, so it doesn't - * matter if e.g. 1280x720 or 720x1280 is requested. - */ -- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps; - -/** Returns whether rear-facing camera is available for use. */ -@property(nonatomic, readonly) BOOL canUseBackCamera; - -/** Switches the camera being used (either front or back). */ -@property(nonatomic, assign) BOOL useBackCamera; - -/** Returns the active capture session. */ -@property(nonatomic, readonly) AVCaptureSession *captureSession; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCAudioSession.h b/ios/WebRTC.framework/Headers/RTCAudioSession.h deleted file mode 100644 index 354634ec39..0000000000 --- a/ios/WebRTC.framework/Headers/RTCAudioSession.h +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -extern NSString * const kRTCAudioSessionErrorDomain; -/** Method that requires lock was called without lock. */ -extern NSInteger const kRTCAudioSessionErrorLockRequired; -/** Unknown configuration error occurred. 
*/ -extern NSInteger const kRTCAudioSessionErrorConfiguration; - -@class RTCAudioSession; -@class RTCAudioSessionConfiguration; - -// Surfaces AVAudioSession events. WebRTC will listen directly for notifications -// from AVAudioSession and handle them before calling these delegate methods, -// at which point applications can perform additional processing if required. -RTC_EXPORT -@protocol RTCAudioSessionDelegate - -@optional -/** Called on a system notification thread when AVAudioSession starts an - * interruption event. - */ -- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session; - -/** Called on a system notification thread when AVAudioSession ends an - * interruption event. - */ -- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session - shouldResumeSession:(BOOL)shouldResumeSession; - -/** Called on a system notification thread when AVAudioSession changes the - * route. - */ -- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session - reason:(AVAudioSessionRouteChangeReason)reason - previousRoute:(AVAudioSessionRouteDescription *)previousRoute; - -/** Called on a system notification thread when AVAudioSession media server - * terminates. - */ -- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session; - -/** Called on a system notification thread when AVAudioSession media server - * restarts. - */ -- (void)audioSessionMediaServerReset:(RTCAudioSession *)session; - -// TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification. - -- (void)audioSession:(RTCAudioSession *)session - didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord; - -/** Called on a WebRTC thread when the audio device is notified to begin - * playback or recording. - */ -- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session; - -/** Called on a WebRTC thread when the audio device is notified to stop - * playback or recording. 
- */ -- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session; - -/** Called when the AVAudioSession output volume value changes. */ -- (void)audioSession:(RTCAudioSession *)audioSession - didChangeOutputVolume:(float)outputVolume; - -/** Called when the audio device detects a playout glitch. The argument is the - * number of glitches detected so far in the current audio playout session. - */ -- (void)audioSession:(RTCAudioSession *)audioSession - didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches; - -@end - -/** This is a protocol used to inform RTCAudioSession when the audio session - * activation state has changed outside of RTCAudioSession. The current known use - * case of this is when CallKit activates the audio session for the application - */ -RTC_EXPORT -@protocol RTCAudioSessionActivationDelegate - -/** Called when the audio session is activated outside of the app by iOS. */ -- (void)audioSessionDidActivate:(AVAudioSession *)session; - -/** Called when the audio session is deactivated outside of the app by iOS. */ -- (void)audioSessionDidDeactivate:(AVAudioSession *)session; - -@end - -/** Proxy class for AVAudioSession that adds a locking mechanism similar to - * AVCaptureDevice. This is used to that interleaving configurations between - * WebRTC and the application layer are avoided. - * - * RTCAudioSession also coordinates activation so that the audio session is - * activated only once. See |setActive:error:|. - */ -RTC_EXPORT -@interface RTCAudioSession : NSObject - -/** Convenience property to access the AVAudioSession singleton. Callers should - * not call setters on AVAudioSession directly, but other method invocations - * are fine. - */ -@property(nonatomic, readonly) AVAudioSession *session; - -/** Our best guess at whether the session is active based on results of calls to - * AVAudioSession. - */ -@property(nonatomic, readonly) BOOL isActive; -/** Whether RTCAudioSession is currently locked for configuration. 
*/ -@property(nonatomic, readonly) BOOL isLocked; - -/** If YES, WebRTC will not initialize the audio unit automatically when an - * audio track is ready for playout or recording. Instead, applications should - * call setIsAudioEnabled. If NO, WebRTC will initialize the audio unit - * as soon as an audio track is ready for playout or recording. - */ -@property(nonatomic, assign) BOOL useManualAudio; - -/** This property is only effective if useManualAudio is YES. - * Represents permission for WebRTC to initialize the VoIP audio unit. - * When set to NO, if the VoIP audio unit used by WebRTC is active, it will be - * stopped and uninitialized. This will stop incoming and outgoing audio. - * When set to YES, WebRTC will initialize and start the audio unit when it is - * needed (e.g. due to establishing an audio connection). - * This property was introduced to work around an issue where if an AVPlayer is - * playing audio while the VoIP audio unit is initialized, its audio would be - * either cut off completely or played at a reduced volume. By preventing - * the audio unit from being initialized until after the audio has completed, - * we are able to prevent the abrupt cutoff. - */ -@property(nonatomic, assign) BOOL isAudioEnabled; - -// Proxy properties. 
-@property(readonly) NSString *category; -@property(readonly) AVAudioSessionCategoryOptions categoryOptions; -@property(readonly) NSString *mode; -@property(readonly) BOOL secondaryAudioShouldBeSilencedHint; -@property(readonly) AVAudioSessionRouteDescription *currentRoute; -@property(readonly) NSInteger maximumInputNumberOfChannels; -@property(readonly) NSInteger maximumOutputNumberOfChannels; -@property(readonly) float inputGain; -@property(readonly) BOOL inputGainSettable; -@property(readonly) BOOL inputAvailable; -@property(readonly, nullable) - NSArray * inputDataSources; -@property(readonly, nullable) - AVAudioSessionDataSourceDescription *inputDataSource; -@property(readonly, nullable) - NSArray * outputDataSources; -@property(readonly, nullable) - AVAudioSessionDataSourceDescription *outputDataSource; -@property(readonly) double sampleRate; -@property(readonly) double preferredSampleRate; -@property(readonly) NSInteger inputNumberOfChannels; -@property(readonly) NSInteger outputNumberOfChannels; -@property(readonly) float outputVolume; -@property(readonly) NSTimeInterval inputLatency; -@property(readonly) NSTimeInterval outputLatency; -@property(readonly) NSTimeInterval IOBufferDuration; -@property(readonly) NSTimeInterval preferredIOBufferDuration; - -/** Default constructor. */ -+ (instancetype)sharedInstance; -- (instancetype)init NS_UNAVAILABLE; - -/** Adds a delegate, which is held weakly. */ -- (void)addDelegate:(id)delegate; -/** Removes an added delegate. */ -- (void)removeDelegate:(id)delegate; - -/** Request exclusive access to the audio session for configuration. This call - * will block if the lock is held by another object. - */ -- (void)lockForConfiguration; -/** Relinquishes exclusive access to the audio session. */ -- (void)unlockForConfiguration; - -/** If |active|, activates the audio session if it isn't already active. - * Successful calls must be balanced with a setActive:NO when activation is no - * longer required. 
If not |active|, deactivates the audio session if one is - * active and this is the last balanced call. When deactivating, the - * AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation option is passed to - * AVAudioSession. - */ -- (BOOL)setActive:(BOOL)active - error:(NSError **)outError; - -// The following methods are proxies for the associated methods on -// AVAudioSession. |lockForConfiguration| must be called before using them -// otherwise they will fail with kRTCAudioSessionErrorLockRequired. - -- (BOOL)setCategory:(NSString *)category - withOptions:(AVAudioSessionCategoryOptions)options - error:(NSError **)outError; -- (BOOL)setMode:(NSString *)mode error:(NSError **)outError; -- (BOOL)setInputGain:(float)gain error:(NSError **)outError; -- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError; -- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration - error:(NSError **)outError; -- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count - error:(NSError **)outError; -- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count - error:(NSError **)outError; -- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride - error:(NSError **)outError; -- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort - error:(NSError **)outError; -- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource - error:(NSError **)outError; -- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource - error:(NSError **)outError; -@end - -@interface RTCAudioSession (Configuration) - -/** Applies the configuration to the current session. Attempts to set all - * properties even if previous ones fail. Only the last error will be - * returned. - * |lockForConfiguration| must be called first. - */ -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration - error:(NSError **)outError; - -/** Convenience method that calls both setConfiguration and setActive. 
- * |lockForConfiguration| must be called first. - */ -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration - active:(BOOL)active - error:(NSError **)outError; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCAudioSessionConfiguration.h b/ios/WebRTC.framework/Headers/RTCAudioSessionConfiguration.h deleted file mode 100644 index 6a02751d29..0000000000 --- a/ios/WebRTC.framework/Headers/RTCAudioSessionConfiguration.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import "WebRTC/RTCMacros.h" - -NS_ASSUME_NONNULL_BEGIN - -extern const int kRTCAudioSessionPreferredNumberOfChannels; -extern const double kRTCAudioSessionHighPerformanceSampleRate; -extern const double kRTCAudioSessionLowComplexitySampleRate; -extern const double kRTCAudioSessionHighPerformanceIOBufferDuration; -extern const double kRTCAudioSessionLowComplexityIOBufferDuration; - -// Struct to hold configuration values. -RTC_EXPORT -@interface RTCAudioSessionConfiguration : NSObject - -@property(nonatomic, strong) NSString *category; -@property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions; -@property(nonatomic, strong) NSString *mode; -@property(nonatomic, assign) double sampleRate; -@property(nonatomic, assign) NSTimeInterval ioBufferDuration; -@property(nonatomic, assign) NSInteger inputNumberOfChannels; -@property(nonatomic, assign) NSInteger outputNumberOfChannels; - -/** Initializes configuration to defaults. */ -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -/** Returns the current configuration of the audio session. 
*/ -+ (instancetype)currentConfiguration; -/** Returns the configuration that WebRTC needs. */ -+ (instancetype)webRTCConfiguration; -/** Provide a way to override the default configuration. */ -+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCAudioSource.h b/ios/WebRTC.framework/Headers/RTCAudioSource.h deleted file mode 100644 index 3db31573cb..0000000000 --- a/ios/WebRTC.framework/Headers/RTCAudioSource.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCAudioSource : RTCMediaSource - -- (instancetype)init NS_UNAVAILABLE; - -// Sets the volume for the RTCMediaSource. |volume| is a gain value in the range -// [0, 10]. -// Temporary fix to be able to modify volume of remote audio tracks. -// TODO(kthelgason): Property stays here temporarily until a proper volume-api -// is available on the surface exposed by webrtc. -@property(nonatomic, assign) double volume; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCAudioTrack.h b/ios/WebRTC.framework/Headers/RTCAudioTrack.h deleted file mode 100644 index b83eede528..0000000000 --- a/ios/WebRTC.framework/Headers/RTCAudioTrack.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -@class RTCAudioSource; - -RTC_EXPORT -@interface RTCAudioTrack : RTCMediaStreamTrack - -- (instancetype)init NS_UNAVAILABLE; - -/** The audio source for this audio track. */ -@property(nonatomic, readonly) RTCAudioSource *source; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCCameraPreviewView.h b/ios/WebRTC.framework/Headers/RTCCameraPreviewView.h deleted file mode 100644 index 9018aec3d1..0000000000 --- a/ios/WebRTC.framework/Headers/RTCCameraPreviewView.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import - -@class AVCaptureSession; -@class RTCAVFoundationVideoSource; - -/** RTCCameraPreviewView is a view that renders local video from an - * AVCaptureSession. - */ -RTC_EXPORT -@interface RTCCameraPreviewView : UIView - -/** The capture session being rendered in the view. Capture session - * is assigned to AVCaptureVideoPreviewLayer async in the same - * queue that the AVCaptureSession is started/stopped. 
- */ -@property(nonatomic, strong) AVCaptureSession *captureSession; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCCameraVideoCapturer.h b/ios/WebRTC.framework/Headers/RTCCameraVideoCapturer.h deleted file mode 100644 index 943ddc8685..0000000000 --- a/ios/WebRTC.framework/Headers/RTCCameraVideoCapturer.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -// Camera capture that implements RTCVideoCapturer. Delivers frames to a RTCVideoCapturerDelegate -// (usually RTCVideoSource). -@interface RTCCameraVideoCapturer : RTCVideoCapturer - -// Capture session that is used for capturing. Valid from initialization to dealloc. -@property(readonly, nonatomic) AVCaptureSession *captureSession; - -// Returns list of available capture devices that support video capture. -+ (NSArray *)captureDevices; -// Returns list of formats that are supported by this class for this device. -+ (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device; - -// Starts and stops the capture session asynchronously. -- (void)startCaptureWithDevice:(AVCaptureDevice *)device - format:(AVCaptureDeviceFormat *)format - fps:(NSInteger)fps; -// Stops the capture session asynchronously. 
-- (void)stopCapture; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCConfiguration.h b/ios/WebRTC.framework/Headers/RTCConfiguration.h deleted file mode 100644 index fcb62b9416..0000000000 --- a/ios/WebRTC.framework/Headers/RTCConfiguration.h +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -@class RTCIceServer; -@class RTCIntervalRange; - -/** - * Represents the ice transport policy. This exposes the same states in C++, - * which include one more state than what exists in the W3C spec. - */ -typedef NS_ENUM(NSInteger, RTCIceTransportPolicy) { - RTCIceTransportPolicyNone, - RTCIceTransportPolicyRelay, - RTCIceTransportPolicyNoHost, - RTCIceTransportPolicyAll -}; - -/** Represents the bundle policy. */ -typedef NS_ENUM(NSInteger, RTCBundlePolicy) { - RTCBundlePolicyBalanced, - RTCBundlePolicyMaxCompat, - RTCBundlePolicyMaxBundle -}; - -/** Represents the rtcp mux policy. */ -typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) { - RTCRtcpMuxPolicyNegotiate, - RTCRtcpMuxPolicyRequire -}; - -/** Represents the tcp candidate policy. */ -typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) { - RTCTcpCandidatePolicyEnabled, - RTCTcpCandidatePolicyDisabled -}; - -/** Represents the candidate network policy. */ -typedef NS_ENUM(NSInteger, RTCCandidateNetworkPolicy) { - RTCCandidateNetworkPolicyAll, - RTCCandidateNetworkPolicyLowCost -}; - -/** Represents the continual gathering policy. 
*/ -typedef NS_ENUM(NSInteger, RTCContinualGatheringPolicy) { - RTCContinualGatheringPolicyGatherOnce, - RTCContinualGatheringPolicyGatherContinually -}; - -/** Represents the encryption key type. */ -typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) { - RTCEncryptionKeyTypeRSA, - RTCEncryptionKeyTypeECDSA, -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCConfiguration : NSObject - -/** An array of Ice Servers available to be used by ICE. */ -@property(nonatomic, copy) NSArray *iceServers; - -/** Which candidates the ICE agent is allowed to use. The W3C calls it - * |iceTransportPolicy|, while in C++ it is called |type|. */ -@property(nonatomic, assign) RTCIceTransportPolicy iceTransportPolicy; - -/** The media-bundling policy to use when gathering ICE candidates. */ -@property(nonatomic, assign) RTCBundlePolicy bundlePolicy; - -/** The rtcp-mux policy to use when gathering ICE candidates. */ -@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy; -@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy; -@property(nonatomic, assign) RTCCandidateNetworkPolicy candidateNetworkPolicy; -@property(nonatomic, assign) - RTCContinualGatheringPolicy continualGatheringPolicy; - -/** By default, the PeerConnection will use a limited number of IPv6 network - * interfaces, in order to avoid too many ICE candidate pairs being created - * and delaying ICE completion. - * - * Can be set to INT_MAX to effectively disable the limit. - */ -@property(nonatomic, assign) int maxIPv6Networks; - -@property(nonatomic, assign) int audioJitterBufferMaxPackets; -@property(nonatomic, assign) BOOL audioJitterBufferFastAccelerate; -@property(nonatomic, assign) int iceConnectionReceivingTimeout; -@property(nonatomic, assign) int iceBackupCandidatePairPingInterval; - -/** Key type used to generate SSL identity. Default is ECDSA. */ -@property(nonatomic, assign) RTCEncryptionKeyType keyType; - -/** ICE candidate pool size as defined in JSEP. Default is 0. 
*/ -@property(nonatomic, assign) int iceCandidatePoolSize; - -/** Prune turn ports on the same network to the same turn server. - * Default is NO. - */ -@property(nonatomic, assign) BOOL shouldPruneTurnPorts; - -/** If set to YES, this means the ICE transport should presume TURN-to-TURN - * candidate pairs will succeed, even before a binding response is received. - */ -@property(nonatomic, assign) BOOL shouldPresumeWritableWhenFullyRelayed; - -/** If set to non-nil, controls the minimal interval between consecutive ICE - * check packets. - */ -@property(nonatomic, copy, nullable) NSNumber *iceCheckMinInterval; - -/** ICE Periodic Regathering - * If set, WebRTC will periodically create and propose candidates without - * starting a new ICE generation. The regathering happens continuously with - * interval specified in milliseconds by the uniform distribution [a, b]. - */ -@property(nonatomic, strong, nullable) RTCIntervalRange *iceRegatherIntervalRange; - -- (instancetype)init; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCDataChannel.h b/ios/WebRTC.framework/Headers/RTCDataChannel.h deleted file mode 100644 index 893bd0a336..0000000000 --- a/ios/WebRTC.framework/Headers/RTCDataChannel.h +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCDataBuffer : NSObject - -/** NSData representation of the underlying buffer. */ -@property(nonatomic, readonly) NSData *data; - -/** Indicates whether |data| contains UTF-8 or binary data. 
*/ -@property(nonatomic, readonly) BOOL isBinary; - -- (instancetype)init NS_UNAVAILABLE; - -/** - * Initialize an RTCDataBuffer from NSData. |isBinary| indicates whether |data| - * contains UTF-8 or binary data. - */ -- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary; - -@end - - -@class RTCDataChannel; -RTC_EXPORT -@protocol RTCDataChannelDelegate - -/** The data channel state changed. */ -- (void)dataChannelDidChangeState:(RTCDataChannel *)dataChannel; - -/** The data channel successfully received a data buffer. */ -- (void)dataChannel:(RTCDataChannel *)dataChannel - didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer; - -@optional -/** The data channel's |bufferedAmount| changed. */ -- (void)dataChannel:(RTCDataChannel *)dataChannel - didChangeBufferedAmount:(uint64_t)amount; - -@end - - -/** Represents the state of the data channel. */ -typedef NS_ENUM(NSInteger, RTCDataChannelState) { - RTCDataChannelStateConnecting, - RTCDataChannelStateOpen, - RTCDataChannelStateClosing, - RTCDataChannelStateClosed, -}; - -RTC_EXPORT -@interface RTCDataChannel : NSObject - -/** - * A label that can be used to distinguish this data channel from other data - * channel objects. - */ -@property(nonatomic, readonly) NSString *label; - -/** Whether the data channel can send messages in unreliable mode. */ -@property(nonatomic, readonly) BOOL isReliable DEPRECATED_ATTRIBUTE; - -/** Returns whether this data channel is ordered or not. */ -@property(nonatomic, readonly) BOOL isOrdered; - -/** Deprecated. Use maxPacketLifeTime. */ -@property(nonatomic, readonly) NSUInteger maxRetransmitTime - DEPRECATED_ATTRIBUTE; - -/** - * The length of the time window (in milliseconds) during which transmissions - * and retransmissions may occur in unreliable mode. - */ -@property(nonatomic, readonly) uint16_t maxPacketLifeTime; - -/** - * The maximum number of retransmissions that are attempted in unreliable mode. 
- */ -@property(nonatomic, readonly) uint16_t maxRetransmits; - -/** - * The name of the sub-protocol used with this data channel, if any. Otherwise - * this returns an empty string. - */ -@property(nonatomic, readonly) NSString *protocol; - -/** - * Returns whether this data channel was negotiated by the application or not. - */ -@property(nonatomic, readonly) BOOL isNegotiated; - -/** Deprecated. Use channelId. */ -@property(nonatomic, readonly) NSInteger streamId DEPRECATED_ATTRIBUTE; - -/** The identifier for this data channel. */ -@property(nonatomic, readonly) int channelId; - -/** The state of the data channel. */ -@property(nonatomic, readonly) RTCDataChannelState readyState; - -/** - * The number of bytes of application data that have been queued using - * |sendData:| but that have not yet been transmitted to the network. - */ -@property(nonatomic, readonly) uint64_t bufferedAmount; - -/** The delegate for this data channel. */ -@property(nonatomic, weak) id delegate; - -- (instancetype)init NS_UNAVAILABLE; - -/** Closes the data channel. */ -- (void)close; - -/** Attempt to send |data| on this data channel's underlying data transport. */ -- (BOOL)sendData:(RTCDataBuffer *)data; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCDataChannelConfiguration.h b/ios/WebRTC.framework/Headers/RTCDataChannelConfiguration.h deleted file mode 100644 index 65f5931844..0000000000 --- a/ios/WebRTC.framework/Headers/RTCDataChannelConfiguration.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCDataChannelConfiguration : NSObject - -/** Set to YES if ordered delivery is required. */ -@property(nonatomic, assign) BOOL isOrdered; - -/** Deprecated. Use maxPacketLifeTime. */ -@property(nonatomic, assign) NSInteger maxRetransmitTimeMs DEPRECATED_ATTRIBUTE; - -/** - * Max period in milliseconds in which retransmissions will be sent. After this - * time, no more retransmissions will be sent. -1 if unset. - */ -@property(nonatomic, assign) int maxPacketLifeTime; - -/** The max number of retransmissions. -1 if unset. */ -@property(nonatomic, assign) int maxRetransmits; - -/** Set to YES if the channel has been externally negotiated and we do not send - * an in-band signalling in the form of an "open" message. - */ -@property(nonatomic, assign) BOOL isNegotiated; - -/** Deprecated. Use channelId. */ -@property(nonatomic, assign) int streamId DEPRECATED_ATTRIBUTE; - -/** The id of the data channel. */ -@property(nonatomic, assign) int channelId; - -/** Set by the application and opaque to the WebRTC implementation. */ -@property(nonatomic) NSString *protocol; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCDispatcher.h b/ios/WebRTC.framework/Headers/RTCDispatcher.h deleted file mode 100644 index 3dddead60e..0000000000 --- a/ios/WebRTC.framework/Headers/RTCDispatcher.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) { - // Main dispatcher queue. 
- RTCDispatcherTypeMain, - // Used for starting/stopping AVCaptureSession, and assigning - // capture session to AVCaptureVideoPreviewLayer. - RTCDispatcherTypeCaptureSession, - // Used for operations on AVAudioSession. - RTCDispatcherTypeAudioSession, -}; - -/** Dispatcher that asynchronously dispatches blocks to a specific - * shared dispatch queue. - */ -RTC_EXPORT -@interface RTCDispatcher : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -/** Dispatch the block asynchronously on the queue for dispatchType. - * @param dispatchType The queue type to dispatch on. - * @param block The block to dispatch asynchronously. - */ -+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType - block:(dispatch_block_t)block; - -/** Returns YES if run on queue for the dispatchType otherwise NO. - * Useful for asserting that a method is run on a correct queue. - */ -+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCEAGLVideoView.h b/ios/WebRTC.framework/Headers/RTCEAGLVideoView.h deleted file mode 100644 index 60c624e0d2..0000000000 --- a/ios/WebRTC.framework/Headers/RTCEAGLVideoView.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -#import -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -@class RTCEAGLVideoView; -RTC_EXPORT -@protocol RTCEAGLVideoViewDelegate - -- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size; - -@end - -/** - * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its - * bounds using OpenGLES 2.0 or OpenGLES 3.0. 
- */ -RTC_EXPORT -@interface RTCEAGLVideoView : UIView - -@property(nonatomic, weak) id delegate; - -- (instancetype)initWithFrame:(CGRect)frame - shader:(id)shader NS_DESIGNATED_INITIALIZER; - -- (instancetype)initWithCoder:(NSCoder *)aDecoder - shader:(id)shader NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCFieldTrials.h b/ios/WebRTC.framework/Headers/RTCFieldTrials.h deleted file mode 100644 index 78d69610d8..0000000000 --- a/ios/WebRTC.framework/Headers/RTCFieldTrials.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -/** The only valid value for the following if set is kRTCFieldTrialEnabledValue. */ -RTC_EXTERN NSString * const kRTCFieldTrialAudioSendSideBweKey; -RTC_EXTERN NSString * const kRTCFieldTrialSendSideBweWithOverheadKey; -RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03AdvertisedKey; -RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03Key; -RTC_EXTERN NSString * const kRTCFieldTrialImprovedBitrateEstimateKey; -RTC_EXTERN NSString * const kRTCFieldTrialH264HighProfileKey; -RTC_EXTERN NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey; - -/** The valid value for field trials above. */ -RTC_EXTERN NSString * const kRTCFieldTrialEnabledValue; - -/** Use a string returned by RTCFieldTrialMedianSlopeFilterValue as the value. */ -RTC_EXTERN NSString * const kRTCFieldTrialMedianSlopeFilterKey; -RTC_EXTERN NSString *RTCFieldTrialMedianSlopeFilterValue( - size_t windowSize, double thresholdGain); - -/** Use a string returned by RTCFieldTrialTrendlineFilterValue as the value. 
*/ -RTC_EXTERN NSString * const kRTCFieldTrialTrendlineFilterKey; -/** Returns a valid value for kRTCFieldTrialTrendlineFilterKey. */ -RTC_EXTERN NSString *RTCFieldTrialTrendlineFilterValue( - size_t windowSize, double smoothingCoeff, double thresholdGain); - -/** Initialize field trials using a dictionary mapping field trial keys to their values. See above - * for valid keys and values. - * Must be called before any other call into WebRTC. See: - * webrtc/system_wrappers/include/field_trial_default.h - */ -RTC_EXTERN void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials); diff --git a/ios/WebRTC.framework/Headers/RTCFileLogger.h b/ios/WebRTC.framework/Headers/RTCFileLogger.h deleted file mode 100644 index 5656b7bf43..0000000000 --- a/ios/WebRTC.framework/Headers/RTCFileLogger.h +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) { - RTCFileLoggerSeverityVerbose, - RTCFileLoggerSeverityInfo, - RTCFileLoggerSeverityWarning, - RTCFileLoggerSeverityError -}; - -typedef NS_ENUM(NSUInteger, RTCFileLoggerRotationType) { - RTCFileLoggerTypeCall, - RTCFileLoggerTypeApp, -}; - -NS_ASSUME_NONNULL_BEGIN - -// This class intercepts WebRTC logs and saves them to a file. The file size -// will not exceed the given maximum bytesize. When the maximum bytesize is -// reached, logs are rotated according to the rotationType specified. -// For kRTCFileLoggerTypeCall, logs from the beginning and the end -// are preserved while the middle section is overwritten instead. 
-// For kRTCFileLoggerTypeApp, the oldest log is overwritten. -// This class is not threadsafe. -RTC_EXPORT -@interface RTCFileLogger : NSObject - -// The severity level to capture. The default is kRTCFileLoggerSeverityInfo. -@property(nonatomic, assign) RTCFileLoggerSeverity severity; - -// The rotation type for this file logger. The default is -// kRTCFileLoggerTypeCall. -@property(nonatomic, readonly) RTCFileLoggerRotationType rotationType; - -// Disables buffering disk writes. Should be set before |start|. Buffering -// is enabled by default for performance. -@property(nonatomic, assign) BOOL shouldDisableBuffering; - -// Default constructor provides default settings for dir path, file size and -// rotation type. -- (instancetype)init; - -// Create file logger with default rotation type. -- (instancetype)initWithDirPath:(NSString *)dirPath - maxFileSize:(NSUInteger)maxFileSize; - -- (instancetype)initWithDirPath:(NSString *)dirPath - maxFileSize:(NSUInteger)maxFileSize - rotationType:(RTCFileLoggerRotationType)rotationType - NS_DESIGNATED_INITIALIZER; - -// Starts writing WebRTC logs to disk if not already started. Overwrites any -// existing file(s). -- (void)start; - -// Stops writing WebRTC logs to disk. This method is also called on dealloc. -- (void)stop; - -// Returns the current contents of the logs, or nil if start has been called -// without a stop. -- (NSData *)logData; - -@end - -NS_ASSUME_NONNULL_END - diff --git a/ios/WebRTC.framework/Headers/RTCIceCandidate.h b/ios/WebRTC.framework/Headers/RTCIceCandidate.h deleted file mode 100644 index 3b7f3e26e5..0000000000 --- a/ios/WebRTC.framework/Headers/RTCIceCandidate.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCIceCandidate : NSObject - -/** - * If present, the identifier of the "media stream identification" for the media - * component this candidate is associated with. - */ -@property(nonatomic, readonly, nullable) NSString *sdpMid; - -/** - * The index (starting at zero) of the media description this candidate is - * associated with in the SDP. - */ -@property(nonatomic, readonly) int sdpMLineIndex; - -/** The SDP string for this candidate. */ -@property(nonatomic, readonly) NSString *sdp; - -/** The URL of the ICE server which this candidate is gathered from. */ -@property(nonatomic, readonly, nullable) NSString *serverUrl; - -- (instancetype)init NS_UNAVAILABLE; - -/** - * Initialize an RTCIceCandidate from SDP. - */ -- (instancetype)initWithSdp:(NSString *)sdp - sdpMLineIndex:(int)sdpMLineIndex - sdpMid:(nullable NSString *)sdpMid - NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCIceServer.h b/ios/WebRTC.framework/Headers/RTCIceServer.h deleted file mode 100644 index 727da8acbe..0000000000 --- a/ios/WebRTC.framework/Headers/RTCIceServer.h +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import - -typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) { - RTCTlsCertPolicySecure, - RTCTlsCertPolicyInsecureNoCheck -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCIceServer : NSObject - -/** URI(s) for this server represented as NSStrings. */ -@property(nonatomic, readonly) NSArray *urlStrings; - -/** Username to use if this RTCIceServer object is a TURN server. */ -@property(nonatomic, readonly, nullable) NSString *username; - -/** Credential to use if this RTCIceServer object is a TURN server. */ -@property(nonatomic, readonly, nullable) NSString *credential; - -/** - * TLS certificate policy to use if this RTCIceServer object is a TURN server. - */ -@property(nonatomic, readonly) RTCTlsCertPolicy tlsCertPolicy; - -/** - If the URIs in |urls| only contain IP addresses, this field can be used - to indicate the hostname, which may be necessary for TLS (using the SNI - extension). If |urls| itself contains the hostname, this isn't necessary. - */ -@property(nonatomic, readonly, nullable) NSString *hostname; - -/** List of protocols to be used in the TLS ALPN extension. */ -@property(nonatomic, readonly) NSArray *tlsAlpnProtocols; - -/** - List elliptic curves to be used in the TLS elliptic curves extension. - Only curve names supported by OpenSSL should be used (eg. "P-256","X25519"). - */ -@property(nonatomic, readonly) NSArray *tlsEllipticCurves; - -- (nonnull instancetype)init NS_UNAVAILABLE; - -/** Convenience initializer for a server with no authentication (e.g. STUN). */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, and credentialType. 
- */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, and TLS cert policy. - */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, TLS cert policy and hostname. - */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy - hostname:(nullable NSString *)hostname; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, TLS cert policy, hostname and ALPN protocols. - */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy - hostname:(nullable NSString *)hostname - tlsAlpnProtocols:(NSArray *)tlsAlpnProtocols; - -/** - * Initialize an RTCIceServer with its associated URLs, optional username, - * optional credential, TLS cert policy, hostname, ALPN protocols and - * elliptic curves. 
- */ -- (instancetype)initWithURLStrings:(NSArray *)urlStrings - username:(nullable NSString *)username - credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy - hostname:(nullable NSString *)hostname - tlsAlpnProtocols:(nullable NSArray *)tlsAlpnProtocols - tlsEllipticCurves:(nullable NSArray *)tlsEllipticCurves - NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCIntervalRange.h b/ios/WebRTC.framework/Headers/RTCIntervalRange.h deleted file mode 100644 index 9384b15368..0000000000 --- a/ios/WebRTC.framework/Headers/RTCIntervalRange.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -NS_ASSUME_NONNULL_BEGIN - -@interface RTCIntervalRange : NSObject - -@property(nonatomic, readonly) NSInteger min; -@property(nonatomic, readonly) NSInteger max; - -- (instancetype)init; -- (instancetype)initWithMin:(NSInteger)min - max:(NSInteger)max - NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END - diff --git a/ios/WebRTC.framework/Headers/RTCLegacyStatsReport.h b/ios/WebRTC.framework/Headers/RTCLegacyStatsReport.h deleted file mode 100644 index 14d996c1ce..0000000000 --- a/ios/WebRTC.framework/Headers/RTCLegacyStatsReport.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -/** This does not currently conform to the spec. */ -RTC_EXPORT -@interface RTCLegacyStatsReport : NSObject - -/** Time since 1970-01-01T00:00:00Z in milliseconds. */ -@property(nonatomic, readonly) CFTimeInterval timestamp; - -/** The type of stats held by this object. */ -@property(nonatomic, readonly) NSString *type; - -/** The identifier for this object. */ -@property(nonatomic, readonly) NSString *reportId; - -/** A dictionary holding the actual stats. */ -@property(nonatomic, readonly) NSDictionary *values; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCLogging.h b/ios/WebRTC.framework/Headers/RTCLogging.h deleted file mode 100644 index e4f1920382..0000000000 --- a/ios/WebRTC.framework/Headers/RTCLogging.h +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -// Subset of rtc::LoggingSeverity. -typedef NS_ENUM(NSInteger, RTCLoggingSeverity) { - RTCLoggingSeverityVerbose, - RTCLoggingSeverityInfo, - RTCLoggingSeverityWarning, - RTCLoggingSeverityError, -}; - -// Wrapper for C++ LOG(sev) macros. -// Logs the log string to the webrtc logstream for the given severity. -RTC_EXTERN void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string); - -// Wrapper for rtc::LogMessage::LogToDebug. -// Sets the minimum severity to be logged to console. 
-RTC_EXTERN void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity); - -// Returns the filename with the path prefix removed. -RTC_EXTERN NSString* RTCFileName(const char* filePath); - -// Some convenience macros. - -#define RTCLogString(format, ...) \ - [NSString stringWithFormat:@"(%@:%d %s): " format, \ - RTCFileName(__FILE__), \ - __LINE__, \ - __FUNCTION__, \ - ##__VA_ARGS__] - -#define RTCLogFormat(severity, format, ...) \ - do { \ - NSString* log_string = RTCLogString(format, ##__VA_ARGS__); \ - RTCLogEx(severity, log_string); \ - } while (false) - -#define RTCLogVerbose(format, ...) \ - RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__) \ - -#define RTCLogInfo(format, ...) \ - RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__) \ - -#define RTCLogWarning(format, ...) \ - RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__) \ - -#define RTCLogError(format, ...) \ - RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__) \ - -#if !defined(NDEBUG) -#define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__) -#else -#define RTCLogDebug(format, ...) \ - do { \ - } while (false) -#endif - -#define RTCLog(format, ...) RTCLogInfo(format, ##__VA_ARGS__) diff --git a/ios/WebRTC.framework/Headers/RTCMTLVideoView.h b/ios/WebRTC.framework/Headers/RTCMTLVideoView.h deleted file mode 100644 index 226c62a6a1..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMTLVideoView.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "WebRTC/RTCVideoRenderer.h" - -// Check if metal is supported in WebRTC. 
-// NOTE: Currently arm64 == Metal. -#if defined(__aarch64__) -#define RTC_SUPPORTS_METAL -#endif - -NS_ASSUME_NONNULL_BEGIN - -/** - * RTCMTLVideoView is thin wrapper around MTKView. - * - * It has id property that renders video frames in the view's - * bounds using Metal. - * NOTE: always check if metal is available on the running device via - * RTC_SUPPORTS_METAL macro before initializing this class. - */ -NS_CLASS_AVAILABLE_IOS(9) - -RTC_EXPORT -@interface RTCMTLVideoView : UIView - -@end -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCMacros.h b/ios/WebRTC.framework/Headers/RTCMacros.h deleted file mode 100644 index 08cb93829a..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMacros.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef SDK_OBJC_FRAMEWORK_HEADERS_WEBRTC_RTCMACROS_H_ -#define SDK_OBJC_FRAMEWORK_HEADERS_WEBRTC_RTCMACROS_H_ - -#define RTC_EXPORT __attribute__((visibility("default"))) - -#if defined(__cplusplus) -#define RTC_EXTERN extern "C" RTC_EXPORT -#else -#define RTC_EXTERN extern RTC_EXPORT -#endif - -#ifdef __OBJC__ -#define RTC_FWD_DECL_OBJC_CLASS(classname) @class classname -#else -#define RTC_FWD_DECL_OBJC_CLASS(classname) typedef struct objc_object classname -#endif - -#endif // SDK_OBJC_FRAMEWORK_HEADERS_WEBRTC_RTCMACROS_H_ diff --git a/ios/WebRTC.framework/Headers/RTCMediaConstraints.h b/ios/WebRTC.framework/Headers/RTCMediaConstraints.h deleted file mode 100644 index 26a116888a..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMediaConstraints.h +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -/** Constraint keys for media sources. */ -RTC_EXTERN NSString * const kRTCMediaConstraintsMinAspectRatio; -RTC_EXTERN NSString * const kRTCMediaConstraintsMaxAspectRatio; -RTC_EXTERN NSString * const kRTCMediaConstraintsMaxWidth; -RTC_EXTERN NSString * const kRTCMediaConstraintsMinWidth; -RTC_EXTERN NSString * const kRTCMediaConstraintsMaxHeight; -RTC_EXTERN NSString * const kRTCMediaConstraintsMinHeight; -RTC_EXTERN NSString * const kRTCMediaConstraintsMaxFrameRate; -RTC_EXTERN NSString * const kRTCMediaConstraintsMinFrameRate; -RTC_EXTERN NSString * const kRTCMediaConstraintsLevelControl; -/** The value for this key should be a base64 encoded string containing - * the data from the serialized configuration proto. 
- */ -RTC_EXTERN NSString * const kRTCMediaConstraintsAudioNetworkAdaptorConfig; - -/** Constraint keys for generating offers and answers. */ -RTC_EXTERN NSString * const kRTCMediaConstraintsIceRestart; -RTC_EXTERN NSString * const kRTCMediaConstraintsOfferToReceiveAudio; -RTC_EXTERN NSString * const kRTCMediaConstraintsOfferToReceiveVideo; -RTC_EXTERN NSString * const kRTCMediaConstraintsVoiceActivityDetection; - -/** Constraint values for Boolean parameters. */ -RTC_EXTERN NSString * const kRTCMediaConstraintsValueTrue; -RTC_EXTERN NSString * const kRTCMediaConstraintsValueFalse; - -RTC_EXPORT -@interface RTCMediaConstraints : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -/** Initialize with mandatory and/or optional constraints. */ -- (instancetype)initWithMandatoryConstraints: - (nullable NSDictionary *)mandatory - optionalConstraints: - (nullable NSDictionary *)optional - NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCMediaSource.h b/ios/WebRTC.framework/Headers/RTCMediaSource.h deleted file mode 100644 index 6cfcb7d2ce..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMediaSource.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -typedef NS_ENUM(NSInteger, RTCSourceState) { - RTCSourceStateInitializing, - RTCSourceStateLive, - RTCSourceStateEnded, - RTCSourceStateMuted, -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCMediaSource : NSObject - -/** The current state of the RTCMediaSource. 
*/ -@property(nonatomic, readonly) RTCSourceState state; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCMediaStream.h b/ios/WebRTC.framework/Headers/RTCMediaStream.h deleted file mode 100644 index b97960d436..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMediaStream.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -@class RTCAudioTrack; -@class RTCPeerConnectionFactory; -@class RTCVideoTrack; - -RTC_EXPORT -@interface RTCMediaStream : NSObject - -/** The audio tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *audioTracks; - -/** The video tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *videoTracks; - -/** An identifier for this media stream. */ -@property(nonatomic, readonly) NSString *streamId; - -- (instancetype)init NS_UNAVAILABLE; - -/** Adds the given audio track to this media stream. */ -- (void)addAudioTrack:(RTCAudioTrack *)audioTrack; - -/** Adds the given video track to this media stream. */ -- (void)addVideoTrack:(RTCVideoTrack *)videoTrack; - -/** Removes the given audio track to this media stream. */ -- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack; - -/** Removes the given video track to this media stream. 
*/ -- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCMediaStreamTrack.h b/ios/WebRTC.framework/Headers/RTCMediaStreamTrack.h deleted file mode 100644 index c42f0cfb1a..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMediaStreamTrack.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -/** - * Represents the state of the track. This exposes the same states in C++. - */ -typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) { - RTCMediaStreamTrackStateLive, - RTCMediaStreamTrackStateEnded -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXTERN NSString * const kRTCMediaStreamTrackKindAudio; -RTC_EXTERN NSString * const kRTCMediaStreamTrackKindVideo; - -RTC_EXPORT -@interface RTCMediaStreamTrack : NSObject - -/** - * The kind of track. For example, "audio" if this track represents an audio - * track and "video" if this track represents a video track. - */ -@property(nonatomic, readonly) NSString *kind; - -/** An identifier string. */ -@property(nonatomic, readonly) NSString *trackId; - -/** The enabled state of the track. */ -@property(nonatomic, assign) BOOL isEnabled; - -/** The state of the track. 
*/ -@property(nonatomic, readonly) RTCMediaStreamTrackState readyState; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCMetrics.h b/ios/WebRTC.framework/Headers/RTCMetrics.h deleted file mode 100644 index fa2d48fa07..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMetrics.h +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -/** - * Enables gathering of metrics (which can be fetched with - * RTCGetAndResetMetrics). Must be called before any other call into WebRTC. - */ -RTC_EXTERN void RTCEnableMetrics(); - -/** Gets and clears native histograms. */ -RTC_EXTERN NSArray *RTCGetAndResetMetrics(); diff --git a/ios/WebRTC.framework/Headers/RTCMetricsSampleInfo.h b/ios/WebRTC.framework/Headers/RTCMetricsSampleInfo.h deleted file mode 100644 index f01bea9b71..0000000000 --- a/ios/WebRTC.framework/Headers/RTCMetricsSampleInfo.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCMetricsSampleInfo : NSObject - -/** - * Example of RTCMetricsSampleInfo: - * name: "WebRTC.Video.InputFramesPerSecond" - * min: 1 - * max: 100 - * bucketCount: 50 - * samples: [29]:2 [30]:1 - */ - -/** The name of the histogram. */ -@property(nonatomic, readonly) NSString *name; - -/** The minimum bucket value. */ -@property(nonatomic, readonly) int min; - -/** The maximum bucket value. */ -@property(nonatomic, readonly) int max; - -/** The number of buckets. */ -@property(nonatomic, readonly) int bucketCount; - -/** A dictionary holding the samples . */ -@property(nonatomic, readonly) NSDictionary *samples; - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCPeerConnection.h b/ios/WebRTC.framework/Headers/RTCPeerConnection.h deleted file mode 100644 index 8c7782c547..0000000000 --- a/ios/WebRTC.framework/Headers/RTCPeerConnection.h +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import - -@class RTCConfiguration; -@class RTCDataChannel; -@class RTCDataChannelConfiguration; -@class RTCIceCandidate; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCMediaStreamTrack; -@class RTCPeerConnectionFactory; -@class RTCRtpReceiver; -@class RTCRtpSender; -@class RTCVideoTrack; -@class RTCSessionDescription; -@class RTCLegacyStatsReport; - -NS_ASSUME_NONNULL_BEGIN - -extern NSString * const kRTCPeerConnectionErrorDomain; -extern int const kRTCSessionDescriptionErrorCode; - -/** Represents the signaling state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCSignalingState) { - RTCSignalingStateStable, - RTCSignalingStateHaveLocalOffer, - RTCSignalingStateHaveLocalPrAnswer, - RTCSignalingStateHaveRemoteOffer, - RTCSignalingStateHaveRemotePrAnswer, - // Not an actual state, represents the total number of states. - RTCSignalingStateClosed, -}; - -/** Represents the ice connection state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCIceConnectionState) { - RTCIceConnectionStateNew, - RTCIceConnectionStateChecking, - RTCIceConnectionStateConnected, - RTCIceConnectionStateCompleted, - RTCIceConnectionStateFailed, - RTCIceConnectionStateDisconnected, - RTCIceConnectionStateClosed, - RTCIceConnectionStateCount, -}; - -/** Represents the ice gathering state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCIceGatheringState) { - RTCIceGatheringStateNew, - RTCIceGatheringStateGathering, - RTCIceGatheringStateComplete, -}; - -/** Represents the stats output level. */ -typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) { - RTCStatsOutputLevelStandard, - RTCStatsOutputLevelDebug, -}; - -@class RTCPeerConnection; - -RTC_EXPORT -@protocol RTCPeerConnectionDelegate - -/** Called when the SignalingState changed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeSignalingState:(RTCSignalingState)stateChanged; - -/** Called when media is received on a new stream from remote peer. 
*/ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didAddStream:(RTCMediaStream *)stream; - -/** Called when a remote peer closes a stream. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveStream:(RTCMediaStream *)stream; - -/** Called when media is received on a new track from remote stream. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream - didAddTrack:(RTCMediaStreamTrack*)track; - -/** Called when a remote stream closes a track. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream - didRemoveTrack:(RTCMediaStreamTrack*)track; - -/** Called when negotiation is needed, for example ICE has restarted. */ -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection; - -/** Called any time the IceConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeIceConnectionState:(RTCIceConnectionState)newState; - -/** Called any time the IceGatheringState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeIceGatheringState:(RTCIceGatheringState)newState; - -/** New ice candidate has been found. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didGenerateIceCandidate:(RTCIceCandidate *)candidate; - -/** Called when a group of local Ice candidates have been removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveIceCandidates:(NSArray *)candidates; - -/** New data channel has been opened. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didOpenDataChannel:(RTCDataChannel *)dataChannel; - -@end - -RTC_EXPORT -@interface RTCPeerConnection : NSObject - -/** The object that will be notifed about events such as state changes and - * streams being added or removed. 
- */ -@property(nonatomic, weak, nullable) id delegate; -@property(nonatomic, readonly) NSArray *localStreams; -@property(nonatomic, readonly, nullable) - RTCSessionDescription *localDescription; -@property(nonatomic, readonly, nullable) - RTCSessionDescription *remoteDescription; -@property(nonatomic, readonly) RTCSignalingState signalingState; -@property(nonatomic, readonly) RTCIceConnectionState iceConnectionState; -@property(nonatomic, readonly) RTCIceGatheringState iceGatheringState; -@property(nonatomic, readonly, copy) RTCConfiguration *configuration; - -/** Gets all RTCRtpSenders associated with this peer connection. - * Note: reading this property returns different instances of RTCRtpSender. - * Use isEqual: instead of == to compare RTCRtpSender instances. - */ -@property(nonatomic, readonly) NSArray *senders; - -/** Gets all RTCRtpReceivers associated with this peer connection. - * Note: reading this property returns different instances of RTCRtpReceiver. - * Use isEqual: instead of == to compare RTCRtpReceiver instances. - */ -@property(nonatomic, readonly) NSArray *receivers; - -- (instancetype)init NS_UNAVAILABLE; - -/** Sets the PeerConnection's global configuration to |configuration|. - * Any changes to STUN/TURN servers or ICE candidate policy will affect the - * next gathering phase, and cause the next call to createOffer to generate - * new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies - * cannot be changed with this method. - */ -- (BOOL)setConfiguration:(RTCConfiguration *)configuration; - -/** Terminate all media and close the transport. */ -- (void)close; - -/** Provide a remote candidate to the ICE Agent. */ -- (void)addIceCandidate:(RTCIceCandidate *)candidate; - -/** Remove a group of remote candidates from the ICE Agent. */ -- (void)removeIceCandidates:(NSArray *)candidates; - -/** Add a new media stream to be sent on this peer connection. 
*/ -- (void)addStream:(RTCMediaStream *)stream; - -/** Remove the given media stream from this peer connection. */ -- (void)removeStream:(RTCMediaStream *)stream; - -/** Generate an SDP offer. */ -- (void)offerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^) - (RTCSessionDescription * _Nullable sdp, - NSError * _Nullable error))completionHandler; - -/** Generate an SDP answer. */ -- (void)answerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^) - (RTCSessionDescription * _Nullable sdp, - NSError * _Nullable error))completionHandler; - -/** Apply the supplied RTCSessionDescription as the local description. */ -- (void)setLocalDescription:(RTCSessionDescription *)sdp - completionHandler: - (nullable void (^)(NSError * _Nullable error))completionHandler; - -/** Apply the supplied RTCSessionDescription as the remote description. */ -- (void)setRemoteDescription:(RTCSessionDescription *)sdp - completionHandler: - (nullable void (^)(NSError * _Nullable error))completionHandler; - -/** Limits the bandwidth allocated for all RTP streams sent by this - * PeerConnection. Nil parameters will be unchanged. Setting - * |currentBitrateBps| will force the available bitrate estimate to the given - * value. Returns YES if the parameters were successfully updated. - */ -- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps - currentBitrateBps:(nullable NSNumber *)currentBitrateBps - maxBitrateBps:(nullable NSNumber *)maxBitrateBps; - -/** Start or stop recording an Rtc EventLog. */ -- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath - maxSizeInBytes:(int64_t)maxSizeInBytes; -- (void)stopRtcEventLog; - -@end - -@interface RTCPeerConnection (Media) - -/** Create an RTCRtpSender with the specified kind and media stream ID. - * See RTCMediaStreamTrack.h for available kinds. 
- */ -- (RTCRtpSender *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId; - -@end - -@interface RTCPeerConnection (DataChannel) - -/** Create a new data channel with the given label and configuration. */ -- (RTCDataChannel *)dataChannelForLabel:(NSString *)label - configuration:(RTCDataChannelConfiguration *)configuration; - -@end - -@interface RTCPeerConnection (Stats) - -/** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil - * statistics are gathered for all tracks. - */ -- (void)statsForTrack: - (nullable RTCMediaStreamTrack *)mediaStreamTrack - statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel - completionHandler: - (nullable void (^)(NSArray *stats))completionHandler; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCPeerConnectionFactory.h b/ios/WebRTC.framework/Headers/RTCPeerConnectionFactory.h deleted file mode 100644 index 42acbfcd7e..0000000000 --- a/ios/WebRTC.framework/Headers/RTCPeerConnectionFactory.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -@class RTCAVFoundationVideoSource; -@class RTCAudioSource; -@class RTCAudioTrack; -@class RTCConfiguration; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCPeerConnection; -@class RTCVideoSource; -@class RTCVideoTrack; -@protocol RTCPeerConnectionDelegate; -@protocol RTCVideoDecoderFactory; -@protocol RTCVideoEncoderFactory; - -RTC_EXPORT -@interface RTCPeerConnectionFactory : NSObject - -/* Initialize object with default H264 video encoder/decoder factories */ -- (instancetype)init; - -/* Initialize object with injectable video encoder/decoder factories */ -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory; - -/** Initialize an RTCAudioSource with constraints. */ -- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints; - -/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source with no - * constraints. - */ -- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId; - -/** Initialize an RTCAudioTrack with a source and an id. */ -- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source - trackId:(NSString *)trackId; - -/** Initialize an RTCAVFoundationVideoSource with constraints. */ -- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints: - (nullable RTCMediaConstraints *)constraints; - -/** Initialize a generic RTCVideoSource. The RTCVideoSource should be passed to a RTCVideoCapturer - * implementation, e.g. RTCCameraVideoCapturer, in order to produce frames. - */ -- (RTCVideoSource *)videoSource; - -/** Initialize an RTCVideoTrack with a source and an id. */ -- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source - trackId:(NSString *)trackId; - -/** Initialize an RTCMediaStream with an id. 
*/ -- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId; - -/** Initialize an RTCPeerConnection with a configuration, constraints, and - * delegate. - */ -- (RTCPeerConnection *)peerConnectionWithConfiguration: - (RTCConfiguration *)configuration - constraints: - (RTCMediaConstraints *)constraints - delegate: - (nullable id)delegate; - -/** Start an AecDump recording. This API call will likely change in the future. */ -- (BOOL)startAecDumpWithFilePath:(NSString *)filePath - maxSizeInBytes:(int64_t)maxSizeInBytes; - -/* Stop an active AecDump recording */ -- (void)stopAecDump; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCRtpCodecParameters.h b/ios/WebRTC.framework/Headers/RTCRtpCodecParameters.h deleted file mode 100644 index c7ca2f59c0..0000000000 --- a/ios/WebRTC.framework/Headers/RTCRtpCodecParameters.h +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXTERN const NSString * const kRTCRtxCodecName; -RTC_EXTERN const NSString * const kRTCRedCodecName; -RTC_EXTERN const NSString * const kRTCUlpfecCodecName; -RTC_EXTERN const NSString * const kRTCFlexfecCodecName; -RTC_EXTERN const NSString * const kRTCOpusCodecName; -RTC_EXTERN const NSString * const kRTCIsacCodecName; -RTC_EXTERN const NSString * const kRTCL16CodecName; -RTC_EXTERN const NSString * const kRTCG722CodecName; -RTC_EXTERN const NSString * const kRTCIlbcCodecName; -RTC_EXTERN const NSString * const kRTCPcmuCodecName; -RTC_EXTERN const NSString * const kRTCPcmaCodecName; -RTC_EXTERN const NSString * const kRTCDtmfCodecName; -RTC_EXTERN const NSString * const kRTCComfortNoiseCodecName; -RTC_EXTERN const NSString * const kRTCVp8CodecName; -RTC_EXTERN const NSString * const kRTCVp9CodecName; -RTC_EXTERN const NSString * const kRTCH264CodecName; - -/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTCRtpCodecParameters */ -RTC_EXPORT -@interface RTCRtpCodecParameters : NSObject - -/** The RTP payload type. */ -@property(nonatomic, assign) int payloadType; - -/** - * The codec MIME subtype. Valid types are listed in: - * http://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-2 - * - * Several supported types are represented by the constants above. - */ -@property(nonatomic, readonly, nonnull) NSString *name; - -/** - * The media type of this codec. Equivalent to MIME top-level type. - * - * Valid values are kRTCMediaStreamTrackKindAudio and - * kRTCMediaStreamTrackKindVideo. - */ -@property(nonatomic, readonly, nonnull) NSString *kind; - -/** The codec clock rate expressed in Hertz. */ -@property(nonatomic, readonly, nullable) NSNumber *clockRate; - -/** - * The number of channels (mono=1, stereo=2). - * Set to null for video codecs. 
- **/ -@property(nonatomic, readonly, nullable) NSNumber *numChannels; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCRtpEncodingParameters.h b/ios/WebRTC.framework/Headers/RTCRtpEncodingParameters.h deleted file mode 100644 index a956f0d4c3..0000000000 --- a/ios/WebRTC.framework/Headers/RTCRtpEncodingParameters.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCRtpEncodingParameters : NSObject - -/** Controls whether the encoding is currently transmitted. */ -@property(nonatomic, assign) BOOL isActive; - -/** The maximum bitrate to use for the encoding, or nil if there is no - * limit. - */ -@property(nonatomic, copy, nullable) NSNumber *maxBitrateBps; - -/** The SSRC being used by this encoding. */ -@property(nonatomic, readonly, nullable) NSNumber *ssrc; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCRtpParameters.h b/ios/WebRTC.framework/Headers/RTCRtpParameters.h deleted file mode 100644 index bdebf84884..0000000000 --- a/ios/WebRTC.framework/Headers/RTCRtpParameters.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCRtpParameters : NSObject - -/** The currently active encodings in the order of preference. */ -@property(nonatomic, copy) NSArray *encodings; - -/** The negotiated set of send codecs in order of preference. */ -@property(nonatomic, copy) NSArray *codecs; - -- (instancetype)init NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCRtpReceiver.h b/ios/WebRTC.framework/Headers/RTCRtpReceiver.h deleted file mode 100644 index 6c39a25310..0000000000 --- a/ios/WebRTC.framework/Headers/RTCRtpReceiver.h +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -/** Represents the media type of the RtpReceiver. */ -typedef NS_ENUM(NSInteger, RTCRtpMediaType) { - RTCRtpMediaTypeAudio, - RTCRtpMediaTypeVideo, - RTCRtpMediaTypeData, -}; - -@class RTCRtpReceiver; - -RTC_EXPORT -@protocol RTCRtpReceiverDelegate - -/** Called when the first RTP packet is received. - * - * Note: Currently if there are multiple RtpReceivers of the same media type, - * they will all call OnFirstPacketReceived at once. - * - * For example, if we create three audio receivers, A/B/C, they will listen to - * the same signal from the underneath network layer. Whenever the first audio packet - * is received, the underneath signal will be fired. 
All the receivers A/B/C will be - * notified and the callback of the receiver's delegate will be called. - * - * The process is the same for video receivers. - */ -- (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver - didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType; - -@end - -RTC_EXPORT -@protocol RTCRtpReceiver - -/** A unique identifier for this receiver. */ -@property(nonatomic, readonly) NSString *receiverId; - -/** The currently active RTCRtpParameters, as defined in - * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. - * - * The WebRTC specification only defines RTCRtpParameters in terms of senders, - * but this API also applies them to receivers, similar to ORTC: - * http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. - */ -@property(nonatomic, readonly) RTCRtpParameters *parameters; - -/** The RTCMediaStreamTrack associated with the receiver. - * Note: reading this property returns a new instance of - * RTCMediaStreamTrack. Use isEqual: instead of == to compare - * RTCMediaStreamTrack instances. - */ -@property(nonatomic, readonly) RTCMediaStreamTrack *track; - -/** The delegate for this RtpReceiver. */ -@property(nonatomic, weak) id delegate; - -@end - -RTC_EXPORT -@interface RTCRtpReceiver : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCRtpSender.h b/ios/WebRTC.framework/Headers/RTCRtpSender.h deleted file mode 100644 index d910c6ceb2..0000000000 --- a/ios/WebRTC.framework/Headers/RTCRtpSender.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -#import -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@protocol RTCRtpSender - -/** A unique identifier for this sender. */ -@property(nonatomic, readonly) NSString *senderId; - -/** The currently active RTCRtpParameters, as defined in - * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. - */ -@property(nonatomic, copy) RTCRtpParameters *parameters; - -/** The RTCMediaStreamTrack associated with the sender. - * Note: reading this property returns a new instance of - * RTCMediaStreamTrack. Use isEqual: instead of == to compare - * RTCMediaStreamTrack instances. - */ -@property(nonatomic, copy, nullable) RTCMediaStreamTrack *track; - -@end - -RTC_EXPORT -@interface RTCRtpSender : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCSSLAdapter.h b/ios/WebRTC.framework/Headers/RTCSSLAdapter.h deleted file mode 100644 index c862d29792..0000000000 --- a/ios/WebRTC.framework/Headers/RTCSSLAdapter.h +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -/** - * Initialize and clean up the SSL library. Failure is fatal. These call the - * corresponding functions in webrtc/rtc_base/ssladapter.h. 
- */ -RTC_EXTERN BOOL RTCInitializeSSL(); -RTC_EXTERN BOOL RTCCleanupSSL(); diff --git a/ios/WebRTC.framework/Headers/RTCSessionDescription.h b/ios/WebRTC.framework/Headers/RTCSessionDescription.h deleted file mode 100644 index 41439804a8..0000000000 --- a/ios/WebRTC.framework/Headers/RTCSessionDescription.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -/** - * Represents the session description type. This exposes the same types that are - * in C++, which doesn't include the rollback type that is in the W3C spec. - */ -typedef NS_ENUM(NSInteger, RTCSdpType) { - RTCSdpTypeOffer, - RTCSdpTypePrAnswer, - RTCSdpTypeAnswer, -}; - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT -@interface RTCSessionDescription : NSObject - -/** The type of session description. */ -@property(nonatomic, readonly) RTCSdpType type; - -/** The SDP string representation of this session description. */ -@property(nonatomic, readonly) NSString *sdp; - -- (instancetype)init NS_UNAVAILABLE; - -/** Initialize a session description with a type and SDP string. */ -- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp - NS_DESIGNATED_INITIALIZER; - -+ (NSString *)stringForType:(RTCSdpType)type; - -+ (RTCSdpType)typeForString:(NSString *)string; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCTracing.h b/ios/WebRTC.framework/Headers/RTCTracing.h deleted file mode 100644 index 136479118b..0000000000 --- a/ios/WebRTC.framework/Headers/RTCTracing.h +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -RTC_EXTERN void RTCSetupInternalTracer(); -/** Starts capture to specified file. Must be a valid writable path. - * Returns YES if capture starts. - */ -RTC_EXTERN BOOL RTCStartInternalCapture(NSString *filePath); -RTC_EXTERN void RTCStopInternalCapture(); -RTC_EXTERN void RTCShutdownInternalTracer(); diff --git a/ios/WebRTC.framework/Headers/RTCVideoCapturer.h b/ios/WebRTC.framework/Headers/RTCVideoCapturer.h deleted file mode 100644 index 3addec83b1..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoCapturer.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import - -NS_ASSUME_NONNULL_BEGIN - -@class RTCVideoCapturer; - -RTC_EXPORT -@protocol RTCVideoCapturerDelegate -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame; -@end - -RTC_EXPORT -@interface RTCVideoCapturer : NSObject - -@property(nonatomic, readonly, weak) id delegate; - -- (instancetype)initWithDelegate:(id)delegate; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoCodec.h b/ios/WebRTC.framework/Headers/RTCVideoCodec.h deleted file mode 100644 index ad56a5dea9..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoCodec.h +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -/** Represents an encoded frame's type. */ -typedef NS_ENUM(NSUInteger, RTCFrameType) { - RTCFrameTypeEmptyFrame = 0, - RTCFrameTypeAudioFrameSpeech = 1, - RTCFrameTypeAudioFrameCN = 2, - RTCFrameTypeVideoFrameKey = 3, - RTCFrameTypeVideoFrameDelta = 4, -}; - -typedef NS_ENUM(NSUInteger, RTCVideoContentType) { - RTCVideoContentTypeUnspecified, - RTCVideoContentTypeScreenshare, -}; - -/** Represents an encoded frame. Corresponds to webrtc::EncodedImage. 
*/ -RTC_EXPORT -@interface RTCEncodedImage : NSObject - -@property(nonatomic, strong) NSData *buffer; -@property(nonatomic, assign) int32_t encodedWidth; -@property(nonatomic, assign) int32_t encodedHeight; -@property(nonatomic, assign) uint32_t timeStamp; -@property(nonatomic, assign) int64_t captureTimeMs; -@property(nonatomic, assign) int64_t ntpTimeMs; -@property(nonatomic, assign) uint8_t flags; -@property(nonatomic, assign) int64_t encodeStartMs; -@property(nonatomic, assign) int64_t encodeFinishMs; -@property(nonatomic, assign) RTCFrameType frameType; -@property(nonatomic, assign) RTCVideoRotation rotation; -@property(nonatomic, assign) BOOL completeFrame; -@property(nonatomic, strong) NSNumber *qp; -@property(nonatomic, assign) RTCVideoContentType contentType; - -@end - -/** Information for header. Corresponds to webrtc::RTPFragmentationHeader. */ -RTC_EXPORT -@interface RTCRtpFragmentationHeader : NSObject - -@property(nonatomic, strong) NSArray *fragmentationOffset; -@property(nonatomic, strong) NSArray *fragmentationLength; -@property(nonatomic, strong) NSArray *fragmentationTimeDiff; -@property(nonatomic, strong) NSArray *fragmentationPlType; - -@end - -/** Implement this protocol to pass codec specific info from the encoder. - * Corresponds to webrtc::CodecSpecificInfo. - */ -RTC_EXPORT -@protocol RTCCodecSpecificInfo - -@end - -/** Callback block for encoder. */ -typedef BOOL (^RTCVideoEncoderCallback)(RTCEncodedImage *frame, - id info, - RTCRtpFragmentationHeader *header); - -/** Callback block for decoder. */ -typedef void (^RTCVideoDecoderCallback)(RTCVideoFrame *frame); - -typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) { - RTCVideoCodecModeRealtimeVideo, - RTCVideoCodecModeScreensharing, -}; - -/** Holds information to identify a codec. Corresponds to cricket::VideoCodec. 
*/ -RTC_EXPORT -@interface RTCVideoCodecInfo : NSObject - -- (instancetype)init NS_UNAVAILABLE; - -- (instancetype)initWithName:(NSString *)name; - -- (instancetype)initWithName:(NSString *)name - parameters:(nullable NSDictionary *)parameters - NS_DESIGNATED_INITIALIZER; - -- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info; - -@property(nonatomic, readonly) NSString *name; -@property(nonatomic, readonly) NSDictionary *parameters; - -@end - -/** Settings for encoder. Corresponds to webrtc::VideoCodec. */ -RTC_EXPORT -@interface RTCVideoEncoderSettings : NSObject - -@property(nonatomic, strong) NSString *name; - -@property(nonatomic, assign) unsigned short width; -@property(nonatomic, assign) unsigned short height; - -@property(nonatomic, assign) unsigned int startBitrate; // kilobits/sec. -@property(nonatomic, assign) unsigned int maxBitrate; -@property(nonatomic, assign) unsigned int minBitrate; -@property(nonatomic, assign) unsigned int targetBitrate; - -@property(nonatomic, assign) uint32_t maxFramerate; - -@property(nonatomic, assign) unsigned int qpMax; -@property(nonatomic, assign) RTCVideoCodecMode mode; - -@end - -/** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */ -RTC_EXPORT -@interface RTCVideoEncoderQpThresholds : NSObject - -- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high; - -@property(nonatomic, readonly) NSInteger low; -@property(nonatomic, readonly) NSInteger high; - -@end - -/** Protocol for encoder implementations. 
*/ -RTC_EXPORT -@protocol RTCVideoEncoder - -- (void)setCallback:(RTCVideoEncoderCallback)callback; -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings - numberOfCores:(int)numberOfCores; -- (NSInteger)releaseEncoder; -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(id)info - frameTypes:(NSArray *)frameTypes; -- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate; -- (NSString *)implementationName; - -/** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to - * keep the QP from the encoded images within the given range. Returning nil from this function - * disables quality scaling. */ -- (RTCVideoEncoderQpThresholds *)scalingSettings; - -@end - -/** Protocol for decoder implementations. */ -RTC_EXPORT -@protocol RTCVideoDecoder - -- (void)setCallback:(RTCVideoDecoderCallback)callback; -- (NSInteger)startDecodeWithSettings:(RTCVideoEncoderSettings *)settings - numberOfCores:(int)numberOfCores; -- (NSInteger)releaseDecoder; -- (NSInteger)decode:(RTCEncodedImage *)encodedImage - missingFrames:(BOOL)missingFrames - fragmentationHeader:(RTCRtpFragmentationHeader *)fragmentationHeader - codecSpecificInfo:(__nullable id)info - renderTimeMs:(int64_t)renderTimeMs; -- (NSString *)implementationName; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoCodecFactory.h b/ios/WebRTC.framework/Headers/RTCVideoCodecFactory.h deleted file mode 100644 index aa5f746398..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoCodecFactory.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. */ -RTC_EXPORT -@protocol RTCVideoEncoderFactory - -- (id)createEncoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? - -@end - -/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. */ -RTC_EXPORT -@protocol RTCVideoDecoderFactory - -- (id)createDecoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoCodecH264.h b/ios/WebRTC.framework/Headers/RTCVideoCodecH264.h deleted file mode 100644 index 1c307ebe84..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoCodecH264.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -/** Class for H264 specific config. */ -typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) { - RTCH264PacketizationModeNonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed - RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed -}; - -RTC_EXPORT -@interface RTCCodecSpecificInfoH264 : NSObject - -@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode; - -@end - -/** Encoder. 
*/ -RTC_EXPORT -@interface RTCVideoEncoderH264 : NSObject - -- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo; - -@end - -/** Decoder. */ -RTC_EXPORT -@interface RTCVideoDecoderH264 : NSObject -@end - -/** Encoder factory. */ -RTC_EXPORT -@interface RTCVideoEncoderFactoryH264 : NSObject -@end - -/** Decoder factory. */ -RTC_EXPORT -@interface RTCVideoDecoderFactoryH264 : NSObject -@end diff --git a/ios/WebRTC.framework/Headers/RTCVideoDecoderOpenH264.h b/ios/WebRTC.framework/Headers/RTCVideoDecoderOpenH264.h deleted file mode 100644 index 6b20bbc736..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoDecoderOpenH264.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -RTC_EXPORT -@interface RTCVideoDecoderOpenH264 : NSObject - -/* This returns a OpenH264 decoder that can be returned from a RTCVideoDecoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)openH264Decoder; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCVideoDecoderVP8.h b/ios/WebRTC.framework/Headers/RTCVideoDecoderVP8.h deleted file mode 100644 index 962b6312f7..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoDecoderVP8.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -RTC_EXPORT -@interface RTCVideoDecoderVP8 : NSObject - -/* This returns a VP8 decoder that can be returned from a RTCVideoDecoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)vp8Decoder; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCVideoDecoderVP9.h b/ios/WebRTC.framework/Headers/RTCVideoDecoderVP9.h deleted file mode 100644 index faf66ef065..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoDecoderVP9.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -RTC_EXPORT -@interface RTCVideoDecoderVP9 : NSObject - -/* This returns a VP9 decoder that can be returned from a RTCVideoDecoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)vp9Decoder; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCVideoEncoderOpenH264.h b/ios/WebRTC.framework/Headers/RTCVideoEncoderOpenH264.h deleted file mode 100644 index 9b76611ffb..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoEncoderOpenH264.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -RTC_EXPORT -@interface RTCVideoEncoderOpenH264 : NSObject - -/* This returns a OpenH264 encoder that can be returned from a RTCVideoEncoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)openH264Encoder; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCVideoEncoderVP8.h b/ios/WebRTC.framework/Headers/RTCVideoEncoderVP8.h deleted file mode 100644 index e63b24dde7..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoEncoderVP8.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -RTC_EXPORT -@interface RTCVideoEncoderVP8 : NSObject - -/* This returns a VP8 encoder that can be returned from a RTCVideoEncoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. 
- */ -+ (id)vp8Encoder; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCVideoEncoderVP9.h b/ios/WebRTC.framework/Headers/RTCVideoEncoderVP9.h deleted file mode 100644 index cef8c82e96..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoEncoderVP9.h +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import - -RTC_EXPORT -@interface RTCVideoEncoderVP9 : NSObject - -/* This returns a VP9 encoder that can be returned from a RTCVideoEncoderFactory injected into - * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be - * used independently from the RTCPeerConnectionFactory. - */ -+ (id)vp9Encoder; - -@end diff --git a/ios/WebRTC.framework/Headers/RTCVideoFrame.h b/ios/WebRTC.framework/Headers/RTCVideoFrame.h deleted file mode 100644 index aaf62abc9a..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoFrame.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -typedef NS_ENUM(NSInteger, RTCVideoRotation) { - RTCVideoRotation_0 = 0, - RTCVideoRotation_90 = 90, - RTCVideoRotation_180 = 180, - RTCVideoRotation_270 = 270, -}; - -@protocol RTCVideoFrameBuffer; - -// RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame. -RTC_EXPORT -@interface RTCVideoFrame : NSObject - -/** Width without rotation applied. */ -@property(nonatomic, readonly) int width; - -/** Height without rotation applied. */ -@property(nonatomic, readonly) int height; -@property(nonatomic, readonly) RTCVideoRotation rotation; - -/** Timestamp in nanoseconds. */ -@property(nonatomic, readonly) int64_t timeStampNs; - -/** Timestamp 90 kHz. */ -@property(nonatomic, assign) int32_t timeStamp; - -@property(nonatomic, readonly) id buffer; - -- (instancetype)init NS_UNAVAILABLE; -- (instancetype)new NS_UNAVAILABLE; - -/** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp. - * Deprecated - initialize with a RTCCVPixelBuffer instead - */ -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - rotation:(RTCVideoRotation)rotation - timeStampNs:(int64_t)timeStampNs - DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); - -/** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and - * scaling. Cropping will be applied first on the pixel buffer, followed by - * scaling to the final resolution of scaledWidth x scaledHeight. - */ -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - scaledWidth:(int)scaledWidth - scaledHeight:(int)scaledHeight - cropWidth:(int)cropWidth - cropHeight:(int)cropHeight - cropX:(int)cropX - cropY:(int)cropY - rotation:(RTCVideoRotation)rotation - timeStampNs:(int64_t)timeStampNs - DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); - -/** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp. 
- */ -- (instancetype)initWithBuffer:(id)frameBuffer - rotation:(RTCVideoRotation)rotation - timeStampNs:(int64_t)timeStampNs; - -/** Return a frame that is guaranteed to be I420, i.e. it is possible to access - * the YUV data on it. - */ -- (RTCVideoFrame *)newI420VideoFrame; - -- (void)CopyI420BufferToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoFrameBuffer.h b/ios/WebRTC.framework/Headers/RTCVideoFrameBuffer.h deleted file mode 100644 index ffcdf26042..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoFrameBuffer.h +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -@protocol RTCI420Buffer; - -// RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer. -RTC_EXPORT -@protocol RTCVideoFrameBuffer - -@property(nonatomic, readonly) int width; -@property(nonatomic, readonly) int height; - -- (id)toI420; - -@end - -/** Protocol for RTCVideoFrameBuffers containing YUV planar data. 
*/ -@protocol RTCYUVPlanarBuffer - -@property(nonatomic, readonly) int chromaWidth; -@property(nonatomic, readonly) int chromaHeight; -@property(nonatomic, readonly) const uint8_t *dataY; -@property(nonatomic, readonly) const uint8_t *dataU; -@property(nonatomic, readonly) const uint8_t *dataV; -@property(nonatomic, readonly) int strideY; -@property(nonatomic, readonly) int strideU; -@property(nonatomic, readonly) int strideV; - -- (instancetype)initWithWidth:(int)width height:(int)height; -- (instancetype)initWithWidth:(int)width - height:(int)height - strideY:(int)strideY - strideU:(int)strideU - strideV:(int)strideV; - -@end - -/** Extension of the YUV planar data buffer with mutable data access */ -@protocol RTCMutableYUVPlanarBuffer - -@property(nonatomic, readonly) uint8_t *mutableDataY; -@property(nonatomic, readonly) uint8_t *mutableDataU; -@property(nonatomic, readonly) uint8_t *mutableDataV; - -@end - -/** Protocol for RTCYUVPlanarBuffers containing I420 data */ -@protocol RTCI420Buffer -@end - -/** Extension of the I420 buffer with mutable data access */ -@protocol RTCMutableI420Buffer -@end - -/** RTCVideoFrameBuffer containing a CVPixelBufferRef */ -RTC_EXPORT -@interface RTCCVPixelBuffer : NSObject - -@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer; -@property(nonatomic, readonly) int cropX; -@property(nonatomic, readonly) int cropY; - -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer; -- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - adaptedWidth:(int)adaptedWidth - adaptedHeight:(int)adaptedHeight - cropWidth:(int)cropWidth - cropHeight:(int)cropHeight - cropX:(int)cropX - cropY:(int)cropY; - -- (BOOL)requiresCropping; -- (BOOL)requiresScalingToWidth:(int)width height:(int)height; -- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height; -/** The minimum size of the |tmpBuffer| must be the number of bytes returned from the - * bufferSizeForCroppingAndScalingToWidth:height: method. 
- */ -- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t *)tmpBuffer; - -@end - -/** RTCI420Buffer implements the RTCI420Buffer protocol */ -RTC_EXPORT -@interface RTCI420Buffer : NSObject -@end - -/** Mutable version of RTCI420Buffer */ -RTC_EXPORT -@interface RTCMutableI420Buffer : RTCI420Buffer -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoRenderer.h b/ios/WebRTC.framework/Headers/RTCVideoRenderer.h deleted file mode 100644 index 5e2e820918..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoRenderer.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#if TARGET_OS_IPHONE -#import -#endif - -#import - -NS_ASSUME_NONNULL_BEGIN - -@class RTCVideoFrame; - -RTC_EXPORT -@protocol RTCVideoRenderer - -/** The size of the frame. */ -- (void)setSize:(CGSize)size; - -/** The frame to be displayed. */ -- (void)renderFrame:(nullable RTCVideoFrame *)frame; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoSource.h b/ios/WebRTC.framework/Headers/RTCVideoSource.h deleted file mode 100644 index a9ebc06c88..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoSource.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import -#import -#import - -NS_ASSUME_NONNULL_BEGIN - -RTC_EXPORT - -@interface RTCVideoSource : RTCMediaSource - -- (instancetype)init NS_UNAVAILABLE; - -/** - * Calling this function will cause frames to be scaled down to the - * requested resolution. Also, frames will be cropped to match the - * requested aspect ratio, and frames will be dropped to match the - * requested fps. The requested aspect ratio is orientation agnostic and - * will be adjusted to maintain the input orientation, so it doesn't - * matter if e.g. 1280x720 or 720x1280 is requested. - */ -- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoTrack.h b/ios/WebRTC.framework/Headers/RTCVideoTrack.h deleted file mode 100644 index 899d7c3478..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoTrack.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -@protocol RTCVideoRenderer; -@class RTCPeerConnectionFactory; -@class RTCVideoSource; - -RTC_EXPORT -@interface RTCVideoTrack : RTCMediaStreamTrack - -/** The video source for this video track. */ -@property(nonatomic, readonly) RTCVideoSource *source; - -- (instancetype)init NS_UNAVAILABLE; - -/** Register a renderer that will render all frames received on this track. */ -- (void)addRenderer:(id)renderer; - -/** Deregister a renderer. 
*/ -- (void)removeRenderer:(id)renderer; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/RTCVideoViewShading.h b/ios/WebRTC.framework/Headers/RTCVideoViewShading.h deleted file mode 100644 index 6f3ece94a1..0000000000 --- a/ios/WebRTC.framework/Headers/RTCVideoViewShading.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import - -NS_ASSUME_NONNULL_BEGIN - -/** - * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in - * rendering for the RTCEAGLVideoView/RTCNSGLVideoView. - */ -RTC_EXPORT -@protocol RTCVideoViewShading - -/** Callback for I420 frames. Each plane is given as a texture. */ -- (void)applyShadingForFrameWithWidth:(int)width - height:(int)height - rotation:(RTCVideoRotation)rotation - yPlane:(GLuint)yPlane - uPlane:(GLuint)uPlane - vPlane:(GLuint)vPlane; - -/** Callback for NV12 frames. Each plane is given as a texture. */ -- (void)applyShadingForFrameWithWidth:(int)width - height:(int)height - rotation:(RTCVideoRotation)rotation - yPlane:(GLuint)yPlane - uvPlane:(GLuint)uvPlane; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/WebRTC.framework/Headers/UIDevice+RTCDevice.h b/ios/WebRTC.framework/Headers/UIDevice+RTCDevice.h deleted file mode 100644 index 7e01c47f39..0000000000 --- a/ios/WebRTC.framework/Headers/UIDevice+RTCDevice.h +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -typedef NS_ENUM(NSInteger, RTCDeviceType) { - RTCDeviceTypeUnknown, - RTCDeviceTypeIPhone1G, - RTCDeviceTypeIPhone3G, - RTCDeviceTypeIPhone3GS, - RTCDeviceTypeIPhone4, - RTCDeviceTypeIPhone4Verizon, - RTCDeviceTypeIPhone4S, - RTCDeviceTypeIPhone5GSM, - RTCDeviceTypeIPhone5GSM_CDMA, - RTCDeviceTypeIPhone5CGSM, - RTCDeviceTypeIPhone5CGSM_CDMA, - RTCDeviceTypeIPhone5SGSM, - RTCDeviceTypeIPhone5SGSM_CDMA, - RTCDeviceTypeIPhone6Plus, - RTCDeviceTypeIPhone6, - RTCDeviceTypeIPhone6S, - RTCDeviceTypeIPhone6SPlus, - RTCDeviceTypeIPodTouch1G, - RTCDeviceTypeIPodTouch2G, - RTCDeviceTypeIPodTouch3G, - RTCDeviceTypeIPodTouch4G, - RTCDeviceTypeIPodTouch5G, - RTCDeviceTypeIPad, - RTCDeviceTypeIPad2Wifi, - RTCDeviceTypeIPad2GSM, - RTCDeviceTypeIPad2CDMA, - RTCDeviceTypeIPad2Wifi2, - RTCDeviceTypeIPadMiniWifi, - RTCDeviceTypeIPadMiniGSM, - RTCDeviceTypeIPadMiniGSM_CDMA, - RTCDeviceTypeIPad3Wifi, - RTCDeviceTypeIPad3GSM_CDMA, - RTCDeviceTypeIPad3GSM, - RTCDeviceTypeIPad4Wifi, - RTCDeviceTypeIPad4GSM, - RTCDeviceTypeIPad4GSM_CDMA, - RTCDeviceTypeIPadAirWifi, - RTCDeviceTypeIPadAirCellular, - RTCDeviceTypeIPadMini2GWifi, - RTCDeviceTypeIPadMini2GCellular, - RTCDeviceTypeSimulatori386, - RTCDeviceTypeSimulatorx86_64, -}; - -@interface UIDevice (RTCDevice) - -+ (RTCDeviceType)deviceType; -+ (NSString *)stringForDeviceType:(RTCDeviceType)deviceType; -+ (BOOL)isIOS9OrLater; -+ (BOOL)isIOS11OrLater; - -@end diff --git a/ios/WebRTC.framework/Headers/WebRTC.h b/ios/WebRTC.framework/Headers/WebRTC.h deleted file mode 100644 index 64ca544288..0000000000 --- a/ios/WebRTC.framework/Headers/WebRTC.h +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import -#if TARGET_OS_IPHONE -#import -#import -#endif -#import -#import -#import -#import -#if TARGET_OS_IPHONE -#import -#endif -#import -#import -#import -#import -#if TARGET_OS_IPHONE -#import -#import -#import -#endif -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#if TARGET_OS_IPHONE -#import -#endif diff --git a/ios/WebRTC.framework/Info.plist b/ios/WebRTC.framework/Info.plist deleted file mode 100644 index 865f95d2d6..0000000000 Binary files a/ios/WebRTC.framework/Info.plist and /dev/null differ diff --git a/ios/WebRTC.framework/Modules/module.modulemap b/ios/WebRTC.framework/Modules/module.modulemap deleted file mode 100644 index cd485a4e81..0000000000 --- a/ios/WebRTC.framework/Modules/module.modulemap +++ /dev/null @@ -1,6 +0,0 @@ -framework module WebRTC { - umbrella header "WebRTC.h" - - export * - module * { export * } -} diff --git a/ios/flutter_webrtc.podspec b/ios/flutter_webrtc.podspec new file mode 100644 index 0000000000..778368a8a0 --- /dev/null +++ b/ios/flutter_webrtc.podspec @@ -0,0 +1,26 @@ +# +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# +Pod::Spec.new do |s| + s.name = 'flutter_webrtc' + s.version = '0.14.0' + s.summary = 'Flutter WebRTC plugin for iOS.' + s.description = <<-DESC +A new flutter plugin project. 
+ DESC + s.homepage = 'https://github.com/cloudwebrtc/flutter-webrtc' + s.license = { :file => '../LICENSE' } + s.author = { 'CloudWebRTC' => 'duanweiwei1982@gmail.com' } + s.source = { :path => '.' } + s.source_files = 'Classes/**/*' + s.public_header_files = 'Classes/**/*.h' + s.dependency 'Flutter' + s.dependency 'WebRTC-SDK', '125.6422.07' + s.ios.deployment_target = '13.0' + s.static_framework = true + s.pod_target_xcconfig = { + 'CLANG_CXX_LANGUAGE_STANDARD' => 'c++14', + 'USER_HEADER_SEARCH_PATHS' => 'Classes/**/*.h' + } + s.libraries = 'c++' +end diff --git a/ios/webrtc.podspec b/ios/webrtc.podspec deleted file mode 100644 index eeaf5128f7..0000000000 --- a/ios/webrtc.podspec +++ /dev/null @@ -1,21 +0,0 @@ -# -# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html -# -Pod::Spec.new do |s| - s.name = 'webrtc' - s.version = '0.0.1' - s.summary = 'A new flutter plugin project.' - s.description = <<-DESC -A new flutter plugin project. - DESC - s.homepage = 'http://www.cloudwebrtc.com' - s.license = { :file => '../LICENSE' } - s.author = { 'Your Company' => 'email@example.com' } - s.source = { :path => '.' 
} - s.source_files = 'Classes/**/*' - s.public_header_files = 'Classes/**/*.h' - s.dependency 'Flutter' - s.vendored_frameworks = 'WebRTC.framework' - s.ios.deployment_target = '8.0' -end - diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart new file mode 100644 index 0000000000..b7dd3a8fc6 --- /dev/null +++ b/lib/flutter_webrtc.dart @@ -0,0 +1,25 @@ +library flutter_webrtc; + +export 'package:webrtc_interface/webrtc_interface.dart' + hide MediaDevices, MediaRecorder, Navigator; + +export 'src/helper.dart'; +export 'src/desktop_capturer.dart'; +export 'src/media_devices.dart'; +export 'src/media_recorder.dart'; +export 'src/video_renderer_extension.dart'; +export 'src/native/factory_impl.dart' + if (dart.library.js_interop) 'src/web/factory_impl.dart'; +export 'src/native/rtc_video_renderer_impl.dart' + if (dart.library.js_interop) 'src/web/rtc_video_renderer_impl.dart'; +export 'src/native/rtc_video_view_impl.dart' + if (dart.library.js_interop) 'src/web/rtc_video_view_impl.dart'; +export 'src/native/utils.dart' + if (dart.library.js_interop) 'src/web/utils.dart'; +export 'src/native/adapter_type.dart'; +export 'src/native/camera_utils.dart'; +export 'src/native/audio_management.dart'; +export 'src/native/android/audio_configuration.dart'; +export 'src/native/ios/audio_configuration.dart'; +export 'src/native/rtc_video_platform_view_controller.dart'; +export 'src/native/rtc_video_platform_view.dart'; diff --git a/lib/get_user_media.dart b/lib/get_user_media.dart deleted file mode 100644 index 47c46f224d..0000000000 --- a/lib/get_user_media.dart +++ /dev/null @@ -1,58 +0,0 @@ -import 'dart:async'; -import 'package:flutter/services.dart'; -import 'package:webrtc/media_stream.dart'; -import 'package:webrtc/utils.dart'; - -class navigator { - static Future getUserMedia( - Map mediaConstraints) async { - MethodChannel channel = WebRTC.methodChannel(); - try { - final Map response = await channel.invokeMethod( - 'getUserMedia', - {'constraints': 
mediaConstraints}, - ); - String streamId = response["streamId"]; - MediaStream stream = new MediaStream(streamId); - stream.setMediaTracks(response['audioTracks'], response['videoTracks']); - return stream; - } on PlatformException catch (e) { - throw 'Unable to getUserMedia: ${e.message}'; - } - } - -/* Implement screen sharing, - * use MediaProjection for Android and use ReplayKit for iOS - * TODO: implement for native layer. - * */ -static Future getDisplayMedia( - Map mediaConstraints) async { - MethodChannel channel = WebRTC.methodChannel(); - try { - final Map response = await channel.invokeMethod( - 'getDisplayMedia', - {'constraints': mediaConstraints}, - ); - String streamId = response["streamId"]; - MediaStream stream = new MediaStream(streamId); - stream.setMediaTracks(response['audioTracks'], response['videoTracks']); - return stream; - } on PlatformException catch (e) { - throw 'Unable to getDisplayMedia: ${e.message}'; - } - } - - static Future> getSources() async { - MethodChannel channel = WebRTC.methodChannel(); - try { - final Map response = await channel.invokeMethod( - 'getSources', - {}, - ); - List sources = response["sources"]; - return sources; - } on PlatformException catch (e) { - throw 'Unable to getSources: ${e.message}'; - } - } -} diff --git a/lib/media_stream.dart b/lib/media_stream.dart deleted file mode 100644 index a0f6f286ca..0000000000 --- a/lib/media_stream.dart +++ /dev/null @@ -1,78 +0,0 @@ -import 'dart:async'; -import 'package:flutter/services.dart'; -import 'package:webrtc/media_stream_track.dart'; -import 'package:webrtc/utils.dart'; - - - -class MediaStream { - MethodChannel _channel = WebRTC.methodChannel(); - String _streamId; - List _audioTracks = new List(); - List _videoTracks = new List(); - MediaStream(this._streamId); - - void setMediaTracks(List audioTracks, List videoTracks){ - audioTracks.forEach((track){ - _audioTracks.add(new MediaStreamTrack(track["id"], track["label"], track["kind"], track["enabled"])); - 
}); - videoTracks.forEach((track){ - _videoTracks.add(new MediaStreamTrack(track["id"], track["label"], track["kind"], track["enabled"])); - }); - } - - Future getMediaTracks() async { - _channel = WebRTC.methodChannel(); - final Map response = await _channel.invokeMethod( - 'mediaStreamGetTracks', - {'streamId': _streamId}, - ); - - List audioTracks = response['audioTracks']; - audioTracks.forEach((track){ - _audioTracks.add(new MediaStreamTrack(track["id"], track["label"], track["kind"], track["enabled"])); - }); - - List videoTracks = response['videoTracks']; - videoTracks.forEach((track){ - _videoTracks.add(new MediaStreamTrack(track["id"], track["label"], track["kind"], track["enabled"])); - }); - } - - String get id => _streamId; - addTrack(MediaStreamTrack track) { - if (track.kind == 'audio') - _audioTracks.add(track); - else - _videoTracks.add(track); - - _channel.invokeMethod('mediaStreamAddTrack', - {'streamId': _streamId, 'trackId': track.id}); - } - - removeTrack(MediaStreamTrack track) { - if (track.kind == 'audio') - _audioTracks.remove(track); - else - _videoTracks.remove(track); - - _channel.invokeMethod('mediaStreamRemoveTrack', - {'streamId': _streamId, 'trackId': track.id}); - } - - List getAudioTracks() { - return _audioTracks; - } - - List getVideoTracks() { - return _videoTracks; - } - - @override - Future dispose() async { - await _channel.invokeMethod( - 'streamDispose', - {'streamId': _streamId}, - ); - } -} diff --git a/lib/media_stream_track.dart b/lib/media_stream_track.dart deleted file mode 100644 index 452634364b..0000000000 --- a/lib/media_stream_track.dart +++ /dev/null @@ -1,30 +0,0 @@ -import 'package:flutter/services.dart'; -import 'package:webrtc/utils.dart'; - -class MediaStreamTrack { - MethodChannel _channel = WebRTC.methodChannel(); - String _trackId; - String _label; - String _kind; - bool _enabled; - - MediaStreamTrack(this._trackId, this._label, this._kind, this._enabled); - - set enabled(bool enabled) { - 
_channel.invokeMethod('mediaStreamTrackEnabled', - {'trackId': _trackId, 'enabled': enabled}); - _enabled = enabled; - } - - bool get enabled => _enabled; - String get label => _label; - String get kind => _kind; - String get id => _trackId; - - void dispose() async { - await _channel.invokeMethod( - 'trackDispose', - {'trackId': _trackId}, - ); - } -} diff --git a/lib/rtc_data_channel.dart b/lib/rtc_data_channel.dart deleted file mode 100644 index aa7df1362d..0000000000 --- a/lib/rtc_data_channel.dart +++ /dev/null @@ -1,95 +0,0 @@ -import 'dart:async'; -import 'package:flutter/services.dart'; -import 'package:webrtc/utils.dart'; - -class RTCDataChannelInit { - bool ordered = true; - int maxRetransmitTime = -1; - int maxRetransmits = -1; - String protocol = 'sctp'; //sctp | quic - String binaryType = 'text'; // "binary" || text - bool negotiated = false; - int id = 0; - Map toMap() { - return { - 'ordered': ordered, - 'maxRetransmitTime': maxRetransmitTime, - 'maxRetransmits': maxRetransmits, - 'protocol': protocol, - 'negotiated': negotiated, - 'id': id - }; - } -} - -enum RTCDataChannelState { - RTCDataChannelConnecting, - RTCDataChannelOpen, - RTCDataChannelClosing, - RTCDataChannelClosed, -} - -typedef void RTCDataChannelStateCallback(RTCDataChannelState state); -typedef void RTCDataChannelOnMessageCallback(String data); - -class RTCDataChannel { - String _peerConnectionId; - String _label; - int _dataChannelId; - MethodChannel _channel = WebRTC.methodChannel(); - StreamSubscription _eventSubscription; - RTCDataChannelStateCallback onDataChannelState; - RTCDataChannelOnMessageCallback onMessage; - - RTCDataChannel(this._peerConnectionId, this._label, this._dataChannelId){ - _eventSubscription = _eventChannelFor(_dataChannelId) - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - - /* - * RTCDataChannel event listener. 
- */ - void eventListener(dynamic event) { - final Map map = event; - switch (map['event']) { - case 'dataChannelStateChanged': - int dataChannelId = map['id']; - String state = map['state']; - if (this.onDataChannelState != null) - this.onDataChannelState(rtcDataChannelStateForString(state)); - break; - case 'dataChannelReceiveMessage': - int dataChannelId = map['id']; - String type = map['type']; - String data = map['data']; - if (this.onMessage != null) - this.onMessage(data); - break; - } - } - - EventChannel _eventChannelFor(int dataChannelId) { - return new EventChannel( - 'cloudwebrtc.com/WebRTC/dataChannelEvent$dataChannelId'); - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } - - void send(String type, dynamic data){ - _channel.invokeMethod('dataChannelSend', - { 'peerConnectionId': _peerConnectionId, - 'dataChannelId': _dataChannelId, - 'type': type, - 'data': data}); - } - - Future close() async { - await _eventSubscription?.cancel(); - await _channel.invokeMethod('dataChannelClose', - {'peerConnectionId': _peerConnectionId, 'dataChannelId': _dataChannelId}); - } -} diff --git a/lib/rtc_dtmf_sender.dart b/lib/rtc_dtmf_sender.dart deleted file mode 100644 index 78455056e3..0000000000 --- a/lib/rtc_dtmf_sender.dart +++ /dev/null @@ -1,4 +0,0 @@ - -class RTCDTMFSender { - -} \ No newline at end of file diff --git a/lib/rtc_ice_candidate.dart b/lib/rtc_ice_candidate.dart deleted file mode 100644 index 4ae192f58d..0000000000 --- a/lib/rtc_ice_candidate.dart +++ /dev/null @@ -1,16 +0,0 @@ - -class RTCIceCandidate { - String candidate; - String sdpMid; - int sdpMlineIndex; - - RTCIceCandidate(this.candidate, this.sdpMid, this.sdpMlineIndex); - - dynamic toMap() { - return { - "candidate": candidate, - "sdpMid": sdpMid, - "sdpMLineIndex": sdpMlineIndex - }; - } -} diff --git a/lib/rtc_peerconnection.dart b/lib/rtc_peerconnection.dart deleted file mode 100644 index 41e93111ff..0000000000 --- a/lib/rtc_peerconnection.dart 
+++ /dev/null @@ -1,307 +0,0 @@ -import 'dart:async'; -import 'package:flutter/services.dart'; -import 'package:webrtc/media_stream.dart'; -import 'package:webrtc/media_stream_track.dart'; -import 'package:webrtc/rtc_data_channel.dart'; -import 'package:webrtc/rtc_ice_candidate.dart'; -import 'package:webrtc/rtc_session_description.dart'; -import 'package:webrtc/rtc_stats_report.dart'; -import 'package:webrtc/utils.dart'; - - -enum RTCSignalingState { - RTCSignalingStateStable, - RTCSignalingStateHaveLocalOffer, - RTCSignalingStateHaveRemoteOffer, - RTCSignalingStateHaveLocalPrAnswer, - RTCSignalingStateHaveRemotePrAnswer, - RTCSignalingStateClosed -} - -enum RTCIceGatheringState { - RTCIceGatheringStateNew, - RTCIceGatheringStateGathering, - RTCIceGatheringStateComplete -} - -enum RTCIceConnectionState { - RTCIceConnectionStateNew, - RTCIceConnectionStateChecking, - RTCIceConnectionStateCompleted, - RTCIceConnectionStateConnected, - RTCIceConnectionStateCount, - RTCIceConnectionStateFailed, - RTCIceConnectionStateDisconnected, - RTCIceConnectionStateClosed, -} - -/* - * 回调类型定义. 
- */ -typedef void SignalingStateCallback(RTCSignalingState state); -typedef void IceGatheringStateCallback(RTCIceGatheringState state); -typedef void IceConnectionStateCallback(RTCIceConnectionState state); -typedef void IceCandidateCallback(RTCIceCandidate candidate); -typedef void AddStreamCallback(MediaStream stream); -typedef void RemoveStreamCallback(MediaStream stream); -typedef void AddTrackCallback(MediaStream stream, MediaStreamTrack track); -typedef void RemoveTrackCallback(MediaStream stream, MediaStreamTrack track); -typedef void RTCDataChannelCallback(RTCDataChannel channel); - -/* - * PeerConnection - */ -class RTCPeerConnection { - // private: - String _peerConnectionId; - MethodChannel _channel = WebRTC.methodChannel(); - StreamSubscription _eventSubscription; - List _localStreams; - List _remoteStreams; - RTCDataChannel _dataChannel; - - // public: delegate - SignalingStateCallback onSignalingState; - IceGatheringStateCallback onIceGatheringState; - IceConnectionStateCallback onIceConnectionState; - IceCandidateCallback onIceCandidate; - AddStreamCallback onAddStream; - RemoveStreamCallback onRemoveStream; - AddTrackCallback onAddTrack; - RemoveTrackCallback onRemoveTrack; - RTCDataChannelCallback onDataChannel; - dynamic onRenegotiationNeeded; - - final Map defaultSdpConstraints = { - "mandatory": { - "OfferToReceiveAudio": true, - "OfferToReceiveVideo": true, - }, - "optional": [], - }; - - RTCPeerConnection(this._peerConnectionId) { - _eventSubscription = _eventChannelFor(_peerConnectionId) - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - - /* - * PeerConnection event listener. 
- */ - void eventListener(dynamic event) { - final Map map = event; - - switch (map['event']) { - case 'signalingState': - String state = map['state']; - if (this.onSignalingState != null) - this.onSignalingState(signalingStateForString(state)); - break; - case 'iceGatheringState': - String state = map['state']; - if (this.onSignalingState != null) - this.onIceGatheringState(iceGatheringStateforString(state)); - break; - case 'iceConnectionState': - String state = map['state']; - if (this.onSignalingState != null) - this.onIceConnectionState(iceConnectionStateForString(state)); - break; - case 'onCandidate': - Map cand = map['candidate']; - RTCIceCandidate candidate = new RTCIceCandidate( - cand['candidate'], cand['sdpMid'], cand['sdpMLineIndex']); - if (this.onIceCandidate != null) this.onIceCandidate(candidate); - break; - case 'onAddStream': - String streamId = map['streamId']; - MediaStream stream = new MediaStream(streamId); - stream.setMediaTracks(map['audioTracks'], map['videoTracks']); - if (this.onAddStream != null) this.onAddStream(stream); - break; - case 'onRemoveStream': - String streamId = map['streamId']; - MediaStream stream = new MediaStream(streamId); - if (this.onRemoveStream != null) this.onRemoveStream(stream); - break; - case 'onAddTrack': - String streamId = map['streamId']; - MediaStream stream = new MediaStream(streamId); - Map track = map['track']; - MediaStreamTrack newTrack = new MediaStreamTrack( - map['trackId'], track['label'], track['kind'], track['enabled']); - if (this.onAddTrack != null) this.onAddTrack(stream, newTrack); - break; - case 'onRemoveTrack': - String streamId = map['streamId']; - MediaStream stream = new MediaStream(streamId); - Map track = map['track']; - MediaStreamTrack oldTrack = new MediaStreamTrack( - map['trackId'], track['label'], track['kind'], track['enabled']); - if (this.onRemoveTrack != null) this.onRemoveTrack(stream, oldTrack); - break; - case 'didOpenDataChannel': - int dataChannelId = map['id']; - 
String label = map['label']; - _dataChannel = - new RTCDataChannel(this._peerConnectionId, label, dataChannelId); - if (this.onDataChannel != null) this.onDataChannel(_dataChannel); - break; - case 'onRenegotiationNeeded': - if (this.onRenegotiationNeeded != null) this.onRenegotiationNeeded(); - break; - } - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } - - Future dispose() async { - await _eventSubscription?.cancel(); - await _channel.invokeMethod( - 'peerConnectionDispose', - {'peerConnectionId': _peerConnectionId}, - ); - } - - EventChannel _eventChannelFor(String peerConnectionId) { - return new EventChannel( - 'cloudwebrtc.com/WebRTC/peerConnectoinEvent$peerConnectionId'); - } - - Future createOffer( - Map constraints) async { - try { - final Map response = - await _channel.invokeMethod('createOffer', { - 'peerConnectionId': this._peerConnectionId, - 'constraints': - constraints.length == 0 ? defaultSdpConstraints : constraints, - }); - - String sdp = response['sdp']; - String type = response['type']; - return new RTCSessionDescription(sdp, type); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::createOffer: ${e.message}'; - } - } - - Future createAnswer( - Map constraints) async { - try { - final Map response = - await _channel.invokeMethod('createAnswer', { - 'peerConnectionId': this._peerConnectionId, - 'constraints': - constraints.length == 0 ? 
defaultSdpConstraints : constraints, - }); - String sdp = response['sdp']; - String type = response['type']; - return new RTCSessionDescription(sdp, type); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::createAnswer: ${e.message}'; - } - } - - void addStream(MediaStream stream) { - _channel.invokeMethod('addStream', { - 'peerConnectionId': this._peerConnectionId, - 'streamId': stream.id, - }); - } - - void removeStream(MediaStream stream) { - _channel.invokeMethod('removeStream', { - 'peerConnectionId': this._peerConnectionId, - 'streamId': stream.id, - }); - } - - Future setLocalDescription(RTCSessionDescription description) async { - try { - await _channel.invokeMethod('setLocalDescription', { - 'peerConnectionId': this._peerConnectionId, - 'description': description.toMap(), - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::setLocalDescription: ${e.message}'; - } - } - - Future setRemoteDescription(RTCSessionDescription description) async { - try { - await _channel.invokeMethod('setRemoteDescription', { - 'peerConnectionId': this._peerConnectionId, - 'description': description.toMap(), - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::setRemoteDescription: ${e.message}'; - } - } - - void addCandidate(RTCIceCandidate candidate) { - _channel.invokeMethod('addCandidate', { - 'peerConnectionId': this._peerConnectionId, - 'candidate': candidate.toMap(), - }); - } - - Future> getStats(MediaStreamTrack track) async { - try { - final Map response = - await _channel.invokeMethod('getStats', { - 'peerConnectionId': this._peerConnectionId, - 'track': track != null ? 
track.id : null - }); - List stats = new List(); - if(response != null){ - List reports = response['stats']; - reports.forEach((report){ - stats.add(new StatsReport(report['id'], report['type'], report['timestamp'], report['values'])); - }); - } - return stats; - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::getStats: ${e.message}'; - } - } - - List getLocalStreams() { - return _localStreams; - } - - List getRemoteStreams() { - return _remoteStreams; - } - - Future createDataChannel( - String label, RTCDataChannelInit dataChannelDict) async { - try { - final Map response = - await _channel.invokeMethod('createDataChannel', { - 'peerConnectionId': this._peerConnectionId, - 'label': label, - 'dataChannelDict': dataChannelDict.toMap() - }); - _dataChannel = - new RTCDataChannel(this._peerConnectionId, label, dataChannelDict.id); - return _dataChannel; - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::createDataChannel: ${e.message}'; - } - } - - Future close() async { - try { - await _channel.invokeMethod('peerConnectionClose', { - 'peerConnectionId': this._peerConnectionId, - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::close: ${e.message}'; - } - } -} diff --git a/lib/rtc_peerconnection_factory.dart b/lib/rtc_peerconnection_factory.dart deleted file mode 100644 index 764a446cc5..0000000000 --- a/lib/rtc_peerconnection_factory.dart +++ /dev/null @@ -1,26 +0,0 @@ -import 'dart:async'; -import 'package:flutter/services.dart'; -import 'package:webrtc/rtc_peerconnection.dart'; -import 'package:webrtc/utils.dart'; - -Future createPeerConnection(Map configuration, Map constraints) async { - MethodChannel channel = WebRTC.methodChannel(); - - Map defaultConstraints = { - "mandatory": {}, - "optional": [ - {"DtlsSrtpKeyAgreement": true }, - ], - }; - - final Map response = await channel.invokeMethod( - 'createPeerConnection', - { - 'configuration': configuration, - 'constraints': 
constraints.length == 0? defaultConstraints : constraints - }, - ); - - String peerConnectionId = response['peerConnectionId']; - return new RTCPeerConnection(peerConnectionId); -} \ No newline at end of file diff --git a/lib/rtc_session_description.dart b/lib/rtc_session_description.dart deleted file mode 100644 index adc32abe72..0000000000 --- a/lib/rtc_session_description.dart +++ /dev/null @@ -1,10 +0,0 @@ - -class RTCSessionDescription { - String sdp; - String type; - RTCSessionDescription(this.sdp,this.type); - - dynamic toMap() { - return { "sdp": this.sdp, "type": this.type}; - } -} \ No newline at end of file diff --git a/lib/rtc_stats_report.dart b/lib/rtc_stats_report.dart deleted file mode 100644 index 7351723a32..0000000000 --- a/lib/rtc_stats_report.dart +++ /dev/null @@ -1,8 +0,0 @@ - -class StatsReport { - String id; - String type; - double timestamp; - Map values; - StatsReport(this.id, this.type, this.timestamp, this.values); -} \ No newline at end of file diff --git a/lib/rtc_video_view.dart b/lib/rtc_video_view.dart deleted file mode 100644 index 9611cfeaa7..0000000000 --- a/lib/rtc_video_view.dart +++ /dev/null @@ -1,166 +0,0 @@ -import 'dart:async'; -import 'dart:math'; -import 'package:flutter/material.dart'; -import 'package:flutter/services.dart'; -import 'package:webrtc/media_stream.dart'; -import 'package:webrtc/utils.dart'; - -enum RTCVideoViewObjectFit { - RTCVideoViewObjectFitContain, - RTCVideoViewObjectFitCover, -} - -typedef void VideoRotationChangeCallback(int textureId, int rotation); -typedef void VideoSizeChangeCallback( - int textureId, double width, double height); - -class RTCVideoRenderer { - MethodChannel _channel = WebRTC.methodChannel(); - int _textureId; - int _rotation = 0; - double _width = 0.0, _height = 0.0; - bool _mirror = false; - MediaStream _srcObject; - RTCVideoViewObjectFit _objectFit = - RTCVideoViewObjectFit.RTCVideoViewObjectFitContain; - StreamSubscription _eventSubscription; - VideoSizeChangeCallback 
onVideoSizeChanged; - VideoRotationChangeCallback onVideoRotationChanged; - dynamic onFirstFrameRendered; - - initialize() async { - final Map response = - await _channel.invokeMethod('createVideoRenderer', {}); - _textureId = response['textureId']; - _eventSubscription = _eventChannelFor(_textureId) - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - - int get rotation => _rotation; - - double get width => _width; - - double get height => _height; - - set mirror(bool mirror) { - _mirror = mirror; - } - - set objectFit(RTCVideoViewObjectFit objectFit) { - _objectFit = objectFit; - } - - set srcObject(MediaStream stream) { - _srcObject = stream; - _channel.invokeMethod('videoRendererSetSrcObject', { - 'textureId': _textureId, - 'streamId': stream != null ? stream.id : '' - }); - } - - Future dispose() async { - await _channel.invokeMethod( - 'videoRendererDispose', - {'textureId': _textureId}, - ); - } - - EventChannel _eventChannelFor(int textureId) { - return new EventChannel('cloudwebrtc.com/WebRTC/Texture$textureId'); - } - - void eventListener(dynamic event) { - final Map map = event; - switch (map['event']) { - case 'didTextureChangeRotation': - _rotation = map['rotation']; - if (this.onVideoRotationChanged != null) - this.onVideoRotationChanged(_textureId, _rotation); - break; - case 'didTextureChangeVideoSize': - _width = map['width']; - _height = map['height']; - if (this.onVideoSizeChanged != null) - this.onVideoSizeChanged(_textureId, _width, _height); - break; - case 'didFirstFrameRendered': - if (this.onFirstFrameRendered != null) this.onFirstFrameRendered(); - break; - } - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } -} - -class RTCVideoView extends StatefulWidget { - final RTCVideoRenderer renderer; - RTCVideoView(this.renderer); - @override - _RTCVideoViewState createState() => new _RTCVideoViewState(renderer); -} - -class _RTCVideoViewState extends State { - final 
RTCVideoRenderer renderer; - var aspectRatio = 1.0; - - _RTCVideoViewState(this.renderer); - - @override - void initState() { - super.initState(); - _setCallbacks(); - } - - @override - void deactivate() { - super.deactivate(); - renderer.onVideoRotationChanged = null; - renderer.onVideoSizeChanged = null; - renderer.onFirstFrameRendered = null; - } - - void _setCallbacks() { - renderer.onVideoRotationChanged = (int textureId, int rotation) { - setState(() { - _updateContainerSize(); - }); - }; - renderer.onVideoSizeChanged = (int textureId, double width, double height) { - setState(() { - _updateContainerSize(); - }); - }; - renderer.onFirstFrameRendered = () { - setState(() { - _updateContainerSize(); - }); - }; - } - - void _updateContainerSize() { - double textureWidth = 0.0, textureHeight = 0.0; - if (renderer.rotation == 90 || renderer.rotation == 270) { - textureWidth = min(renderer.width, renderer.height); - textureHeight = max(renderer.width, renderer.height); - aspectRatio = textureWidth / textureHeight; - } else { - textureWidth = max(renderer.width, renderer.height); - textureHeight = min(renderer.width, renderer.height); - aspectRatio = textureWidth / textureHeight; - } - } - - @override - Widget build(BuildContext context) { - return new Center( - child: (this.renderer._textureId == null || this.renderer._srcObject == null) - ? 
new Container() - : new AspectRatio( - aspectRatio: aspectRatio, - child: new Texture(textureId: this.renderer._textureId))); - } -} diff --git a/lib/src/desktop_capturer.dart b/lib/src/desktop_capturer.dart new file mode 100644 index 0000000000..1780cb593c --- /dev/null +++ b/lib/src/desktop_capturer.dart @@ -0,0 +1,71 @@ +import 'dart:async'; +import 'dart:typed_data'; + +enum SourceType { + Screen, + Window, +} + +final desktopSourceTypeToString = { + SourceType.Screen: 'screen', + SourceType.Window: 'window', +}; + +final tringToDesktopSourceType = { + 'screen': SourceType.Screen, + 'window': SourceType.Window, +}; + +class ThumbnailSize { + ThumbnailSize(this.width, this.height); + factory ThumbnailSize.fromMap(Map map) { + return ThumbnailSize(map['width'], map['height']); + } + int width; + int height; + + Map toMap() => {'width': width, 'height': height}; +} + +abstract class DesktopCapturerSource { + /// The identifier of a window or screen that can be used as a + /// chromeMediaSourceId constraint when calling + String get id; + + /// A screen source will be named either Entire Screen or Screen , + /// while the name of a window source will match the window title. + String get name; + + ///A thumbnail image of the source. jpeg encoded. + Uint8List? get thumbnail; + + /// specified in the options passed to desktopCapturer.getSources. + /// The actual size depends on the scale of the screen or window. + ThumbnailSize get thumbnailSize; + + /// The type of the source. 
+ SourceType get type; + + StreamController get onNameChanged => throw UnimplementedError(); + + StreamController get onThumbnailChanged => + throw UnimplementedError(); +} + +abstract class DesktopCapturer { + StreamController get onAdded => + throw UnimplementedError(); + StreamController get onRemoved => + throw UnimplementedError(); + StreamController get onNameChanged => + throw UnimplementedError(); + StreamController get onThumbnailChanged => + throw UnimplementedError(); + + ///Get the screen source of the specified types + Future> getSources( + {required List types, ThumbnailSize? thumbnailSize}); + + /// Updates the list of screen sources of the specified types + Future updateSources({required List types}); +} diff --git a/lib/src/helper.dart b/lib/src/helper.dart new file mode 100644 index 0000000000..6f1e9666bf --- /dev/null +++ b/lib/src/helper.dart @@ -0,0 +1,182 @@ +import 'dart:math'; + +import 'package:flutter/foundation.dart'; + +import '../flutter_webrtc.dart'; + +class Helper { + static Future> enumerateDevices(String type) async { + var devices = await navigator.mediaDevices.enumerateDevices(); + return devices.where((d) => d.kind == type).toList(); + } + + /// Return the available cameras + /// + /// Note: Make sure to call this gettet after + /// navigator.mediaDevices.getUserMedia(), otherwise the devices will not be + /// listed. + static Future> get cameras => + enumerateDevices('videoinput'); + + /// Return the available audiooutputs + /// + /// Note: Make sure to call this gettet after + /// navigator.mediaDevices.getUserMedia(), otherwise the devices will not be + /// listed. + static Future> get audiooutputs => + enumerateDevices('audiooutput'); + + /// For web implementation, make sure to pass the target deviceId + static Future switchCamera(MediaStreamTrack track, + [String? deviceId, MediaStream? 
stream]) async { + if (track.kind != 'video') { + throw 'The is not a video track => $track'; + } + + if (!kIsWeb) { + return WebRTC.invokeMethod( + 'mediaStreamTrackSwitchCamera', + {'trackId': track.id}, + ).then((value) => value ?? false); + } + + if (deviceId == null) throw 'You need to specify the deviceId'; + if (stream == null) throw 'You need to specify the stream'; + + var cams = await cameras; + if (!cams.any((e) => e.deviceId == deviceId)) { + throw 'The provided deviceId is not available, make sure to retreive the deviceId from Helper.cammeras()'; + } + + // stop only video tracks + // so that we can recapture video track + stream.getVideoTracks().forEach((track) { + track.stop(); + stream.removeTrack(track); + }); + + var mediaConstraints = { + 'audio': false, // NO need to capture audio again + 'video': {'deviceId': deviceId} + }; + + var newStream = await openCamera(mediaConstraints); + var newCamTrack = newStream.getVideoTracks()[0]; + + await stream.addTrack(newCamTrack, addToNative: true); + + return Future.value(true); + } + + static Future setZoom(MediaStreamTrack videoTrack, double zoomLevel) => + CameraUtils.setZoom(videoTrack, zoomLevel); + + static Future setFocusMode( + MediaStreamTrack videoTrack, CameraFocusMode focusMode) => + CameraUtils.setFocusMode(videoTrack, focusMode); + + static Future setFocusPoint( + MediaStreamTrack videoTrack, Point? point) => + CameraUtils.setFocusPoint(videoTrack, point); + + static Future setExposureMode( + MediaStreamTrack videoTrack, CameraExposureMode exposureMode) => + CameraUtils.setExposureMode(videoTrack, exposureMode); + + static Future setExposurePoint( + MediaStreamTrack videoTrack, Point? point) => + CameraUtils.setExposurePoint(videoTrack, point); + + /// Used to select a specific audio output device. + /// + /// Note: This method is only used for Flutter native, + /// supported on iOS/Android/macOS/Windows. + /// + /// Android/macOS/Windows: Can be used to switch all output devices. 
+ /// iOS: you can only switch directly between the + /// speaker and the preferred device + /// web: flutter web can use RTCVideoRenderer.audioOutput instead + static Future selectAudioOutput(String deviceId) async { + await navigator.mediaDevices + .selectAudioOutput(AudioOutputOptions(deviceId: deviceId)); + } + + /// Set audio input device for Flutter native + /// Note: The usual practice in flutter web is to use deviceId as the + /// `getUserMedia` parameter to get a new audio track and replace it with the + /// audio track in the original rtpsender. + static Future selectAudioInput(String deviceId) => + NativeAudioManagement.selectAudioInput(deviceId); + + /// Enable or disable speakerphone + /// for iOS/Android only + static Future setSpeakerphoneOn(bool enable) => + NativeAudioManagement.setSpeakerphoneOn(enable); + + /// Ensure audio session + /// for iOS only + static Future ensureAudioSession() => + NativeAudioManagement.ensureAudioSession(); + + /// Enable speakerphone, but use bluetooth if audio output device available + /// for iOS/Android only + static Future setSpeakerphoneOnButPreferBluetooth() => + NativeAudioManagement.setSpeakerphoneOnButPreferBluetooth(); + + /// To select a a specific camera, you need to set constraints + /// eg. + /// var constraints = { + /// 'audio': true, + /// 'video': { + /// 'deviceId': Helper.cameras[0].deviceId, + /// } + /// }; + /// + /// var stream = await Helper.openCamera(constraints); + /// + static Future openCamera(Map mediaConstraints) { + return navigator.mediaDevices.getUserMedia(mediaConstraints); + } + + /// Set the volume for Flutter native + static Future setVolume(double volume, MediaStreamTrack track) => + NativeAudioManagement.setVolume(volume, track); + + /// Set the microphone mute/unmute for Flutter native + static Future setMicrophoneMute(bool mute, MediaStreamTrack track) => + NativeAudioManagement.setMicrophoneMute(mute, track); + + /// Set the audio configuration to for Android. 
+ /// Must be set before initiating a WebRTC session and cannot be changed + /// mid session. + static Future setAndroidAudioConfiguration( + AndroidAudioConfiguration androidAudioConfiguration) => + AndroidNativeAudioManagement.setAndroidAudioConfiguration( + androidAudioConfiguration); + + /// After Android app finishes a session, on audio focus loss, clear the active communication device. + static Future clearAndroidCommunicationDevice() => + WebRTC.invokeMethod('clearAndroidCommunicationDevice'); + + /// Set the audio configuration for iOS + static Future setAppleAudioConfiguration( + AppleAudioConfiguration appleAudioConfiguration) => + AppleNativeAudioManagement.setAppleAudioConfiguration( + appleAudioConfiguration); + + /// Set the audio configuration for iOS + static Future setAppleAudioIOMode(AppleAudioIOMode mode, + {bool preferSpeakerOutput = false}) => + AppleNativeAudioManagement.setAppleAudioConfiguration( + AppleNativeAudioManagement.getAppleAudioConfigurationForMode(mode, + preferSpeakerOutput: preferSpeakerOutput)); + + /// Request capture permission for Android + static Future requestCapturePermission() async { + if (WebRTC.platformIsAndroid) { + return await WebRTC.invokeMethod('requestCapturePermission'); + } else { + throw Exception('requestCapturePermission only support for Android'); + } + } +} diff --git a/lib/src/media_devices.dart b/lib/src/media_devices.dart new file mode 100644 index 0000000000..3b2f643ddf --- /dev/null +++ b/lib/src/media_devices.dart @@ -0,0 +1,23 @@ +import '../flutter_webrtc.dart'; + +class MediaDevices { + @Deprecated( + 'Use the navigator.mediaDevices.getUserMedia(Map) provide from the factory instead') + static Future getUserMedia( + Map mediaConstraints) async { + return navigator.mediaDevices.getUserMedia(mediaConstraints); + } + + @Deprecated( + 'Use the navigator.mediaDevices.getDisplayMedia(Map) provide from the factory instead') + static Future getDisplayMedia( + Map mediaConstraints) async { + return 
navigator.mediaDevices.getDisplayMedia(mediaConstraints); + } + + @Deprecated( + 'Use the navigator.mediaDevices.getSources() provide from the factory instead') + static Future> getSources() { + return navigator.mediaDevices.getSources(); + } +} diff --git a/lib/src/media_recorder.dart b/lib/src/media_recorder.dart new file mode 100644 index 0000000000..370cfa9c78 --- /dev/null +++ b/lib/src/media_recorder.dart @@ -0,0 +1,47 @@ +import 'package:flutter/foundation.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart' as rtc; + +import '../flutter_webrtc.dart'; +import 'native/media_recorder_impl.dart' show MediaRecorderNative; + +class MediaRecorder extends rtc.MediaRecorder { + MediaRecorder({ + String? albumName, + }) : _delegate = (kIsWeb || kIsWasm) + ? mediaRecorder() + : MediaRecorderNative(albumName: albumName); + + final rtc.MediaRecorder _delegate; + + @override + Future start( + String path, { + MediaStreamTrack? videoTrack, + RecorderAudioChannel? audioChannel, + int rotationDegrees = 0, + }) { + return _delegate.start( + path, + videoTrack: videoTrack, + audioChannel: audioChannel, + ); + } + + @override + Future stop() => _delegate.stop(); + + @override + void startWeb( + MediaStream stream, { + Function(dynamic blob, bool isLastOne)? onDataChunk, + String? mimeType, + int timeSlice = 1000, + }) => + _delegate.startWeb( + stream, + onDataChunk: onDataChunk, + mimeType: mimeType ?? 
'video/webm', + timeSlice: timeSlice, + ); +} diff --git a/lib/src/native/adapter_type.dart b/lib/src/native/adapter_type.dart new file mode 100644 index 0000000000..fa5f85a833 --- /dev/null +++ b/lib/src/native/adapter_type.dart @@ -0,0 +1,9 @@ +enum AdapterType { + adapterTypeUnknown, + adapterTypeEthernet, + adapterTypeWifi, + adapterTypeCellular, + adapterTypeVpn, + adapterTypeLoopback, + adapterTypeAny +} diff --git a/lib/src/native/android/audio_configuration.dart b/lib/src/native/android/audio_configuration.dart new file mode 100644 index 0000000000..150962b6a0 --- /dev/null +++ b/lib/src/native/android/audio_configuration.dart @@ -0,0 +1,158 @@ +import '../utils.dart'; + +enum AndroidAudioMode { + normal, + callScreening, + inCall, + inCommunication, + ringtone, +} + +extension AndroidAudioModeEnumEx on String { + AndroidAudioMode toAndroidAudioMode() => + AndroidAudioMode.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioFocusMode { + gain, + gainTransient, + gainTransientExclusive, + gainTransientMayDuck +} + +extension AndroidAudioFocusModeEnumEx on String { + AndroidAudioFocusMode toAndroidAudioFocusMode() => + AndroidAudioFocusMode.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioStreamType { + accessibility, + alarm, + dtmf, + music, + notification, + ring, + system, + voiceCall +} + +extension AndroidAudioStreamTypeEnumEx on String { + AndroidAudioStreamType toAndroidAudioStreamType() => + AndroidAudioStreamType.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioAttributesUsageType { + alarm, + assistanceAccessibility, + assistanceNavigationGuidance, + assistanceSonification, + assistant, + game, + media, + notification, + notificationEvent, + notificationRingtone, + unknown, + voiceCommunication, + voiceCommunicationSignalling +} + +extension AndroidAudioAttributesUsageTypeEnumEx on String { + AndroidAudioAttributesUsageType toAndroidAudioAttributesUsageType() => + 
AndroidAudioAttributesUsageType.values + .firstWhere((d) => d.name == toLowerCase()); +} + +enum AndroidAudioAttributesContentType { + movie, + music, + sonification, + speech, + unknown +} + +extension AndroidAudioAttributesContentTypeEnumEx on String { + AndroidAudioAttributesContentType toAndroidAudioAttributesContentType() => + AndroidAudioAttributesContentType.values + .firstWhere((d) => d.name == toLowerCase()); +} + +class AndroidAudioConfiguration { + AndroidAudioConfiguration({ + this.manageAudioFocus, + this.androidAudioMode, + this.androidAudioFocusMode, + this.androidAudioStreamType, + this.androidAudioAttributesUsageType, + this.androidAudioAttributesContentType, + this.forceHandleAudioRouting, + }); + + /// Controls whether audio focus should be automatically managed during + /// a WebRTC session. + final bool? manageAudioFocus; + final AndroidAudioMode? androidAudioMode; + final AndroidAudioFocusMode? androidAudioFocusMode; + final AndroidAudioStreamType? androidAudioStreamType; + final AndroidAudioAttributesUsageType? androidAudioAttributesUsageType; + final AndroidAudioAttributesContentType? androidAudioAttributesContentType; + + /// On certain Android devices, audio routing does not function properly and + /// bluetooth microphones will not work unless audio mode is set to + /// `inCommunication` or `inCall`. Audio routing is turned off those cases. + /// + /// If this set to true, will attempt to do audio routing regardless of audio mode. + final bool? 
forceHandleAudioRouting; + + Map toMap() => { + if (manageAudioFocus != null) 'manageAudioFocus': manageAudioFocus!, + if (androidAudioMode != null) + 'androidAudioMode': androidAudioMode!.name, + if (androidAudioFocusMode != null) + 'androidAudioFocusMode': androidAudioFocusMode!.name, + if (androidAudioStreamType != null) + 'androidAudioStreamType': androidAudioStreamType!.name, + if (androidAudioAttributesUsageType != null) + 'androidAudioAttributesUsageType': + androidAudioAttributesUsageType!.name, + if (androidAudioAttributesContentType != null) + 'androidAudioAttributesContentType': + androidAudioAttributesContentType!.name, + if (forceHandleAudioRouting != null) + 'forceHandleAudioRouting': forceHandleAudioRouting!, + }; + + /// A pre-configured AndroidAudioConfiguration for media playback. + static final media = AndroidAudioConfiguration( + manageAudioFocus: true, + androidAudioMode: AndroidAudioMode.normal, + androidAudioFocusMode: AndroidAudioFocusMode.gain, + androidAudioStreamType: AndroidAudioStreamType.music, + androidAudioAttributesUsageType: AndroidAudioAttributesUsageType.media, + androidAudioAttributesContentType: + AndroidAudioAttributesContentType.unknown, + ); + + /// A pre-configured AndroidAudioConfiguration for voice communication. 
+ static final communication = AndroidAudioConfiguration( + manageAudioFocus: true, + androidAudioMode: AndroidAudioMode.inCommunication, + androidAudioFocusMode: AndroidAudioFocusMode.gain, + androidAudioStreamType: AndroidAudioStreamType.voiceCall, + androidAudioAttributesUsageType: + AndroidAudioAttributesUsageType.voiceCommunication, + androidAudioAttributesContentType: AndroidAudioAttributesContentType.speech, + ); +} + +class AndroidNativeAudioManagement { + static Future setAndroidAudioConfiguration( + AndroidAudioConfiguration config) async { + if (WebRTC.platformIsAndroid) { + await WebRTC.invokeMethod( + 'setAndroidAudioConfiguration', + {'configuration': config.toMap()}, + ); + } + } +} diff --git a/lib/src/native/audio_management.dart b/lib/src/native/audio_management.dart new file mode 100644 index 0000000000..dabf4d5b0f --- /dev/null +++ b/lib/src/native/audio_management.dart @@ -0,0 +1,69 @@ +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class NativeAudioManagement { + static Future selectAudioInput(String deviceId) async { + await WebRTC.invokeMethod( + 'selectAudioInput', + {'deviceId': deviceId}, + ); + } + + static Future setSpeakerphoneOn(bool enable) async { + await WebRTC.invokeMethod( + 'enableSpeakerphone', + {'enable': enable}, + ); + } + + static Future ensureAudioSession() async { + await WebRTC.invokeMethod('ensureAudioSession'); + } + + static Future setSpeakerphoneOnButPreferBluetooth() async { + await WebRTC.invokeMethod('enableSpeakerphoneButPreferBluetooth'); + } + + static Future setVolume(double volume, MediaStreamTrack track) async { + if (track.kind == 'audio') { + if (kIsWeb) { + final constraints = track.getConstraints(); + constraints['volume'] = volume; + await track.applyConstraints(constraints); + } else { + await WebRTC.invokeMethod('setVolume', { + 'trackId': 
track.id, + 'volume': volume, + 'peerConnectionId': + track is MediaStreamTrackNative ? track.peerConnectionId : null + }); + } + } + + return Future.value(); + } + + static Future setMicrophoneMute( + bool mute, MediaStreamTrack track) async { + if (track.kind != 'audio') { + throw 'The is not an audio track => $track'; + } + + if (!kIsWeb) { + try { + await WebRTC.invokeMethod( + 'setMicrophoneMute', + {'trackId': track.id, 'mute': mute}, + ); + } on PlatformException catch (e) { + throw 'Unable to MediaStreamTrack::setMicrophoneMute: ${e.message}'; + } + } + track.enabled = !mute; + } +} diff --git a/lib/src/native/camera_utils.dart b/lib/src/native/camera_utils.dart new file mode 100644 index 0000000000..3557a64e46 --- /dev/null +++ b/lib/src/native/camera_utils.dart @@ -0,0 +1,93 @@ +import 'dart:math'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'utils.dart'; + +enum CameraFocusMode { auto, locked } + +enum CameraExposureMode { auto, locked } + +class CameraUtils { + static Future setZoom( + MediaStreamTrack videoTrack, double zoomLevel) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetZoom', + {'trackId': videoTrack.id, 'zoomLevel': zoomLevel}, + ); + } else { + throw Exception('setZoom only support for mobile devices!'); + } + } + + /// Set the exposure point for the camera, focusMode can be: + /// 'auto', 'locked' + static Future setFocusMode( + MediaStreamTrack videoTrack, CameraFocusMode focusMode) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetFocusMode', + { + 'trackId': videoTrack.id, + 'focusMode': focusMode.name, + }, + ); + } else { + throw Exception('setFocusMode only support for mobile devices!'); + } + } + + static Future setFocusPoint( + MediaStreamTrack videoTrack, Point? 
point) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetFocusPoint', + { + 'trackId': videoTrack.id, + 'focusPoint': { + 'reset': point == null, + 'x': point?.x, + 'y': point?.y, + }, + }, + ); + } else { + throw Exception('setFocusPoint only support for mobile devices!'); + } + } + + static Future setExposureMode( + MediaStreamTrack videoTrack, CameraExposureMode exposureMode) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetExposureMode', + { + 'trackId': videoTrack.id, + 'exposureMode': exposureMode.name, + }, + ); + } else { + throw Exception('setExposureMode only support for mobile devices!'); + } + } + + static Future setExposurePoint( + MediaStreamTrack videoTrack, Point? point) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetExposurePoint', + { + 'trackId': videoTrack.id, + 'exposurePoint': { + 'reset': point == null, + 'x': point?.x, + 'y': point?.y, + }, + }, + ); + } else { + throw Exception('setExposurePoint only support for mobile devices!'); + } + } +} diff --git a/lib/src/native/desktop_capturer_impl.dart b/lib/src/native/desktop_capturer_impl.dart new file mode 100644 index 0000000000..f6cc4aab29 --- /dev/null +++ b/lib/src/native/desktop_capturer_impl.dart @@ -0,0 +1,195 @@ +import 'dart:async'; +import 'dart:typed_data'; + +import '../desktop_capturer.dart'; +import 'event_channel.dart'; +import 'utils.dart'; + +class DesktopCapturerSourceNative extends DesktopCapturerSource { + DesktopCapturerSourceNative( + this._id, this._name, this._thumbnailSize, this._type); + factory DesktopCapturerSourceNative.fromMap(Map map) { + var sourceType = (map['type'] as String) == 'window' + ? 
SourceType.Window + : SourceType.Screen; + var source = DesktopCapturerSourceNative(map['id'], map['name'], + ThumbnailSize.fromMap(map['thumbnailSize']), sourceType); + if (map['thumbnail'] != null) { + source.thumbnail = map['thumbnail'] as Uint8List; + } + return source; + } + + //ignore: close_sinks + final StreamController _onNameChanged = + StreamController.broadcast(sync: true); + + @override + StreamController get onNameChanged => _onNameChanged; + + final StreamController _onThumbnailChanged = + StreamController.broadcast(sync: true); + + @override + StreamController get onThumbnailChanged => _onThumbnailChanged; + + Uint8List? _thumbnail; + String _name; + final String _id; + final ThumbnailSize _thumbnailSize; + final SourceType _type; + + set thumbnail(Uint8List? value) { + _thumbnail = value; + } + + set name(String name) { + _name = name; + } + + @override + String get id => _id; + + @override + String get name => _name; + + @override + Uint8List? get thumbnail => _thumbnail; + + @override + ThumbnailSize get thumbnailSize => _thumbnailSize; + + @override + SourceType get type => _type; +} + +class DesktopCapturerNative extends DesktopCapturer { + DesktopCapturerNative._internal() { + FlutterWebRTCEventChannel.instance.handleEvents.stream.listen((data) { + var event = data.keys.first; + Map map = data[event]; + handleEvent(event, map); + }); + } + static final DesktopCapturerNative instance = + DesktopCapturerNative._internal(); + + @override + StreamController get onAdded => _onAdded; + final StreamController _onAdded = + StreamController.broadcast(sync: true); + + @override + StreamController get onRemoved => _onRemoved; + final StreamController _onRemoved = + StreamController.broadcast(sync: true); + + @override + StreamController get onNameChanged => _onNameChanged; + final StreamController _onNameChanged = + StreamController.broadcast(sync: true); + + @override + StreamController get onThumbnailChanged => + _onThumbnailChanged; + final 
StreamController _onThumbnailChanged = + StreamController.broadcast(sync: true); + + final Map _sources = {}; + + void handleEvent(String event, Map map) async { + switch (event) { + case 'desktopSourceAdded': + final source = DesktopCapturerSourceNative.fromMap(map); + if (_sources[source.id] == null) { + _sources[source.id] = source; + _onAdded.add(source); + } + break; + case 'desktopSourceRemoved': + final id = map['id'] as String; + if (_sources[id] != null) { + _onRemoved.add(_sources.remove(id)!); + } + break; + case 'desktopSourceThumbnailChanged': + final source = _sources[map['id'] as String]; + if (source != null) { + try { + source.thumbnail = map['thumbnail'] as Uint8List; + _onThumbnailChanged.add(source); + source.onThumbnailChanged.add(source.thumbnail!); + } catch (e) { + print('desktopSourceThumbnailChanged: $e'); + } + } + break; + case 'desktopSourceNameChanged': + final source = _sources[map['id'] as String]; + if (source != null) { + source.name = map['name']; + _onNameChanged.add(source); + source.onNameChanged.add(source.name); + } + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + Future> getSources( + {required List types, ThumbnailSize? 
thumbnailSize}) async { + _sources.clear(); + final response = await WebRTC.invokeMethod( + 'getDesktopSources', + { + 'types': types.map((type) => desktopSourceTypeToString[type]).toList(), + if (thumbnailSize != null) 'thumbnailSize': thumbnailSize.toMap(), + }, + ); + if (response == null) { + throw Exception('getDesktopSources return null, something wrong'); + } + for (var source in response['sources']) { + var desktopSource = DesktopCapturerSourceNative.fromMap(source); + _sources[desktopSource.id] = desktopSource; + } + return _sources.values.toList(); + } + + @override + Future updateSources({required List types}) async { + final response = await WebRTC.invokeMethod( + 'updateDesktopSources', + { + 'types': types.map((type) => desktopSourceTypeToString[type]).toList(), + }, + ); + if (response == null) { + throw Exception('updateSources return null, something wrong'); + } + return response['result'] as bool; + } + + Future getThumbnail(DesktopCapturerSourceNative source) async { + final response = await WebRTC.invokeMethod( + 'getDesktopSourceThumbnail', + { + 'sourceId': source.id, + 'thumbnailSize': { + 'width': source.thumbnailSize.width, + 'height': source.thumbnailSize.height + } + }, + ); + if (response == null || !response is Uint8List?) 
{ + throw Exception('getDesktopSourceThumbnail return null, something wrong'); + } + return response as Uint8List?; + } +} diff --git a/lib/src/native/event_channel.dart b/lib/src/native/event_channel.dart new file mode 100644 index 0000000000..de43755a0a --- /dev/null +++ b/lib/src/native/event_channel.dart @@ -0,0 +1,28 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +class FlutterWebRTCEventChannel { + FlutterWebRTCEventChannel._internal() { + EventChannel('FlutterWebRTC.Event') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + static final FlutterWebRTCEventChannel instance = + FlutterWebRTCEventChannel._internal(); + + final StreamController> handleEvents = + StreamController.broadcast(); + + void eventListener(dynamic event) async { + final Map map = event; + handleEvents.add({map['event'] as String: map}); + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } +} diff --git a/lib/src/native/factory_impl.dart b/lib/src/native/factory_impl.dart new file mode 100644 index 0000000000..676e8c67da --- /dev/null +++ b/lib/src/native/factory_impl.dart @@ -0,0 +1,122 @@ +import 'dart:async'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import '../desktop_capturer.dart'; +import 'desktop_capturer_impl.dart'; +import 'frame_cryptor_impl.dart'; +import 'media_recorder_impl.dart'; +import 'media_stream_impl.dart'; +import 'mediadevices_impl.dart'; +import 'navigator_impl.dart'; +import 'rtc_peerconnection_impl.dart'; +import 'rtc_video_renderer_impl.dart'; +import 'utils.dart'; + +class RTCFactoryNative extends RTCFactory { + RTCFactoryNative._internal(); + + static final RTCFactory instance = RTCFactoryNative._internal(); + + @override + Future createLocalMediaStream(String label) async { + final response = await WebRTC.invokeMethod('createLocalMediaStream'); + if (response == null) { + throw Exception('createLocalMediaStream return null, something wrong'); + } + 
return MediaStreamNative(response['streamId'], label); + } + + @override + Future createPeerConnection( + Map configuration, + [Map constraints = const {}]) async { + var defaultConstraints = { + 'mandatory': {}, + 'optional': [ + {'DtlsSrtpKeyAgreement': true}, + ], + }; + + final response = await WebRTC.invokeMethod( + 'createPeerConnection', + { + 'configuration': configuration, + 'constraints': constraints.isEmpty ? defaultConstraints : constraints + }, + ); + + String peerConnectionId = response['peerConnectionId']; + return RTCPeerConnectionNative(peerConnectionId, configuration); + } + + @override + MediaRecorder mediaRecorder() { + return MediaRecorderNative(); + } + + @override + VideoRenderer videoRenderer() { + return RTCVideoRenderer(); + } + + @override + Navigator get navigator => NavigatorNative.instance; + + @override + FrameCryptorFactory get frameCryptorFactory => + FrameCryptorFactoryImpl.instance; + + @override + Future getRtpReceiverCapabilities(String kind) async { + final response = await WebRTC.invokeMethod( + 'getRtpReceiverCapabilities', + { + 'kind': kind, + }, + ); + return RTCRtpCapabilities.fromMap(response); + } + + @override + Future getRtpSenderCapabilities(String kind) async { + final response = await WebRTC.invokeMethod( + 'getRtpSenderCapabilities', + { + 'kind': kind, + }, + ); + return RTCRtpCapabilities.fromMap(response); + } +} + +Future createPeerConnection( + Map configuration, + [Map constraints = const {}]) async { + return RTCFactoryNative.instance + .createPeerConnection(configuration, constraints); +} + +Future createLocalMediaStream(String label) async { + return RTCFactoryNative.instance.createLocalMediaStream(label); +} + +Future getRtpReceiverCapabilities(String kind) async { + return RTCFactoryNative.instance.getRtpReceiverCapabilities(kind); +} + +Future getRtpSenderCapabilities(String kind) async { + return RTCFactoryNative.instance.getRtpSenderCapabilities(kind); +} + +MediaRecorder mediaRecorder() { + return 
RTCFactoryNative.instance.mediaRecorder(); +} + +Navigator get navigator => RTCFactoryNative.instance.navigator; + +DesktopCapturer get desktopCapturer => DesktopCapturerNative.instance; + +MediaDevices get mediaDevices => MediaDeviceNative.instance; + +FrameCryptorFactory get frameCryptorFactory => FrameCryptorFactoryImpl.instance; diff --git a/lib/src/native/frame_cryptor_impl.dart b/lib/src/native/frame_cryptor_impl.dart new file mode 100644 index 0000000000..715e294cf1 --- /dev/null +++ b/lib/src/native/frame_cryptor_impl.dart @@ -0,0 +1,339 @@ +import 'dart:async'; +import 'dart:core'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 'utils.dart'; + +class KeyProviderImpl implements KeyProvider { + KeyProviderImpl(this._id); + final String _id; + @override + String get id => _id; + + @override + Future setSharedKey({required Uint8List key, int index = 0}) async { + try { + await WebRTC.invokeMethod('keyProviderSetSharedKey', { + 'keyProviderId': _id, + 'keyIndex': index, + 'key': key, + }); + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::setSharedKey: ${e.message}'; + } + } + + @override + Future ratchetSharedKey({int index = 0}) async { + try { + final response = await WebRTC.invokeMethod( + 'keyProviderRatchetSharedKey', { + 'keyProviderId': _id, + 'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::ratchetSharedKey: ${e.message}'; + } + } + + @override + Future exportSharedKey({int index = 0}) async { + try { + final response = await WebRTC.invokeMethod( + 'keyProviderExportSharedKey', { + 'keyProviderId': _id, + 'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::exportSharedKey: ${e.message}'; + } + } + + @override + Future setKey({ + required 
String participantId, + required int index, + required Uint8List key, + }) async { + try { + final response = + await WebRTC.invokeMethod('keyProviderSetKey', { + 'keyProviderId': _id, + 'keyIndex': index, + 'key': key, + 'participantId': participantId, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::setKey: ${e.message}'; + } + } + + @override + Future ratchetKey({ + required String participantId, + required int index, + }) async { + try { + final response = + await WebRTC.invokeMethod('keyProviderRatchetKey', { + 'keyProviderId': _id, + 'keyIndex': index, + 'participantId': participantId, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::ratchetKey: ${e.message}'; + } + } + + @override + Future exportKey({ + required String participantId, + required int index, + }) async { + try { + final response = + await WebRTC.invokeMethod('keyProviderExportKey', { + 'keyProviderId': _id, + 'participantId': participantId, + 'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::exportSharedKey: ${e.message}'; + } + } + + @override + Future setSifTrailer({required Uint8List trailer}) async { + try { + await WebRTC.invokeMethod('keyProviderSetSifTrailer', { + 'keyProviderId': _id, + 'sifTrailer': trailer, + }); + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::setSifTrailer: ${e.message}'; + } + } + + @override + Future dispose() async { + try { + await WebRTC.invokeMethod('keyProviderDispose', { + 'keyProviderId': _id, + }); + } on PlatformException catch (e) { + throw 'Unable to KeyProviderImpl::dispose: ${e.message}'; + } + } +} + +class FrameCryptorFactoryImpl implements FrameCryptorFactory { + FrameCryptorFactoryImpl._internal(); + + static final FrameCryptorFactoryImpl instance = + FrameCryptorFactoryImpl._internal(); + + @override + Future 
createFrameCryptorForRtpSender({ + required String participantId, + required RTCRtpSender sender, + required Algorithm algorithm, + required KeyProvider keyProvider, + }) async { + RTCRtpSenderNative nativeSender = sender as RTCRtpSenderNative; + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorFactoryCreateFrameCryptor', { + 'peerConnectionId': nativeSender.peerConnectionId, + 'rtpSenderId': sender.senderId, + 'participantId': participantId, + 'keyProviderId': keyProvider.id, + 'algorithm': algorithm.index, + 'type': 'sender', + }); + var frameCryptorId = response['frameCryptorId']; + return FrameCryptorImpl(frameCryptorId, participantId); + } on PlatformException catch (e) { + throw 'Unable to FrameCryptorFactory::createFrameCryptorForRtpSender: ${e.message}'; + } + } + + @override + Future createFrameCryptorForRtpReceiver({ + required String participantId, + required RTCRtpReceiver receiver, + required Algorithm algorithm, + required KeyProvider keyProvider, + }) async { + RTCRtpReceiverNative nativeReceiver = receiver as RTCRtpReceiverNative; + + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorFactoryCreateFrameCryptor', { + 'peerConnectionId': nativeReceiver.peerConnectionId, + 'rtpReceiverId': nativeReceiver.receiverId, + 'participantId': participantId, + 'keyProviderId': keyProvider.id, + 'algorithm': algorithm.index, + 'type': 'receiver', + }); + var frameCryptorId = response['frameCryptorId']; + return FrameCryptorImpl(frameCryptorId, participantId); + } on PlatformException catch (e) { + throw 'Unable to FrameCryptorFactory::createFrameCryptorForRtpReceiver: ${e.message}'; + } + } + + @override + Future createDefaultKeyProvider( + KeyProviderOptions options) async { + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorFactoryCreateKeyProvider', { + 'keyProviderOptions': options.toJson(), + }); + String keyProviderId = response['keyProviderId']; + return KeyProviderImpl(keyProviderId); + } on 
PlatformException catch (e) { + throw 'Unable to FrameCryptorFactory::createKeyProvider: ${e.message}'; + } + } +} + +class FrameCryptorImpl extends FrameCryptor { + FrameCryptorImpl(this._frameCryptorId, this._participantId) { + _eventSubscription = _eventChannelFor(_frameCryptorId) + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + final String _frameCryptorId; + final String _participantId; + @override + String get participantId => _participantId; + + StreamSubscription? _eventSubscription; + + EventChannel _eventChannelFor(String peerConnectionId) { + return EventChannel('FlutterWebRTC/frameCryptorEvent$_frameCryptorId'); + } + + void errorListener(Object obj) { + if (obj is Exception) throw obj; + } + + FrameCryptorState _cryptorStateFromString(String str) { + switch (str) { + case 'new': + return FrameCryptorState.FrameCryptorStateNew; + case 'ok': + return FrameCryptorState.FrameCryptorStateOk; + case 'decryptionFailed': + return FrameCryptorState.FrameCryptorStateDecryptionFailed; + case 'encryptionFailed': + return FrameCryptorState.FrameCryptorStateEncryptionFailed; + case 'internalError': + return FrameCryptorState.FrameCryptorStateInternalError; + case "keyRatcheted": + return FrameCryptorState.FrameCryptorStateKeyRatcheted; + case 'missingKey': + return FrameCryptorState.FrameCryptorStateMissingKey; + default: + throw 'Unknown FrameCryptorState: $str'; + } + } + + void eventListener(dynamic event) { + final Map map = event; + switch (map['event']) { + case 'frameCryptionStateChanged': + var state = _cryptorStateFromString(map['state']); + var participantId = map['participantId']; + onFrameCryptorStateChanged?.call(participantId, state); + break; + } + } + + @override + Future updateCodec(String codec) async { + /// only needs for flutter web + } + + @override + Future setKeyIndex(int index) async { + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorSetKeyIndex', { + 'frameCryptorId': _frameCryptorId, + 
'keyIndex': index, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setKeyIndex: ${e.message}'; + } + } + + @override + Future get keyIndex async { + try { + final response = await WebRTC.invokeMethod( + 'frameCryptorGetKeyIndex', { + 'frameCryptorId': _frameCryptorId, + }); + return response['keyIndex']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::keyIndex: ${e.message}'; + } + } + + @override + Future setEnabled(bool enabled) async { + try { + final response = + await WebRTC.invokeMethod('frameCryptorSetEnabled', { + 'frameCryptorId': _frameCryptorId, + 'enabled': enabled, + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setEnabled: ${e.message}'; + } + } + + @override + Future get enabled async { + try { + final response = + await WebRTC.invokeMethod('frameCryptorGetEnabled', { + 'frameCryptorId': _frameCryptorId, + }); + return response['enabled']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::enabled: ${e.message}'; + } + } + + @override + Future dispose() async { + _eventSubscription?.cancel(); + _eventSubscription = null; + try { + final response = + await WebRTC.invokeMethod('frameCryptorDispose', { + 'frameCryptorId': _frameCryptorId, + }); + var res = response['result']; + print('res $res'); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::dispose: ${e.message}'; + } + } +} diff --git a/lib/src/native/ios/audio_configuration.dart b/lib/src/native/ios/audio_configuration.dart new file mode 100644 index 0000000000..b840a0a8ae --- /dev/null +++ b/lib/src/native/ios/audio_configuration.dart @@ -0,0 +1,123 @@ +import '../utils.dart'; + +enum AppleAudioMode { + default_, + gameChat, + measurement, + moviePlayback, + spokenAudio, + videoChat, + videoRecording, + voiceChat, + voicePrompt, +} + +extension AppleAudioModeEnumEx on String { + AppleAudioMode 
toAppleAudioMode() => + AppleAudioMode.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AppleAudioCategory { + soloAmbient, + playback, + record, + playAndRecord, + multiRoute, +} + +extension AppleAudioCategoryEnumEx on String { + AppleAudioCategory toAppleAudioCategory() => + AppleAudioCategory.values.firstWhere((d) => d.name == toLowerCase()); +} + +enum AppleAudioCategoryOption { + mixWithOthers, + duckOthers, + interruptSpokenAudioAndMixWithOthers, + allowBluetooth, + allowBluetoothA2DP, + allowAirPlay, + defaultToSpeaker, +} + +extension AppleAudioCategoryOptionEnumEx on String { + AppleAudioCategoryOption toAppleAudioCategoryOption() => + AppleAudioCategoryOption.values + .firstWhere((d) => d.name == toLowerCase()); +} + +class AppleAudioConfiguration { + AppleAudioConfiguration({ + this.appleAudioCategory, + this.appleAudioCategoryOptions, + this.appleAudioMode, + }); + final AppleAudioCategory? appleAudioCategory; + final Set? appleAudioCategoryOptions; + final AppleAudioMode? 
appleAudioMode; + + Map toMap() => { + if (appleAudioCategory != null) + 'appleAudioCategory': appleAudioCategory!.name, + if (appleAudioCategoryOptions != null) + 'appleAudioCategoryOptions': + appleAudioCategoryOptions!.map((e) => e.name).toList(), + if (appleAudioMode != null) 'appleAudioMode': appleAudioMode!.name, + }; +} + +enum AppleAudioIOMode { + none, + remoteOnly, + localOnly, + localAndRemote, +} + +class AppleNativeAudioManagement { + static AppleAudioIOMode currentMode = AppleAudioIOMode.none; + + static AppleAudioConfiguration getAppleAudioConfigurationForMode( + AppleAudioIOMode mode, + {bool preferSpeakerOutput = false}) { + currentMode = mode; + if (mode == AppleAudioIOMode.remoteOnly) { + return AppleAudioConfiguration( + appleAudioCategory: AppleAudioCategory.playback, + appleAudioCategoryOptions: { + AppleAudioCategoryOption.mixWithOthers, + }, + appleAudioMode: AppleAudioMode.spokenAudio, + ); + } else if ([ + AppleAudioIOMode.localOnly, + AppleAudioIOMode.localAndRemote, + ].contains(mode)) { + return AppleAudioConfiguration( + appleAudioCategory: AppleAudioCategory.playAndRecord, + appleAudioCategoryOptions: { + AppleAudioCategoryOption.allowBluetooth, + AppleAudioCategoryOption.mixWithOthers, + }, + appleAudioMode: preferSpeakerOutput + ? 
AppleAudioMode.videoChat + : AppleAudioMode.voiceChat, + ); + } + + return AppleAudioConfiguration( + appleAudioCategory: AppleAudioCategory.soloAmbient, + appleAudioCategoryOptions: {}, + appleAudioMode: AppleAudioMode.default_, + ); + } + + static Future setAppleAudioConfiguration( + AppleAudioConfiguration config) async { + if (WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'setAppleAudioConfiguration', + {'configuration': config.toMap()}, + ); + } + } +} diff --git a/lib/src/native/media_recorder_impl.dart b/lib/src/native/media_recorder_impl.dart new file mode 100644 index 0000000000..15f4ae22dd --- /dev/null +++ b/lib/src/native/media_recorder_impl.dart @@ -0,0 +1,58 @@ +import 'dart:async'; +import 'dart:math'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class MediaRecorderNative extends MediaRecorder { + MediaRecorderNative({ + String? albumName = 'FlutterWebRTC', + }) : _albumName = albumName; + static final _random = Random(); + final _recorderId = _random.nextInt(0x7FFFFFFF); + var _isStarted = false; + final String? _albumName; + + @override + Future start( + String path, { + MediaStreamTrack? videoTrack, + RecorderAudioChannel? audioChannel, + }) async { + if (audioChannel == null && videoTrack == null) { + throw Exception('Neither audio nor video track were provided'); + } + + await WebRTC.invokeMethod('startRecordToFile', { + 'path': path, + if (audioChannel != null) 'audioChannel': audioChannel.index, + if (videoTrack != null) 'videoTrackId': videoTrack.id, + 'recorderId': _recorderId, + 'peerConnectionId': videoTrack is MediaStreamTrackNative + ? videoTrack.peerConnectionId + : null + }); + _isStarted = true; + } + + @override + void startWeb(MediaStream stream, + {Function(dynamic blob, bool isLastOne)? onDataChunk, + String? 
mimeType, + int timeSlice = 1000}) { + throw 'It\'s for Flutter Web only'; + } + + @override + Future stop() async { + if (!_isStarted) { + throw "Media recorder not started!"; + } + return await WebRTC.invokeMethod('stopRecordToFile', { + 'recorderId': _recorderId, + 'albumName': _albumName, + }); + } +} diff --git a/lib/src/native/media_stream_impl.dart b/lib/src/native/media_stream_impl.dart new file mode 100644 index 0000000000..49f27a7e5a --- /dev/null +++ b/lib/src/native/media_stream_impl.dart @@ -0,0 +1,110 @@ +import 'dart:async'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'factory_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class MediaStreamNative extends MediaStream { + MediaStreamNative(super.streamId, super.ownerTag); + + factory MediaStreamNative.fromMap(Map map) { + return MediaStreamNative(map['streamId'], map['ownerTag']) + ..setMediaTracks(map['audioTracks'], map['videoTracks']); + } + + final _audioTracks = []; + final _videoTracks = []; + + void setMediaTracks(List audioTracks, List videoTracks) { + _audioTracks.clear(); + + for (var track in audioTracks) { + _audioTracks.add(MediaStreamTrackNative(track['id'], track['label'], + track['kind'], track['enabled'], ownerTag, track['settings'] ?? {})); + } + + _videoTracks.clear(); + for (var track in videoTracks) { + _videoTracks.add(MediaStreamTrackNative(track['id'], track['label'], + track['kind'], track['enabled'], ownerTag, track['settings'] ?? 
{})); + } + } + + @override + List getTracks() { + return [..._audioTracks, ..._videoTracks]; + } + + @override + Future getMediaTracks() async { + final response = await WebRTC.invokeMethod( + 'mediaStreamGetTracks', + {'streamId': id}, + ); + + setMediaTracks(response['audioTracks'], response['videoTracks']); + } + + @override + Future addTrack(MediaStreamTrack track, + {bool addToNative = true}) async { + if (track.kind == 'audio') { + _audioTracks.add(track); + } else { + _videoTracks.add(track); + } + + if (addToNative) { + await WebRTC.invokeMethod('mediaStreamAddTrack', + {'streamId': id, 'trackId': track.id}); + } + } + + @override + Future removeTrack(MediaStreamTrack track, + {bool removeFromNative = true}) async { + if (track.kind == 'audio') { + _audioTracks.removeWhere((it) => it.id == track.id); + } else { + _videoTracks.removeWhere((it) => it.id == track.id); + } + + if (removeFromNative) { + await WebRTC.invokeMethod('mediaStreamRemoveTrack', + {'streamId': id, 'trackId': track.id}); + } + } + + @override + List getAudioTracks() { + return _audioTracks; + } + + @override + List getVideoTracks() { + return _videoTracks; + } + + @override + Future dispose() async { + await WebRTC.invokeMethod( + 'streamDispose', + {'streamId': id}, + ); + } + + @override + // TODO(cloudwebrtc): Implement + bool get active => throw UnimplementedError(); + + @override + Future clone() async { + final cloneStream = await createLocalMediaStream(id); + for (var track in [..._audioTracks, ..._videoTracks]) { + await cloneStream.addTrack(track); + } + return cloneStream; + } +} diff --git a/lib/src/native/media_stream_track_impl.dart b/lib/src/native/media_stream_track_impl.dart new file mode 100644 index 0000000000..e5c714f6f4 --- /dev/null +++ b/lib/src/native/media_stream_track_impl.dart @@ -0,0 +1,132 @@ +import 'dart:async'; +import 'dart:io'; +import 'dart:typed_data'; + +import 'package:path_provider/path_provider.dart'; +import 
'package:webrtc_interface/webrtc_interface.dart'; + +import '../helper.dart'; +import 'utils.dart'; + +class MediaStreamTrackNative extends MediaStreamTrack { + MediaStreamTrackNative(this._trackId, this._label, this._kind, this._enabled, + this._peerConnectionId, + [this.settings_ = const {}]); + + factory MediaStreamTrackNative.fromMap( + Map map, String peerConnectionId) { + return MediaStreamTrackNative(map['id'], map['label'], map['kind'], + map['enabled'], peerConnectionId, map['settings'] ?? {}); + } + final String _trackId; + final String _label; + final String _kind; + final String _peerConnectionId; + final Map settings_; + + bool _enabled; + + bool _muted = false; + + String get peerConnectionId => _peerConnectionId; + + @override + set enabled(bool enabled) { + WebRTC.invokeMethod('mediaStreamTrackSetEnable', { + 'trackId': _trackId, + 'enabled': enabled, + 'peerConnectionId': _peerConnectionId + }); + _enabled = enabled; + + if (kind == 'audio') { + _muted = !enabled; + muted ? onMute?.call() : onUnMute?.call(); + } + } + + @override + bool get enabled => _enabled; + + @override + String get label => _label; + + @override + String get kind => _kind; + + @override + String get id => _trackId; + + @override + bool get muted => _muted; + + @override + Future hasTorch() => WebRTC.invokeMethod( + 'mediaStreamTrackHasTorch', + {'trackId': _trackId}, + ).then((value) => value ?? 
false); + + @override + Future setTorch(bool torch) => WebRTC.invokeMethod( + 'mediaStreamTrackSetTorch', + {'trackId': _trackId, 'torch': torch}, + ); + + @override + Future switchCamera() => Helper.switchCamera(this); + + Future setZoom(double zoomLevel) => Helper.setZoom(this, zoomLevel); + + @Deprecated('Use Helper.setSpeakerphoneOn instead') + @override + void enableSpeakerphone(bool enable) async { + return Helper.setSpeakerphoneOn(enable); + } + + @override + Future captureFrame() async { + var filePath = await getTemporaryDirectory(); + await WebRTC.invokeMethod( + 'captureFrame', + { + 'trackId': _trackId, + 'peerConnectionId': _peerConnectionId, + 'path': '${filePath.path}/captureFrame.png' + }, + ); + return File('${filePath.path}/captureFrame.png') + .readAsBytes() + .then((value) => value.buffer); + } + + @override + Future applyConstraints([Map? constraints]) { + if (constraints == null) return Future.value(); + + var current = getConstraints(); + if (constraints.containsKey('volume') && + current['volume'] != constraints['volume']) { + Helper.setVolume(constraints['volume'], this); + } + + return Future.value(); + } + + @override + Map getSettings() { + return settings_.map((key, value) => MapEntry(key.toString(), value)); + } + + @override + Future dispose() async { + return stop(); + } + + @override + Future stop() async { + await WebRTC.invokeMethod( + 'trackDispose', + {'trackId': _trackId}, + ); + } +} diff --git a/lib/src/native/mediadevices_impl.dart b/lib/src/native/mediadevices_impl.dart new file mode 100644 index 0000000000..47f585615c --- /dev/null +++ b/lib/src/native/mediadevices_impl.dart @@ -0,0 +1,111 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'event_channel.dart'; +import 'media_stream_impl.dart'; +import 'utils.dart'; + +class MediaDeviceNative extends MediaDevices { + MediaDeviceNative._internal() { + 
FlutterWebRTCEventChannel.instance.handleEvents.stream.listen((data) { + var event = data.keys.first; + Map map = data.values.first; + handleEvent(event, map); + }); + } + + static final MediaDeviceNative instance = MediaDeviceNative._internal(); + + void handleEvent(String event, final Map map) async { + switch (map['event']) { + case 'onDeviceChange': + ondevicechange?.call(null); + break; + } + } + + @override + Future getUserMedia( + Map mediaConstraints) async { + try { + final response = await WebRTC.invokeMethod( + 'getUserMedia', + {'constraints': mediaConstraints}, + ); + if (response == null) { + throw Exception('getUserMedia return null, something wrong'); + } + + String streamId = response['streamId']; + var stream = MediaStreamNative(streamId, 'local'); + stream.setMediaTracks( + response['audioTracks'] ?? [], response['videoTracks'] ?? []); + return stream; + } on PlatformException catch (e) { + throw 'Unable to getUserMedia: ${e.message}'; + } + } + + @override + Future getDisplayMedia( + Map mediaConstraints) async { + try { + final response = await WebRTC.invokeMethod( + 'getDisplayMedia', + {'constraints': mediaConstraints}, + ); + if (response == null) { + throw Exception('getDisplayMedia return null, something wrong'); + } + String streamId = response['streamId']; + var stream = MediaStreamNative(streamId, 'local'); + stream.setMediaTracks(response['audioTracks'], response['videoTracks']); + return stream; + } on PlatformException catch (e) { + throw 'Unable to getDisplayMedia: ${e.message}'; + } + } + + @override + Future> getSources() async { + try { + final response = await WebRTC.invokeMethod( + 'getSources', + {}, + ); + + List sources = response['sources']; + + return sources; + } on PlatformException catch (e) { + throw 'Unable to getSources: ${e.message}'; + } + } + + @override + Future> enumerateDevices() async { + var source = await getSources(); + return source + .map( + (e) => MediaDeviceInfo( + deviceId: e['deviceId'], + groupId: 
e['groupId'], + kind: e['kind'], + label: e['label']), + ) + .toList(); + } + + @override + Future selectAudioOutput( + [AudioOutputOptions? options]) async { + await WebRTC.invokeMethod('selectAudioOutput', { + 'deviceId': options?.deviceId, + }); + // TODO(cloudwebrtc): return the selected device + return MediaDeviceInfo(label: 'label', deviceId: options!.deviceId); + } +} diff --git a/lib/src/native/navigator_impl.dart b/lib/src/native/navigator_impl.dart new file mode 100644 index 0000000000..cafe6d7dce --- /dev/null +++ b/lib/src/native/navigator_impl.dart @@ -0,0 +1,27 @@ +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'mediadevices_impl.dart'; + +class NavigatorNative extends Navigator { + NavigatorNative._internal(); + + static final NavigatorNative instance = NavigatorNative._internal(); + + @override + Future getDisplayMedia(Map mediaConstraints) { + return mediaDevices.getDisplayMedia(mediaConstraints); + } + + @override + Future getSources() { + return mediaDevices.enumerateDevices(); + } + + @override + Future getUserMedia(Map mediaConstraints) { + return mediaDevices.getUserMedia(mediaConstraints); + } + + @override + MediaDevices get mediaDevices => MediaDeviceNative.instance; +} diff --git a/lib/src/native/rtc_data_channel_impl.dart b/lib/src/native/rtc_data_channel_impl.dart new file mode 100644 index 0000000000..2c3c84b676 --- /dev/null +++ b/lib/src/native/rtc_data_channel_impl.dart @@ -0,0 +1,143 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'utils.dart'; + +final _typeStringToMessageType = { + 'text': MessageType.text, + 'binary': MessageType.binary +}; + +/// A class that represents a WebRTC datachannel. +/// Can send and receive text and binary messages. +class RTCDataChannelNative extends RTCDataChannel { + RTCDataChannelNative( + this._peerConnectionId, this._label, this._dataChannelId, this._flutterId, + {RTCDataChannelState? 
state}) { + stateChangeStream = _stateChangeController.stream; + messageStream = _messageController.stream; + if (state != null) { + _state = state; + } + _eventSubscription = _eventChannelFor(_peerConnectionId, _flutterId) + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + final String _peerConnectionId; + final String _label; + int _bufferedAmount = 0; + @override + // ignore: overridden_fields + int? bufferedAmountLowThreshold; + + /// Id for the datachannel in the Flutter <-> Native layer. + final String _flutterId; + + int? _dataChannelId; + RTCDataChannelState? _state; + StreamSubscription? _eventSubscription; + + @override + RTCDataChannelState? get state => _state; + + @override + int? get id => _dataChannelId; + + /// Get label. + @override + String? get label => _label; + + @override + int? get bufferedAmount => _bufferedAmount; + + final _stateChangeController = + StreamController.broadcast(sync: true); + final _messageController = + StreamController.broadcast(sync: true); + + /// RTCDataChannel event listener. + void eventListener(dynamic event) { + final Map map = event; + switch (map['event']) { + case 'dataChannelStateChanged': + _dataChannelId = map['id']; + _state = rtcDataChannelStateForString(map['state']); + onDataChannelState?.call(_state!); + + _stateChangeController.add(_state!); + break; + case 'dataChannelReceiveMessage': + _dataChannelId = map['id']; + + var type = _typeStringToMessageType[map['type']]; + dynamic data = map['data']; + RTCDataChannelMessage message; + if (type == MessageType.binary) { + message = RTCDataChannelMessage.fromBinary(data); + } else { + message = RTCDataChannelMessage(data); + } + + onMessage?.call(message); + + _messageController.add(message); + break; + + case 'dataChannelBufferedAmountChange': + _bufferedAmount = map['bufferedAmount']; + if (bufferedAmountLowThreshold != null) { + if (_bufferedAmount < bufferedAmountLowThreshold!) 
{ + onBufferedAmountLow?.call(_bufferedAmount); + } + } + onBufferedAmountChange?.call(_bufferedAmount, map['changedAmount']); + break; + } + } + + EventChannel _eventChannelFor(String peerConnectionId, String flutterId) { + return EventChannel( + 'FlutterWebRTC/dataChannelEvent$peerConnectionId$flutterId'); + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + Future getBufferedAmount() async { + final Map response = await WebRTC.invokeMethod( + 'dataChannelGetBufferedAmount', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _flutterId + }); + _bufferedAmount = response['bufferedAmount']; + return _bufferedAmount; + } + + @override + Future send(RTCDataChannelMessage message) async { + await WebRTC.invokeMethod('dataChannelSend', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _flutterId, + 'type': message.isBinary ? 'binary' : 'text', + 'data': message.isBinary ? message.binary : message.text, + }); + } + + @override + Future close() async { + await _stateChangeController.close(); + await _messageController.close(); + await _eventSubscription?.cancel(); + await WebRTC.invokeMethod('dataChannelClose', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _flutterId + }); + } +} diff --git a/lib/src/native/rtc_dtmf_sender_impl.dart b/lib/src/native/rtc_dtmf_sender_impl.dart new file mode 100644 index 0000000000..4eb7fd9878 --- /dev/null +++ b/lib/src/native/rtc_dtmf_sender_impl.dart @@ -0,0 +1,30 @@ +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'utils.dart'; + +class RTCDTMFSenderNative extends RTCDTMFSender { + RTCDTMFSenderNative(this._peerConnectionId, this._rtpSenderId); + // peer connection Id must be defined as a variable where this function will be called. 
+ final String _peerConnectionId; + final String _rtpSenderId; + + @override + Future insertDTMF(String tones, + {int duration = 100, int interToneGap = 70}) async { + await WebRTC.invokeMethod('sendDtmf', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _rtpSenderId, + 'tone': tones, + 'duration': duration, + 'gap': interToneGap, + }); + } + + @override + Future canInsertDtmf() async { + return await WebRTC.invokeMethod('canInsertDtmf', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _rtpSenderId + }); + } +} diff --git a/lib/src/native/rtc_peerconnection_impl.dart b/lib/src/native/rtc_peerconnection_impl.dart new file mode 100644 index 0000000000..3fe69ab63e --- /dev/null +++ b/lib/src/native/rtc_peerconnection_impl.dart @@ -0,0 +1,622 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_data_channel_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 'rtc_rtp_transceiver_impl.dart'; +import 'utils.dart'; + +/* + * PeerConnection + */ +class RTCPeerConnectionNative extends RTCPeerConnection { + RTCPeerConnectionNative(this._peerConnectionId, this._configuration) { + _eventSubscription = _eventChannelFor(_peerConnectionId) + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + // private: + final String _peerConnectionId; + StreamSubscription? _eventSubscription; + final _localStreams = []; + final _remoteStreams = []; + RTCDataChannelNative? _dataChannel; + Map _configuration; + RTCSignalingState? _signalingState; + RTCIceGatheringState? _iceGatheringState; + RTCIceConnectionState? _iceConnectionState; + RTCPeerConnectionState? 
_connectionState; + + final Map defaultSdpConstraints = { + 'mandatory': { + 'OfferToReceiveAudio': true, + 'OfferToReceiveVideo': true, + }, + 'optional': [], + }; + + @override + RTCSignalingState? get signalingState => _signalingState; + + @override + Future getSignalingState() async { + try { + final response = + await WebRTC.invokeMethod('getSignalingState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _signalingState = signalingStateForString(response['state']); + return _signalingState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getSignalingState: ${e.message}'; + } + } + + @override + RTCIceGatheringState? get iceGatheringState => _iceGatheringState; + + @override + Future getIceGatheringState() async { + try { + final response = + await WebRTC.invokeMethod('getIceGatheringState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _iceGatheringState = iceGatheringStateforString(response['state']); + return _iceGatheringState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getIceGatheringState: ${e.message}'; + } + } + + @override + RTCIceConnectionState? get iceConnectionState => _iceConnectionState; + + @override + Future getIceConnectionState() async { + try { + final response = + await WebRTC.invokeMethod('getIceConnectionState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _iceConnectionState = iceConnectionStateForString(response['state']); + return _iceConnectionState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getIceConnectionState: ${e.message}'; + } + } + + @override + RTCPeerConnectionState? 
get connectionState => _connectionState; + + @override + Future getConnectionState() async { + try { + final response = + await WebRTC.invokeMethod('getConnectionState', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + _connectionState = peerConnectionStateForString(response['state']); + return _connectionState; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getConnectionState: ${e.message}'; + } + } + + Future get localDescription => getLocalDescription(); + + Future get remoteDescription => + getRemoteDescription(); + + /* + * PeerConnection event listener. + */ + void eventListener(dynamic event) { + final Map map = event; + + switch (map['event']) { + case 'signalingState': + _signalingState = signalingStateForString(map['state']); + onSignalingState?.call(_signalingState!); + break; + case 'peerConnectionState': + _connectionState = peerConnectionStateForString(map['state']); + onConnectionState?.call(_connectionState!); + break; + case 'iceGatheringState': + _iceGatheringState = iceGatheringStateforString(map['state']); + onIceGatheringState?.call(_iceGatheringState!); + break; + case 'iceConnectionState': + _iceConnectionState = iceConnectionStateForString(map['state']); + onIceConnectionState?.call(_iceConnectionState!); + break; + case 'onCandidate': + Map cand = map['candidate']; + var candidate = RTCIceCandidate( + cand['candidate'], cand['sdpMid'], cand['sdpMLineIndex']); + onIceCandidate?.call(candidate); + break; + case 'onAddStream': + String streamId = map['streamId']; + + var stream = + _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { + var newStream = MediaStreamNative(streamId, _peerConnectionId); + newStream.setMediaTracks(map['audioTracks'], map['videoTracks']); + return newStream; + }); + + onAddStream?.call(stream); + _remoteStreams.add(stream); + break; + case 'onRemoveStream': + String streamId = map['streamId']; + + for (var item in 
_remoteStreams) { + if (item.id == streamId) { + onRemoveStream?.call(item); + break; + } + } + _remoteStreams.removeWhere((it) => it.id == streamId); + break; + case 'onAddTrack': + String streamId = map['streamId']; + Map track = map['track']; + + var newTrack = MediaStreamTrackNative( + track['id'], + track['label'], + track['kind'], + track['enabled'], + _peerConnectionId, + track['settings'] ?? {}); + String kind = track['kind']; + + var stream = + _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { + var newStream = MediaStreamNative(streamId, _peerConnectionId); + _remoteStreams.add(newStream); + return newStream; + }); + + var oldTracks = (kind == 'audio') + ? stream.getAudioTracks() + : stream.getVideoTracks(); + var oldTrack = oldTracks.isNotEmpty ? oldTracks[0] : null; + if (oldTrack != null) { + stream.removeTrack(oldTrack, removeFromNative: false); + onRemoveTrack?.call(stream, oldTrack); + } + + stream.addTrack(newTrack, addToNative: false); + onAddTrack?.call(stream, newTrack); + break; + case 'onRemoveTrack': + String trackId = map['trackId']; + for (var stream in _remoteStreams) { + stream.getTracks().forEach((track) { + if (track.id == trackId) { + onRemoveTrack?.call(stream, track); + stream.removeTrack(track, removeFromNative: false); + return; + } + }); + } + break; + case 'didOpenDataChannel': + int dataChannelId = map['id']; + String label = map['label']; + String flutterId = map['flutterId']; + _dataChannel = RTCDataChannelNative( + _peerConnectionId, label, dataChannelId, flutterId, + state: RTCDataChannelState.RTCDataChannelOpen); + onDataChannel?.call(_dataChannel!); + break; + case 'onRenegotiationNeeded': + onRenegotiationNeeded?.call(); + break; + + /// Unified-Plan + case 'onTrack': + var params = map['streams'] as List; + var streams = params.map((e) => MediaStreamNative.fromMap(e)).toList(); + var transceiver = map['transceiver'] != null + ? 
RTCRtpTransceiverNative.fromMap(map['transceiver'], + peerConnectionId: _peerConnectionId) + : null; + onTrack?.call(RTCTrackEvent( + receiver: RTCRtpReceiverNative.fromMap(map['receiver'], + peerConnectionId: _peerConnectionId), + streams: streams, + track: + MediaStreamTrackNative.fromMap(map['track'], _peerConnectionId), + transceiver: transceiver)); + break; + + /// Other + case 'onSelectedCandidatePairChanged': + + /// class RTCIceCandidatePair { + /// RTCIceCandidatePair(this.local, this.remote, this.lastReceivedMs, this.reason); + /// factory RTCIceCandidatePair.fromMap(Map map) { + /// return RTCIceCandidatePair( + /// RTCIceCandidate.fromMap(map['local']), + /// RTCIceCandidate.fromMap(map['remote']), + /// map['lastReceivedMs'], + /// map['reason']); + /// } + /// RTCIceCandidate local; + /// RTCIceCandidate remote; + /// int lastReceivedMs; + /// String reason; + /// } + /// + /// typedef SelectedCandidatePairChangedCallback = void Function(RTCIceCandidatePair pair); + /// SelectedCandidatePairChangedCallback onSelectedCandidatePairChanged; + /// + /// RTCIceCandidatePair iceCandidatePair = RTCIceCandidatePair.fromMap(map); + /// onSelectedCandidatePairChanged?.call(iceCandidatePair); + + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) throw obj; + } + + @override + Future dispose() async { + await _eventSubscription?.cancel(); + await WebRTC.invokeMethod( + 'peerConnectionDispose', + {'peerConnectionId': _peerConnectionId}, + ); + } + + EventChannel _eventChannelFor(String peerConnectionId) { + return EventChannel('FlutterWebRTC/peerConnectionEvent$peerConnectionId'); + } + + @override + Map get getConfiguration => _configuration; + + @override + Future setConfiguration(Map configuration) async { + _configuration = configuration; + try { + await WebRTC.invokeMethod('setConfiguration', { + 'peerConnectionId': _peerConnectionId, + 'configuration': configuration, + }); + } on PlatformException catch (e) { + throw 'Unable to 
RTCPeerConnection::setConfiguration: ${e.message}'; + } + } + + @override + Future createOffer( + [Map? constraints]) async { + try { + final response = + await WebRTC.invokeMethod('createOffer', { + 'peerConnectionId': _peerConnectionId, + 'constraints': constraints ?? defaultSdpConstraints + }); + + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createOffer: ${e.message}'; + } + } + + @override + Future createAnswer( + [Map? constraints]) async { + try { + final response = + await WebRTC.invokeMethod('createAnswer', { + 'peerConnectionId': _peerConnectionId, + 'constraints': constraints ?? defaultSdpConstraints + }); + + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createAnswer: ${e.message}'; + } + } + + @override + Future addStream(MediaStream stream) async { + _localStreams.add(stream); + await WebRTC.invokeMethod('addStream', { + 'peerConnectionId': _peerConnectionId, + 'streamId': stream.id, + }); + } + + @override + Future removeStream(MediaStream stream) async { + _localStreams.removeWhere((it) => it.id == stream.id); + await WebRTC.invokeMethod('removeStream', { + 'peerConnectionId': _peerConnectionId, + 'streamId': stream.id, + }); + } + + @override + Future setLocalDescription(RTCSessionDescription description) async { + try { + await WebRTC.invokeMethod('setLocalDescription', { + 'peerConnectionId': _peerConnectionId, + 'description': description.toMap(), + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::setLocalDescription: ${e.message}'; + } + } + + @override + Future setRemoteDescription(RTCSessionDescription description) async { + try { + await WebRTC.invokeMethod('setRemoteDescription', { + 'peerConnectionId': _peerConnectionId, + 'description': 
description.toMap(), + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::setRemoteDescription: ${e.message}'; + } + } + + @override + Future getLocalDescription() async { + try { + final response = + await WebRTC.invokeMethod('getLocalDescription', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getLocalDescription: ${e.message}'; + } + } + + @override + Future getRemoteDescription() async { + try { + final response = + await WebRTC.invokeMethod('getRemoteDescription', { + 'peerConnectionId': _peerConnectionId, + }); + + if (null == response) { + return null; + } + String sdp = response['sdp']; + String type = response['type']; + return RTCSessionDescription(sdp, type); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getRemoteDescription: ${e.message}'; + } + } + + @override + Future addCandidate(RTCIceCandidate candidate) async { + try { + await WebRTC.invokeMethod('addCandidate', { + 'peerConnectionId': _peerConnectionId, + 'candidate': candidate.toMap(), + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addCandidate: ${e.message}'; + } + } + + @override + Future> getStats([MediaStreamTrack? 
track]) async { + try { + final response = await WebRTC.invokeMethod('getStats', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track?.id + }); + + var stats = []; + if (response != null) { + List reports = response['stats']; + for (var report in reports) { + stats.add(StatsReport(report['id'], report['type'], + (report['timestamp'] as num).toDouble(), report['values'])); + } + } + return stats; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::getStats: ${e.message}'; + } + } + + @override + List getLocalStreams() { + return _localStreams; + } + + @override + List getRemoteStreams() { + return _remoteStreams; + } + + @override + Future createDataChannel( + String label, RTCDataChannelInit dataChannelDict) async { + try { + final response = + await WebRTC.invokeMethod('createDataChannel', { + 'peerConnectionId': _peerConnectionId, + 'label': label, + 'dataChannelDict': dataChannelDict.toMap() + }); + + _dataChannel = RTCDataChannelNative( + _peerConnectionId, label, response['id'], response['flutterId']); + return _dataChannel!; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createDataChannel: ${e.message}'; + } + } + + @override + RTCDTMFSender createDtmfSender(MediaStreamTrack track) { + return RTCDTMFSenderNative(_peerConnectionId, ''); + } + + @override + Future restartIce() async { + try { + await WebRTC.invokeMethod('restartIce', { + 'peerConnectionId': _peerConnectionId, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::resartIce: ${e.message}'; + } + } + + @override + Future close() async { + try { + await WebRTC.invokeMethod('peerConnectionClose', { + 'peerConnectionId': _peerConnectionId, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::close: ${e.message}'; + } + } + + /// Unified-Plan. 
+ @override + Future> getSenders() async { + try { + final response = await WebRTC.invokeMethod('getSenders', + {'peerConnectionId': _peerConnectionId}); + return RTCRtpSenderNative.fromMaps(response['senders'], + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTrack: ${e.message}'; + } + } + + @override + Future> getReceivers() async { + try { + final response = await WebRTC.invokeMethod('getReceivers', + {'peerConnectionId': _peerConnectionId}); + return RTCRtpReceiverNative.fromMaps(response['receivers'], + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTrack: ${e.message}'; + } + } + + @override + Future> getTransceivers() async { + try { + final response = await WebRTC.invokeMethod('getTransceivers', + {'peerConnectionId': _peerConnectionId}); + return RTCRtpTransceiverNative.fromMaps(response['transceivers'], + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTrack: ${e.message}'; + } + } + + @override + Future addTrack(MediaStreamTrack track, + [MediaStream? 
stream]) async { + try { + final response = await WebRTC.invokeMethod('addTrack', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track.id, + 'streamIds': [stream?.id] + }); + return RTCRtpSenderNative.fromMap(response, + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTrack: ${e.message}'; + } + } + + @override + Future removeTrack(RTCRtpSender sender) async { + try { + final response = await WebRTC.invokeMethod( + 'removeTrack', { + 'peerConnectionId': _peerConnectionId, + 'senderId': sender.senderId + }); + bool result = response['result']; + + if (result && (sender is RTCRtpSenderNative)) { + sender.removeTrackReference(); + } + + return result; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::removeTrack: ${e.message}'; + } + } + + @override + Future addTransceiver( + {MediaStreamTrack? track, + RTCRtpMediaType? kind, + RTCRtpTransceiverInit? init}) async { + try { + final response = + await WebRTC.invokeMethod('addTransceiver', { + 'peerConnectionId': _peerConnectionId, + if (track != null) 'trackId': track.id, + if (kind != null) 'mediaType': typeRTCRtpMediaTypetoString[kind], + if (init != null) + 'transceiverInit': RTCRtpTransceiverInitNative.initToMap(init) + }); + return RTCRtpTransceiverNative.fromMap(response, + peerConnectionId: _peerConnectionId); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTransceiver: ${e.message}'; + } + } +} diff --git a/lib/src/native/rtc_rtp_receiver_impl.dart b/lib/src/native/rtc_rtp_receiver_impl.dart new file mode 100644 index 0000000000..4b3e8a6101 --- /dev/null +++ b/lib/src/native/rtc_rtp_receiver_impl.dart @@ -0,0 +1,68 @@ +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'utils.dart'; + +class RTCRtpReceiverNative extends RTCRtpReceiver { + RTCRtpReceiverNative( + this._id, this._track, 
this._parameters, this._peerConnectionId); + + factory RTCRtpReceiverNative.fromMap(Map map, + {required String peerConnectionId}) { + var track = MediaStreamTrackNative.fromMap(map['track'], peerConnectionId); + var parameters = RTCRtpParameters.fromMap(map['rtpParameters']); + return RTCRtpReceiverNative( + map['receiverId'], track, parameters, peerConnectionId); + } + + static List fromMaps(List map, + {required String peerConnectionId}) { + return map + .map((e) => + RTCRtpReceiverNative.fromMap(e, peerConnectionId: peerConnectionId)) + .toList(); + } + + @override + Future> getStats() async { + try { + final response = await WebRTC.invokeMethod('getStats', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track.id + }); + var stats = []; + if (response != null) { + List reports = response['stats']; + for (var report in reports) { + stats.add(StatsReport(report['id'], report['type'], + report['timestamp'], report['values'])); + } + } + return stats; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpReceiverNative::getStats: ${e.message}'; + } + } + + /// private: + String _id; + String _peerConnectionId; + MediaStreamTrack _track; + RTCRtpParameters _parameters; + + /// The WebRTC specification only defines RTCRtpParameters in terms of senders, + /// but this API also applies them to receivers, similar to ORTC: + /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 
+ @override + RTCRtpParameters get parameters => _parameters; + + @override + MediaStreamTrack get track => _track; + + @override + String get receiverId => _id; + + String get peerConnectionId => _peerConnectionId; +} diff --git a/lib/src/native/rtc_rtp_sender_impl.dart b/lib/src/native/rtc_rtp_sender_impl.dart new file mode 100644 index 0000000000..81fb450307 --- /dev/null +++ b/lib/src/native/rtc_rtp_sender_impl.dart @@ -0,0 +1,159 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_track_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; +import 'utils.dart'; + +class RTCRtpSenderNative extends RTCRtpSender { + RTCRtpSenderNative(this._id, this._track, this._dtmf, this._parameters, + this._ownsTrack, this._peerConnectionId); + + factory RTCRtpSenderNative.fromMap(Map map, + {required String peerConnectionId}) { + Map trackMap = map['track']; + return RTCRtpSenderNative( + map['senderId'], + (trackMap.isNotEmpty) + ? MediaStreamTrackNative.fromMap(map['track'], peerConnectionId) + : null, + RTCDTMFSenderNative(peerConnectionId, map['senderId']), + RTCRtpParameters.fromMap(map['rtpParameters']), + map['ownsTrack'], + peerConnectionId); + } + + static List fromMaps(List map, + {required String peerConnectionId}) { + return map + .map((e) => + RTCRtpSenderNative.fromMap(e, peerConnectionId: peerConnectionId)) + .toList(); + } + + String _peerConnectionId; + String _id; + MediaStreamTrack? 
_track; + final Set _streams = {}; + RTCDTMFSender _dtmf; + RTCRtpParameters _parameters; + bool _ownsTrack = false; + + @override + Future> getStats() async { + try { + final response = await WebRTC.invokeMethod('getStats', { + 'peerConnectionId': _peerConnectionId, + if (track != null) 'trackId': track!.id, + }); + var stats = []; + if (response != null) { + List reports = response['stats']; + for (var report in reports) { + stats.add(StatsReport(report['id'], report['type'], + report['timestamp'], report['values'])); + } + } + return stats; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::getStats: ${e.message}'; + } + } + + @override + Future setParameters(RTCRtpParameters parameters) async { + _parameters = parameters; + try { + final response = + await WebRTC.invokeMethod('rtpSenderSetParameters', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'parameters': parameters.toMap() + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setParameters: ${e.message}'; + } + } + + @override + Future replaceTrack(MediaStreamTrack? track) async { + try { + await WebRTC.invokeMethod('rtpSenderReplaceTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track != null ? track.id : '' + }); + + // change reference of associated MediaTrack + _track = track; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::replaceTrack: ${e.message}'; + } + } + + @override + Future setTrack(MediaStreamTrack? track, + {bool takeOwnership = true}) async { + try { + await WebRTC.invokeMethod('rtpSenderSetTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track != null ? 
track.id : '', + 'takeOwnership': takeOwnership, + }); + + // change reference of associated MediaTrack + _track = track; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSenderNative::setTrack: ${e.message}'; + } + } + + @override + Future setStreams(List streams) async { + try { + await WebRTC.invokeMethod('rtpSenderSetStreams', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'streamIds': streams.map((e) => e.id).toList(), + }); + + // keep references to the streams now associated with this sender + _streams.addAll(streams); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::setStreams: ${e.message}'; + } + } + + void removeTrackReference() { + _track = null; + } + + @override + RTCRtpParameters get parameters => _parameters; + + @override + MediaStreamTrack? get track => _track; + + @override + String get senderId => _id; + + @override + bool get ownsTrack => _ownsTrack; + + @override + RTCDTMFSender get dtmfSender => _dtmf; + + String get peerConnectionId => _peerConnectionId; + + @Deprecated( + 'No need to dispose rtpSender as it is handled by peerConnection.') + @override + @mustCallSuper + Future dispose() async {} +} diff --git a/lib/src/native/rtc_rtp_transceiver_impl.dart b/lib/src/native/rtc_rtp_transceiver_impl.dart new file mode 100644 index 0000000000..7bc4e19090 --- /dev/null +++ b/lib/src/native/rtc_rtp_transceiver_impl.dart @@ -0,0 +1,183 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_impl.dart'; + +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 'utils.dart'; + +List listToRtpEncodings(List> list) { + return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); +} + +class RTCRtpTransceiverInitNative extends RTCRtpTransceiverInit { + RTCRtpTransceiverInitNative(TransceiverDirection direction, + List streams, List sendEncodings) + : super( + direction: direction, + streams: streams, + 
sendEncodings: sendEncodings); + + factory RTCRtpTransceiverInitNative.fromMap(Map map) { + return RTCRtpTransceiverInitNative( + typeStringToRtpTransceiverDirection[map['direction']]!, + (map['streams'] as List) + .map((e) => MediaStreamNative.fromMap(e)) + .toList(), + listToRtpEncodings(map['sendEncodings'])); + } + + Map toMap() { + return { + 'direction': typeRtpTransceiverDirectionToString[direction], + if (streams != null) 'streamIds': streams!.map((e) => e.id).toList(), + if (sendEncodings != null) + 'sendEncodings': sendEncodings!.map((e) => e.toMap()).toList(), + }; + } + + static Map initToMap(RTCRtpTransceiverInit init) { + return { + 'direction': typeRtpTransceiverDirectionToString[init.direction], + if (init.streams != null) + 'streamIds': init.streams!.map((e) => e.id).toList(), + if (init.sendEncodings != null) + 'sendEncodings': init.sendEncodings!.map((e) => e.toMap()).toList(), + }; + } +} + +class RTCRtpTransceiverNative extends RTCRtpTransceiver { + RTCRtpTransceiverNative( + this._id, + this._direction, + this._mid, + this._sender, + this._receiver, + this._peerConnectionId, + ); + + factory RTCRtpTransceiverNative.fromMap(Map map, + {required String peerConnectionId}) { + var transceiver = RTCRtpTransceiverNative( + map['transceiverId'] ?? '', + typeStringToRtpTransceiverDirection[map['direction']]!, + map['mid'] ?? 
'', + RTCRtpSenderNative.fromMap(map['sender'], + peerConnectionId: peerConnectionId), + RTCRtpReceiverNative.fromMap(map['receiver'], + peerConnectionId: peerConnectionId), + peerConnectionId); + return transceiver; + } + + static List fromMaps(List map, + {required String peerConnectionId}) { + return map + .map((e) => RTCRtpTransceiverNative.fromMap(e, + peerConnectionId: peerConnectionId)) + .toList(); + } + + String _peerConnectionId; + String _id; + bool _stop = false; + TransceiverDirection _direction; + String _mid; + RTCRtpSender _sender; + RTCRtpReceiver _receiver; + + set peerConnectionId(String id) { + _peerConnectionId = id; + } + + @override + String get mid => _mid; + + @override + RTCRtpSender get sender => _sender; + + @override + RTCRtpReceiver get receiver => _receiver; + + @override + bool get stoped => _stop; + + @override + String get transceiverId => _id; + + @override + Future setDirection(TransceiverDirection direction) async { + try { + await WebRTC.invokeMethod('rtpTransceiverSetDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id, + 'direction': typeRtpTransceiverDirectionToString[direction] + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::setDirection: ${e.message}'; + } + } + + @override + Future getCurrentDirection() async { + try { + final response = await WebRTC.invokeMethod( + 'rtpTransceiverGetCurrentDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + return response != null + ? 
typeStringToRtpTransceiverDirection[response['result']] + : null; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::getCurrentDirection: ${e.message}'; + } + } + + @override + Future getDirection() async { + try { + final response = await WebRTC.invokeMethod( + 'rtpTransceiverGetDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + + _direction = typeStringToRtpTransceiverDirection[response['result']]!; + return _direction; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::getDirection: ${e.message}'; + } + } + + @override + Future stop() async { + try { + await WebRTC.invokeMethod('rtpTransceiverStop', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + + _stop = true; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::stop: ${e.message}'; + } + } + + @override + Future setCodecPreferences(List codecs) async { + try { + await WebRTC.invokeMethod('setCodecPreferences', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id, + 'codecs': codecs.map((e) => e.toMap()).toList() + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::setCodecPreferences: ${e.message}'; + } + } +} diff --git a/lib/src/native/rtc_track_event_impl.dart b/lib/src/native/rtc_track_event_impl.dart new file mode 100644 index 0000000000..760afb15d2 --- /dev/null +++ b/lib/src/native/rtc_track_event_impl.dart @@ -0,0 +1,30 @@ +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_transceiver_impl.dart'; + +class RTCTrackEventNative extends RTCTrackEvent { + RTCTrackEventNative(RTCRtpReceiver receiver, List streams, + MediaStreamTrack track, RTCRtpTransceiver transceiver) + : super( + receiver: receiver, + streams: streams, + track: track, + transceiver: transceiver); + + factory 
RTCTrackEventNative.fromMap( + Map map, String peerConnectionId) { + var streamsParams = map['streams'] as List>; + var streams = + streamsParams.map((e) => MediaStreamNative.fromMap(e)).toList(); + return RTCTrackEventNative( + RTCRtpReceiverNative.fromMap(map['receiver'], + peerConnectionId: peerConnectionId), + streams, + MediaStreamTrackNative.fromMap(map['track'], peerConnectionId), + RTCRtpTransceiverNative.fromMap(map['transceiver'], + peerConnectionId: peerConnectionId)); + } +} diff --git a/lib/src/native/rtc_video_platform_view.dart b/lib/src/native/rtc_video_platform_view.dart new file mode 100644 index 0000000000..0dd5cb6bcf --- /dev/null +++ b/lib/src/native/rtc_video_platform_view.dart @@ -0,0 +1,102 @@ +import 'dart:math'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter/widgets.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_video_platform_view_controller.dart'; + +class RTCVideoPlatFormView extends StatefulWidget { + const RTCVideoPlatFormView({ + super.key, + required this.onViewReady, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + }); + final void Function(RTCVideoPlatformViewController)? onViewReady; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + @override + NativeVideoPlayerViewState createState() => NativeVideoPlayerViewState(); +} + +class NativeVideoPlayerViewState extends State { + RTCVideoPlatformViewController? 
_controller; + bool _showVideoView = false; + @override + void dispose() { + _controller?.onFirstFrameRendered = null; + _controller?.onSrcObjectChange = null; + _controller?.onResize = null; + _controller = null; + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) => + _buildVideoView(context, constraints)); + } + + Widget _buildVideoView(BuildContext context, BoxConstraints constraints) { + return Center( + child: FittedBox( + clipBehavior: Clip.hardEdge, + fit: widget.objectFit == + RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? BoxFit.contain + : BoxFit.cover, + child: Center( + child: SizedBox( + width: _showVideoView + ? widget.objectFit == + RTCVideoViewObjectFit.RTCVideoViewObjectFitCover + ? constraints.maxWidth + : constraints.maxHeight * + (_controller?.value.aspectRatio ?? 1.0) + : 0.1, + height: _showVideoView ? constraints.maxHeight : 0.1, + child: Transform( + transform: Matrix4.identity()..rotateY(widget.mirror ? 
-pi : 0.0), + alignment: FractionalOffset.center, + child: _buildNativeView(), + ), + ), + ), + ), + ); + } + + Widget _buildNativeView() { + const viewType = 'rtc_video_platform_view'; + if (defaultTargetPlatform == TargetPlatform.iOS) { + return UiKitView( + viewType: viewType, + onPlatformViewCreated: onPlatformViewCreated, + creationParams: {}, + creationParamsCodec: const StandardMessageCodec(), + ); + } + return Text('RTCVideoPlatformView only support for iOS.'); + } + + void showVideoView(bool show) { + if (mounted) { + _showVideoView = show; + setState(() {}); + } + } + + Future onPlatformViewCreated(int id) async { + final controller = RTCVideoPlatformViewController(id); + _controller = controller; + controller.onFirstFrameRendered = () => showVideoView(true); + controller.onSrcObjectChange = () => showVideoView(false); + controller.onResize = () => showVideoView(true); + widget.onViewReady?.call(controller); + await _controller?.initialize(); + } +} diff --git a/lib/src/native/rtc_video_platform_view_controller.dart b/lib/src/native/rtc_video_platform_view_controller.dart new file mode 100644 index 0000000000..e9eeb1d51c --- /dev/null +++ b/lib/src/native/rtc_video_platform_view_controller.dart @@ -0,0 +1,183 @@ +import 'dart:async'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import '../helper.dart'; +import 'utils.dart'; + +class RTCVideoPlatformViewController extends ValueNotifier + implements VideoRenderer { + RTCVideoPlatformViewController(int viewId) : super(RTCVideoValue.empty) { + _viewId = viewId; + } + int? _viewId; + bool _disposed = false; + MediaStream? _srcObject; + StreamSubscription? 
_eventSubscription; + + @override + Future initialize() async { + _eventSubscription?.cancel(); + _eventSubscription = EventChannel('FlutterWebRTC/PlatformViewId$_viewId') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + @override + int get videoWidth => value.width.toInt(); + + @override + int get videoHeight => value.height.toInt(); + + @override + int? get textureId => _viewId; + + @override + MediaStream? get srcObject => _srcObject; + + @override + Function? onResize; + + @override + Function? onFirstFrameRendered; + + Function? onSrcObjectChange; + + @override + set srcObject(MediaStream? stream) { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoPlatformController is disposed'; + } + if (_viewId == null) throw 'Call initialize before setting the stream'; + if (_srcObject == stream) return; + _srcObject = stream; + onSrcObjectChange?.call(); + WebRTC.invokeMethod( + 'videoPlatformViewRendererSetSrcObject', { + 'viewId': _viewId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '' + }).then((_) { + value = (stream == null) + ? RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + }).catchError((e) { + print( + 'Got exception for RTCVideoPlatformController::setSrcObject: ${e.message}'); + }, test: (e) => e is PlatformException); + } + + Future setSrcObject({MediaStream? stream, String? trackId}) async { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoPlatformController is disposed'; + } + if (_viewId == null) throw 'Call initialize before setting the stream'; + if (_srcObject == stream) return; + _srcObject = stream; + onSrcObjectChange?.call(); + var oldviewId = _viewId; + try { + await WebRTC.invokeMethod( + 'videoPlatformViewRendererSetSrcObject', { + 'viewId': _viewId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '', + 'trackId': trackId ?? '0' + }); + value = (stream == null) + ? 
RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + } on PlatformException catch (e) { + throw 'Got exception for RTCVideoPlatformController::setSrcObject: viewId $oldviewId [disposed: $_disposed] with stream ${stream?.id}, error: ${e.message}'; + } + } + + @override + Future dispose() async { + if (_disposed) return; + await _eventSubscription?.cancel(); + _eventSubscription = null; + if (_viewId != null) { + try { + await WebRTC.invokeMethod( + 'videoPlatformViewRendererDispose', { + 'viewId': _viewId, + }); + _viewId = null; + } on PlatformException catch (e) { + throw 'Failed to RTCVideoPlatformController::dispose: ${e.message}'; + } + } + _disposed = true; + super.dispose(); + } + + void eventListener(dynamic event) { + if (_disposed) return; + final Map map = event; + switch (map['event']) { + case 'didPlatformViewChangeRotation': + value = + value.copyWith(rotation: map['rotation'], renderVideo: renderVideo); + onResize?.call(); + break; + case 'didPlatformViewChangeVideoSize': + value = value.copyWith( + width: 0.0 + map['width'], + height: 0.0 + map['height'], + renderVideo: renderVideo); + onResize?.call(); + break; + case 'didFirstFrameRendered': + value = value.copyWith(renderVideo: renderVideo); + onFirstFrameRendered?.call(); + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + bool get renderVideo => _viewId != null && _srcObject != null; + + @override + bool get muted => _srcObject?.getAudioTracks()[0].muted ?? 
true; + + @override + set muted(bool mute) { + if (_disposed) { + throw Exception( + 'Can\'t be muted: The RTCVideoPlatformController is disposed'); + } + if (_srcObject == null) { + throw Exception('Can\'t be muted: The MediaStream is null'); + } + if (_srcObject!.ownerTag != 'local') { + throw Exception( + 'You\'re trying to mute a remote track, this is not supported'); + } + if (_srcObject!.getAudioTracks().isEmpty) { + throw Exception('Can\'t be muted: The MediaStreamTrack(audio) is empty'); + } + + Helper.setMicrophoneMute(mute, _srcObject!.getAudioTracks()[0]); + } + + @override + Future audioOutput(String deviceId) async { + try { + await Helper.selectAudioOutput(deviceId); + } catch (e) { + print('Helper.selectAudioOutput ${e.toString()}'); + return false; + } + return true; + } +} diff --git a/lib/src/native/rtc_video_renderer_impl.dart b/lib/src/native/rtc_video_renderer_impl.dart new file mode 100644 index 0000000000..c2a46cba75 --- /dev/null +++ b/lib/src/native/rtc_video_renderer_impl.dart @@ -0,0 +1,179 @@ +import 'dart:async'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import '../helper.dart'; +import 'utils.dart'; + +class RTCVideoRenderer extends ValueNotifier + implements VideoRenderer { + RTCVideoRenderer() : super(RTCVideoValue.empty); + Completer? _initializing; + int? _textureId; + bool _disposed = false; + MediaStream? _srcObject; + StreamSubscription? 
_eventSubscription; + + @override + Future initialize() async { + if (_initializing != null) { + await _initializing!.future; + return; + } + _initializing = Completer(); + final response = await WebRTC.invokeMethod('createVideoRenderer', {}); + _textureId = response['textureId']; + _eventSubscription = EventChannel('FlutterWebRTC/Texture$textureId') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + _initializing!.complete(null); + } + + @override + int get videoWidth => value.width.toInt(); + + @override + int get videoHeight => value.height.toInt(); + + @override + int? get textureId => _textureId; + + @override + MediaStream? get srcObject => _srcObject; + + @override + Function? onResize; + + @override + Function? onFirstFrameRendered; + + @override + set srcObject(MediaStream? stream) { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoRenderer is disposed'; + } + if (textureId == null) throw 'Call initialize before setting the stream'; + _srcObject = stream; + WebRTC.invokeMethod('videoRendererSetSrcObject', { + 'textureId': textureId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '' + }).then((_) { + value = (stream == null) + ? RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + }).catchError((e) { + print('Got exception for RTCVideoRenderer::setSrcObject: ${e.message}'); + }, test: (e) => e is PlatformException); + } + + Future setSrcObject({MediaStream? stream, String? trackId}) async { + if (_disposed) { + throw 'Can\'t set srcObject: The RTCVideoRenderer is disposed'; + } + if (_textureId == null) throw 'Call initialize before setting the stream'; + _srcObject = stream; + var oldTextureId = _textureId; + try { + await WebRTC.invokeMethod('videoRendererSetSrcObject', { + 'textureId': _textureId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '', + 'trackId': trackId ?? '0' + }); + value = (stream == null) + ? 
RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + } on PlatformException catch (e) { + throw 'Got exception for RTCVideoRenderer::setSrcObject: textureId $oldTextureId [disposed: $_disposed] with stream ${stream?.id}, error: ${e.message}'; + } + } + + @override + Future dispose() async { + if (_disposed) return; + await _eventSubscription?.cancel(); + _eventSubscription = null; + if (_textureId != null) { + try { + await WebRTC.invokeMethod('videoRendererDispose', { + 'textureId': _textureId, + }); + _textureId = null; + _disposed = true; + } on PlatformException catch (e) { + throw 'Failed to RTCVideoRenderer::dispose: ${e.message}'; + } + } + + return super.dispose(); + } + + void eventListener(dynamic event) { + if (_disposed) return; + final Map map = event; + switch (map['event']) { + case 'didTextureChangeRotation': + value = + value.copyWith(rotation: map['rotation'], renderVideo: renderVideo); + onResize?.call(); + break; + case 'didTextureChangeVideoSize': + value = value.copyWith( + width: 0.0 + map['width'], + height: 0.0 + map['height'], + renderVideo: renderVideo); + onResize?.call(); + break; + case 'didFirstFrameRendered': + value = value.copyWith(renderVideo: renderVideo); + onFirstFrameRendered?.call(); + break; + } + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } + + @override + bool get renderVideo => _textureId != null && _srcObject != null; + + @override + bool get muted => _srcObject?.getAudioTracks()[0].muted ?? 
true; + + @override + set muted(bool mute) { + if (_disposed) { + throw Exception('Can\'t be muted: The RTCVideoRenderer is disposed'); + } + if (_srcObject == null) { + throw Exception('Can\'t be muted: The MediaStream is null'); + } + if (_srcObject!.ownerTag != 'local') { + throw Exception( + 'You\'re trying to mute a remote track, this is not supported'); + } + if (_srcObject!.getAudioTracks().isEmpty) { + throw Exception('Can\'t be muted: The MediaStreamTrack(audio) is empty'); + } + + Helper.setMicrophoneMute(mute, _srcObject!.getAudioTracks()[0]); + } + + @override + Future audioOutput(String deviceId) async { + try { + await Helper.selectAudioOutput(deviceId); + } catch (e) { + print('Helper.selectAudioOutput ${e.toString()}'); + return false; + } + return true; + } +} diff --git a/lib/src/native/rtc_video_view_impl.dart b/lib/src/native/rtc_video_view_impl.dart new file mode 100644 index 0000000000..9d236143c1 --- /dev/null +++ b/lib/src/native/rtc_video_view_impl.dart @@ -0,0 +1,71 @@ +import 'dart:math'; + +import 'package:flutter/material.dart'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_video_renderer_impl.dart'; + +class RTCVideoView extends StatelessWidget { + RTCVideoView( + this._renderer, { + super.key, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + this.filterQuality = FilterQuality.low, + this.placeholderBuilder, + }); + + final RTCVideoRenderer _renderer; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + final FilterQuality filterQuality; + final WidgetBuilder? 
placeholderBuilder; + + RTCVideoRenderer get videoRenderer => _renderer; + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) => + _buildVideoView(context, constraints)); + } + + Widget _buildVideoView(BuildContext context, BoxConstraints constraints) { + return Center( + child: Container( + width: constraints.maxWidth, + height: constraints.maxHeight, + child: FittedBox( + clipBehavior: Clip.hardEdge, + fit: objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? BoxFit.contain + : BoxFit.cover, + child: Center( + child: ValueListenableBuilder( + valueListenable: videoRenderer, + builder: + (BuildContext context, RTCVideoValue value, Widget? child) { + return SizedBox( + width: constraints.maxHeight * value.aspectRatio, + height: constraints.maxHeight, + child: child, + ); + }, + child: Transform( + transform: Matrix4.identity()..rotateY(mirror ? -pi : 0.0), + alignment: FractionalOffset.center, + child: videoRenderer.renderVideo + ? Texture( + textureId: videoRenderer.textureId!, + filterQuality: filterQuality, + ) + : placeholderBuilder?.call(context) ?? 
Container(), + ), + ), + ), + ), + ), + ); + } +} diff --git a/lib/src/native/utils.dart b/lib/src/native/utils.dart new file mode 100644 index 0000000000..362e7917cd --- /dev/null +++ b/lib/src/native/utils.dart @@ -0,0 +1,61 @@ +import 'dart:io'; + +import 'package:flutter/services.dart'; + +class WebRTC { + static const MethodChannel _channel = MethodChannel('FlutterWebRTC.Method'); + + static bool get platformIsDesktop => + Platform.isWindows || Platform.isMacOS || Platform.isLinux; + + static bool get platformIsWindows => Platform.isWindows; + + static bool get platformIsMacOS => Platform.isMacOS; + + static bool get platformIsLinux => Platform.isLinux; + + static bool get platformIsMobile => Platform.isIOS || Platform.isAndroid; + + static bool get platformIsIOS => Platform.isIOS; + + static bool get platformIsAndroid => Platform.isAndroid; + + static bool get platformIsWeb => false; + + static Future invokeMethod(String methodName, + [dynamic param]) async { + await initialize(); + + return _channel.invokeMethod( + methodName, + param, + ); + } + + static bool initialized = false; + + /// Initialize the WebRTC plugin. If this is not manually called, will be + /// initialized with default settings. + /// + /// Params: + /// + /// "networkIgnoreMask": a list of AdapterType objects converted to string with `.value` + /// + /// Android specific params: + /// + /// "forceSWCodec": a boolean that forces software codecs to be used for video. + /// + /// "forceSWCodecList": a list of strings of software codecs that should use software. + /// + /// "androidAudioConfiguration": an AndroidAudioConfiguration object mapped with toMap() + /// + /// "bypassVoiceProcessing": a boolean that bypasses the audio processing for the audio device. + static Future initialize({Map? options}) async { + if (!initialized) { + await _channel.invokeMethod('initialize', { + 'options': options ?? 
{}, + }); + initialized = true; + } + } +} diff --git a/lib/src/video_renderer_extension.dart b/lib/src/video_renderer_extension.dart new file mode 100644 index 0000000000..fa8b7ac78b --- /dev/null +++ b/lib/src/video_renderer_extension.dart @@ -0,0 +1,5 @@ +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +extension VideoRendererExtension on RTCVideoRenderer { + RTCVideoValue get videoValue => value; +} diff --git a/lib/src/web/factory_impl.dart b/lib/src/web/factory_impl.dart new file mode 100644 index 0000000000..0df13f80cd --- /dev/null +++ b/lib/src/web/factory_impl.dart @@ -0,0 +1,6 @@ +import '../desktop_capturer.dart'; + +export 'package:dart_webrtc/dart_webrtc.dart' + hide videoRenderer, MediaDevices, MediaRecorder; + +DesktopCapturer get desktopCapturer => throw UnimplementedError(); diff --git a/lib/src/web/rtc_video_renderer_impl.dart b/lib/src/web/rtc_video_renderer_impl.dart new file mode 100644 index 0000000000..69df097e0c --- /dev/null +++ b/lib/src/web/rtc_video_renderer_impl.dart @@ -0,0 +1,329 @@ +import 'dart:async'; +import 'dart:js_interop'; +import 'dart:ui_web' as web_ui; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; + +import 'package:dart_webrtc/dart_webrtc.dart'; +import 'package:web/web.dart' as web; + +// An error code value to error name Map. +// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code +const Map _kErrorValueToErrorName = { + 1: 'MEDIA_ERR_ABORTED', + 2: 'MEDIA_ERR_NETWORK', + 3: 'MEDIA_ERR_DECODE', + 4: 'MEDIA_ERR_SRC_NOT_SUPPORTED', +}; + +// An error code value to description Map. 
+// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code +const Map _kErrorValueToErrorDescription = { + 1: 'The user canceled the fetching of the video.', + 2: 'A network error occurred while fetching the video, despite having previously been available.', + 3: 'An error occurred while trying to decode the video, despite having previously been determined to be usable.', + 4: 'The video has been found to be unsuitable (missing or in a format not supported by your browser).', +}; + +// The default error message, when the error is an empty string +// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/message +const String _kDefaultErrorMessage = + 'No further diagnostic information can be determined or provided.'; + +class RTCVideoRenderer extends ValueNotifier + implements VideoRenderer { + RTCVideoRenderer() + : _textureId = _textureCounter++, + super(RTCVideoValue.empty); + + static const _elementIdForAudioManager = 'html_webrtc_audio_manager_list'; + + web.HTMLAudioElement? _audioElement; + + static int _textureCounter = 1; + + web.MediaStream? _videoStream; + + web.MediaStream? _audioStream; + + MediaStreamWeb? 
_srcObject; + + final int _textureId; + + bool mirror = false; + + final _subscriptions = []; + + String _objectFit = 'contain'; + + bool _muted = false; + + set objectFit(String fit) { + if (_objectFit == fit) return; + _objectFit = fit; + findHtmlView()?.style.objectFit = fit; + } + + @override + int get videoWidth => value.width.toInt(); + + @override + int get videoHeight => value.height.toInt(); + + @override + int get textureId => _textureId; + + @override + bool get muted => _muted; + + @override + set muted(bool mute) => _audioElement?.muted = _muted = mute; + + @override + bool get renderVideo => _srcObject != null; + + String get _elementIdForAudio => 'audio_$viewType'; + + String get _elementIdForVideo => 'video_$viewType'; + + String get viewType => 'RTCVideoRenderer-$textureId'; + + void _updateAllValues(web.HTMLVideoElement fallback) { + final element = findHtmlView() ?? fallback; + value = value.copyWith( + rotation: 0, + width: element.videoWidth.toDouble(), + height: element.videoHeight.toDouble(), + renderVideo: renderVideo, + ); + } + + @override + MediaStream? get srcObject => _srcObject; + + @override + set srcObject(MediaStream? 
stream) { + if (stream == null) { + findHtmlView()?.srcObject = null; + _audioElement?.srcObject = null; + _srcObject = null; + return; + } + + _srcObject = stream as MediaStreamWeb; + + if (null != _srcObject) { + if (stream.getVideoTracks().isNotEmpty) { + _videoStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getVideoTracks().toDart) { + _videoStream!.addTrack(track); + } + } + if (stream.getAudioTracks().isNotEmpty) { + _audioStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getAudioTracks().toDart) { + _audioStream!.addTrack(track); + } + } + } else { + _videoStream = null; + _audioStream = null; + } + + if (null != _audioStream) { + if (null == _audioElement) { + _audioElement = web.HTMLAudioElement() + ..id = _elementIdForAudio + ..muted = stream.ownerTag == 'local' + ..autoplay = true; + _ensureAudioManagerDiv().append(_audioElement!); + } + _audioElement?.srcObject = _audioStream; + } + + var videoElement = findHtmlView(); + if (null != videoElement) { + videoElement.srcObject = _videoStream; + _applyDefaultVideoStyles(findHtmlView()!); + } + + value = value.copyWith(renderVideo: renderVideo); + } + + Future setSrcObject({MediaStream? stream, String? 
trackId}) async { + if (stream == null) { + findHtmlView()?.srcObject = null; + _audioElement?.srcObject = null; + _srcObject = null; + return; + } + + _srcObject = stream as MediaStreamWeb; + + if (null != _srcObject) { + if (stream.getVideoTracks().isNotEmpty) { + _videoStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getVideoTracks().toDart) { + if (track.id == trackId) { + _videoStream!.addTrack(track); + } + } + } + if (stream.getAudioTracks().isNotEmpty) { + _audioStream = web.MediaStream(); + for (final track in _srcObject!.jsStream.getAudioTracks().toDart) { + _audioStream!.addTrack(track); + } + } + } else { + _videoStream = null; + _audioStream = null; + } + + if (null != _audioStream) { + if (null == _audioElement) { + _audioElement = web.HTMLAudioElement() + ..id = _elementIdForAudio + ..muted = stream.ownerTag == 'local' + ..autoplay = true; + _ensureAudioManagerDiv().append(_audioElement!); + } + _audioElement?.srcObject = _audioStream; + } + + var videoElement = findHtmlView(); + if (null != videoElement) { + videoElement.srcObject = _videoStream; + _applyDefaultVideoStyles(findHtmlView()!); + } + + value = value.copyWith(renderVideo: renderVideo); + } + + web.HTMLDivElement _ensureAudioManagerDiv() { + var div = web.document.getElementById(_elementIdForAudioManager); + if (null != div) return div as web.HTMLDivElement; + + div = web.HTMLDivElement() + ..id = _elementIdForAudioManager + ..style.display = 'none'; + web.document.body?.append(div); + return div as web.HTMLDivElement; + } + + web.HTMLVideoElement? 
findHtmlView() { + final element = web.document.getElementById(_elementIdForVideo); + if (null != element) return element as web.HTMLVideoElement; + return null; + } + + @override + Future dispose() async { + _srcObject = null; + for (var s in _subscriptions) { + s.cancel(); + } + final element = findHtmlView(); + element?.removeAttribute('src'); + element?.load(); + _audioElement?.remove(); + final audioManager = web.document.getElementById(_elementIdForAudioManager) + as web.HTMLDivElement?; + if (audioManager != null && !audioManager.hasChildNodes()) { + audioManager.remove(); + } + return super.dispose(); + } + + @override + Future audioOutput(String deviceId) async { + try { + final element = _audioElement; + if (null != element) { + await element.setSinkId(deviceId).toDart; + return true; + } + } catch (e) { + print('Unable to setSinkId: ${e.toString()}'); + } + return false; + } + + @override + Future initialize() async { + web_ui.platformViewRegistry.registerViewFactory(viewType, (int viewId) { + for (var s in _subscriptions) { + s.cancel(); + } + _subscriptions.clear(); + + final element = web.HTMLVideoElement() + ..autoplay = true + ..muted = true + ..controls = false + ..srcObject = _videoStream + ..id = _elementIdForVideo + ..setAttribute('playsinline', 'true'); + + _applyDefaultVideoStyles(element); + + _subscriptions.add( + element.onCanPlay.listen((dynamic _) { + _updateAllValues(element); + }), + ); + + _subscriptions.add( + element.onResize.listen((dynamic _) { + _updateAllValues(element); + onResize?.call(); + }), + ); + + // The error event fires when some form of error occurs while attempting to load or perform the media. + _subscriptions.add( + element.onError.listen((web.Event _) { + // The Event itself (_) doesn't contain info about the actual error. + // We need to look at the HTMLMediaElement.error. 
+ // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error + final error = element.error; + print('RTCVideoRenderer: videoElement.onError, ${error.toString()}'); + throw PlatformException( + code: _kErrorValueToErrorName[error!.code]!, + message: + error.message != '' ? error.message : _kDefaultErrorMessage, + details: _kErrorValueToErrorDescription[error.code], + ); + }), + ); + + _subscriptions.add( + element.onEnded.listen((dynamic _) { + // print('RTCVideoRenderer: videoElement.onEnded'); + }), + ); + + return element; + }); + } + + void _applyDefaultVideoStyles(web.HTMLVideoElement element) { + // Flip the video horizontally if is mirrored. + if (mirror) { + element.style.transform = 'scaleX(-1)'; + } + + element + ..style.objectFit = _objectFit + ..style.border = 'none' + ..style.width = '100%' + ..style.height = '100%'; + } + + @override + Function? onResize; + + @override + Function? onFirstFrameRendered; +} diff --git a/lib/src/web/rtc_video_view_impl.dart b/lib/src/web/rtc_video_view_impl.dart new file mode 100644 index 0000000000..9ef8ff1461 --- /dev/null +++ b/lib/src/web/rtc_video_view_impl.dart @@ -0,0 +1,88 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; + +import 'package:dart_webrtc/dart_webrtc.dart'; +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'rtc_video_renderer_impl.dart'; + +class RTCVideoView extends StatefulWidget { + RTCVideoView( + this._renderer, { + super.key, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + this.filterQuality = FilterQuality.low, + this.placeholderBuilder, + }); + + final RTCVideoRenderer _renderer; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + final FilterQuality filterQuality; + final WidgetBuilder? 
placeholderBuilder; + + @override + RTCVideoViewState createState() => RTCVideoViewState(); +} + +class RTCVideoViewState extends State { + RTCVideoViewState(); + + RTCVideoRenderer get videoRenderer => widget._renderer; + + @override + void initState() { + super.initState(); + videoRenderer.addListener(_onRendererListener); + videoRenderer.mirror = widget.mirror; + videoRenderer.objectFit = + widget.objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? 'contain' + : 'cover'; + } + + void _onRendererListener() { + if (mounted) setState(() {}); + } + + @override + void dispose() { + if (mounted) { + super.dispose(); + } + } + + @override + void didUpdateWidget(RTCVideoView oldWidget) { + super.didUpdateWidget(oldWidget); + Timer( + Duration(milliseconds: 10), () => videoRenderer.mirror = widget.mirror); + videoRenderer.objectFit = + widget.objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? 'contain' + : 'cover'; + } + + Widget buildVideoElementView() { + return HtmlElementView(viewType: videoRenderer.viewType); + } + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) { + return Center( + child: Container( + width: constraints.maxWidth, + height: constraints.maxHeight, + child: widget._renderer.renderVideo + ? buildVideoElementView() + : widget.placeholderBuilder?.call(context) ?? 
Container(), + ), + ); + }, + ); + } +} diff --git a/lib/src/web/utils.dart b/lib/src/web/utils.dart new file mode 100644 index 0000000000..9203763bf2 --- /dev/null +++ b/lib/src/web/utils.dart @@ -0,0 +1,24 @@ +class WebRTC { + static bool get platformIsDesktop => false; + + static bool get platformIsWindows => false; + + static bool get platformIsMacOS => false; + + static bool get platformIsLinux => false; + + static bool get platformIsMobile => false; + + static bool get platformIsIOS => false; + + static bool get platformIsAndroid => false; + + static bool get platformIsWeb => true; + + static Future invokeMethod(String methodName, + [dynamic param]) async => + throw UnimplementedError(); + + static Future initialize({Map? options}) async => + throw UnimplementedError('initialize is not supported on web'); +} diff --git a/lib/utils.dart b/lib/utils.dart deleted file mode 100644 index b9f14b1314..0000000000 --- a/lib/utils.dart +++ /dev/null @@ -1,74 +0,0 @@ -import 'package:flutter/services.dart'; -import 'package:webrtc/rtc_peerconnection.dart'; -import 'package:webrtc/rtc_data_channel.dart'; - -class WebRTC { - static const MethodChannel _channel = const MethodChannel('cloudwebrtc.com/WebRTC.Method'); - static MethodChannel methodChannel() => _channel; -} - -RTCIceConnectionState iceConnectionStateForString(String state) { - switch (state) { - case "new": - return RTCIceConnectionState.RTCIceConnectionStateNew; - case "checking": - return RTCIceConnectionState.RTCIceConnectionStateChecking; - case "connected": - return RTCIceConnectionState.RTCIceConnectionStateConnected; - case "completed": - return RTCIceConnectionState.RTCIceConnectionStateCompleted; - case "failed": - return RTCIceConnectionState.RTCIceConnectionStateFailed; - case "disconnected": - return RTCIceConnectionState.RTCIceConnectionStateDisconnected; - case "closed": - return RTCIceConnectionState.RTCIceConnectionStateClosed; - case "count": - return 
RTCIceConnectionState.RTCIceConnectionStateCount; - } - return RTCIceConnectionState.RTCIceConnectionStateClosed; -} - -RTCIceGatheringState iceGatheringStateforString(String state) { - switch (state) { - case "new": - return RTCIceGatheringState.RTCIceGatheringStateNew; - case "gathering": - return RTCIceGatheringState.RTCIceGatheringStateGathering; - case "complete": - return RTCIceGatheringState.RTCIceGatheringStateComplete; - } - return RTCIceGatheringState.RTCIceGatheringStateNew; -} - -RTCSignalingState signalingStateForString(String state) { - switch (state) { - case "stable": - return RTCSignalingState.RTCSignalingStateStable; - case "have-local-offer": - return RTCSignalingState.RTCSignalingStateHaveLocalOffer; - case "have-local-pranswer": - return RTCSignalingState.RTCSignalingStateHaveLocalPrAnswer; - case "have-remote-offer": - return RTCSignalingState.RTCSignalingStateHaveRemoteOffer; - case "have-remote-pranswer": - return RTCSignalingState.RTCSignalingStateHaveRemotePrAnswer; - case "closed": - return RTCSignalingState.RTCSignalingStateClosed; - } - return RTCSignalingState.RTCSignalingStateClosed; -} - -RTCDataChannelState rtcDataChannelStateForString(String state) { - switch (state) { - case "connecting": - return RTCDataChannelState.RTCDataChannelConnecting; - case "open": - return RTCDataChannelState.RTCDataChannelOpen; - case "closing": - return RTCDataChannelState.RTCDataChannelClosing; - case "closed": - return RTCDataChannelState.RTCDataChannelClosed; - } - return RTCDataChannelState.RTCDataChannelClosed; -} diff --git a/lib/webrtc.dart b/lib/webrtc.dart deleted file mode 100644 index 92394dc2b9..0000000000 --- a/lib/webrtc.dart +++ /dev/null @@ -1,10 +0,0 @@ -export 'get_user_media.dart'; -export 'media_stream_track.dart'; -export 'media_stream.dart'; -export 'rtc_data_channel.dart'; -export 'rtc_video_view.dart'; -export 'rtc_ice_candidate.dart'; -export 'rtc_session_description.dart'; -export 'rtc_peerconnection.dart'; -export 
'rtc_peerconnection_factory.dart'; -export 'rtc_stats_report.dart'; diff --git a/linux/CMakeLists.txt b/linux/CMakeLists.txt new file mode 100644 index 0000000000..8adc01ed33 --- /dev/null +++ b/linux/CMakeLists.txt @@ -0,0 +1,65 @@ +cmake_minimum_required(VERSION 3.10) +set(PROJECT_NAME "flutter_webrtc") +project(${PROJECT_NAME} LANGUAGES CXX) + +set(PLUGIN_NAME "${PROJECT_NAME}_plugin") +set (CMAKE_EXPORT_COMPILE_COMMANDS ON ) +set(CMAKE_CXX_STANDARD 17) + +add_definitions(-DRTC_DESKTOP_DEVICE) + +add_library(${PLUGIN_NAME} SHARED + "../common/cpp/src/flutter_data_channel.cc" + "../common/cpp/src/flutter_frame_cryptor.cc" + "../common/cpp/src/flutter_media_stream.cc" + "../common/cpp/src/flutter_peerconnection.cc" + "../common/cpp/src/flutter_frame_capturer.cc" + "../common/cpp/src/flutter_video_renderer.cc" + "../common/cpp/src/flutter_screen_capture.cc" + "../common/cpp/src/flutter_webrtc.cc" + "../common/cpp/src/flutter_webrtc_base.cc" + "../common/cpp/src/flutter_common.cc" + "flutter_webrtc_plugin.cc" + "flutter/core_implementations.cc" + "flutter/standard_codec.cc" + "flutter/plugin_registrar.cc" + "task_runner_linux.cc" +) + +include_directories( + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/flutter/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/svpng" +) + +apply_standard_settings(${PLUGIN_NAME}) +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) +target_include_directories(${PLUGIN_NAME} INTERFACE + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" +) +target_link_libraries(${PLUGIN_NAME} PRIVATE flutter) +target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::GTK) + + +target_link_libraries(${PLUGIN_NAME} PRIVATE 
+ "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" +) + +# List of absolute paths to libraries that should be bundled with the plugin +set(flutter_webrtc_bundled_libraries + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" + PARENT_SCOPE +) + +# Add $ORIGIN to RPATH so that lib/libflutter_webrtc_plugin.so can find lib/libwebrtc.so at runtime +set_property( + TARGET ${PLUGIN_NAME} + PROPERTY BUILD_RPATH + "\$ORIGIN" +) diff --git a/linux/flutter/binary_messenger_impl.h b/linux/flutter/binary_messenger_impl.h new file mode 100644 index 0000000000..be410be023 --- /dev/null +++ b/linux/flutter/binary_messenger_impl.h @@ -0,0 +1,50 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BINARY_MESSENGER_IMPL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BINARY_MESSENGER_IMPL_H_ + +#include + +#include +#include + +#include "include/flutter/binary_messenger.h" + +namespace flutter { + +// Wrapper around a FlutterDesktopMessengerRef that implements the +// BinaryMessenger API. +class BinaryMessengerImpl : public BinaryMessenger { + public: + explicit BinaryMessengerImpl(FlBinaryMessenger* core_messenger); + + virtual ~BinaryMessengerImpl(); + + // Prevent copying. + BinaryMessengerImpl(BinaryMessengerImpl const&) = delete; + BinaryMessengerImpl& operator=(BinaryMessengerImpl const&) = delete; + + // |flutter::BinaryMessenger| + void Send(const std::string& channel, + const uint8_t* message, + size_t message_size, + BinaryReply reply) const override; + + // |flutter::BinaryMessenger| + void SetMessageHandler(const std::string& channel, + BinaryMessageHandler handler) override; + + private: + // Handle for interacting with the C API. 
+ FlBinaryMessenger* messenger_; + + // A map from channel names to the BinaryMessageHandler that should be called + // for incoming messages on that channel. + std::map handlers_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BINARY_MESSENGER_IMPL_H_ diff --git a/linux/flutter/byte_buffer_streams.h b/linux/flutter/byte_buffer_streams.h new file mode 100644 index 0000000000..55b01c8831 --- /dev/null +++ b/linux/flutter/byte_buffer_streams.h @@ -0,0 +1,102 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BYTE_BUFFER_STREAMS_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BYTE_BUFFER_STREAMS_H_ + +#include +#include +#include +#include +#include + +#include "include/flutter/byte_streams.h" + +namespace flutter { + +// Implementation of ByteStreamReader base on a byte array. +class ByteBufferStreamReader : public ByteStreamReader { + public: + // Createa a reader reading from |bytes|, which must have a length of |size|. + // |bytes| must remain valid for the lifetime of this object. 
+ explicit ByteBufferStreamReader(const uint8_t* bytes, size_t size) + : bytes_(bytes), size_(size) {} + + virtual ~ByteBufferStreamReader() = default; + + // |ByteStreamReader| + uint8_t ReadByte() override { + if (location_ >= size_) { + std::cerr << "Invalid read in StandardCodecByteStreamReader" << std::endl; + return 0; + } + return bytes_[location_++]; + } + + // |ByteStreamReader| + void ReadBytes(uint8_t* buffer, size_t length) override { + if (location_ + length > size_) { + std::cerr << "Invalid read in StandardCodecByteStreamReader" << std::endl; + return; + } + std::memcpy(buffer, &bytes_[location_], length); + location_ += length; + } + + // |ByteStreamReader| + void ReadAlignment(uint8_t alignment) override { + uint8_t mod = location_ % alignment; + if (mod) { + location_ += alignment - mod; + } + } + + private: + // The buffer to read from. + const uint8_t* bytes_; + // The total size of the buffer. + size_t size_; + // The current read location. + size_t location_ = 0; +}; + +// Implementation of ByteStreamWriter based on a byte array. +class ByteBufferStreamWriter : public ByteStreamWriter { + public: + // Creates a writer that writes into |buffer|. + // |buffer| must remain valid for the lifetime of this object. + explicit ByteBufferStreamWriter(std::vector* buffer) + : bytes_(buffer) { + assert(buffer); + } + + virtual ~ByteBufferStreamWriter() = default; + + // |ByteStreamWriter| + void WriteByte(uint8_t byte) { bytes_->push_back(byte); } + + // |ByteStreamWriter| + void WriteBytes(const uint8_t* bytes, size_t length) { + assert(length > 0); + bytes_->insert(bytes_->end(), bytes, bytes + length); + } + + // |ByteStreamWriter| + void WriteAlignment(uint8_t alignment) { + uint8_t mod = bytes_->size() % alignment; + if (mod) { + for (int i = 0; i < alignment - mod; ++i) { + WriteByte(0); + } + } + } + + private: + // The buffer to write to. 
+ std::vector* bytes_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_BYTE_BUFFER_STREAMS_H_ diff --git a/linux/flutter/core_implementations.cc b/linux/flutter/core_implementations.cc new file mode 100644 index 0000000000..500ccca2ab --- /dev/null +++ b/linux/flutter/core_implementations.cc @@ -0,0 +1,257 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file contains the implementations of any class in the wrapper that +// - is not fully inline, and +// - is necessary for all clients of the wrapper (either app or plugin). +// It exists instead of the usual structure of having some_class_name.cc files +// so that changes to the set of things that need non-header implementations +// are not breaking changes for the template. +// +// If https://github.com/flutter/flutter/issues/57146 is fixed, this can be +// removed in favor of the normal structure since templates will no longer +// manually include files. 
+ +#include +#include +#include + +#include "binary_messenger_impl.h" +#include "include/flutter/engine_method_result.h" +#include "include/flutter/texture_registrar.h" +#include "texture_registrar_impl.h" + +struct FlTextureProxy { + FlPixelBufferTexture parent_instance; + flutter::TextureVariant* texture = nullptr; +}; + +struct FlTextureProxyClass { + FlPixelBufferTextureClass parent_class; +}; + +G_DEFINE_TYPE(FlTextureProxy, + fl_texture_proxy, + fl_pixel_buffer_texture_get_type()) + +#define FL_TEXTURE_PROXY(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), fl_texture_proxy_get_type(), \ + FlTextureProxy)) + +static gboolean fl_texture_proxy_copy_pixels(FlPixelBufferTexture* texture, + const uint8_t** out_buffer, + uint32_t* width, + uint32_t* height, + GError** error) { + FlTextureProxy* proxy = FL_TEXTURE_PROXY(texture); + flutter::PixelBufferTexture& pixel_buffer = + std::get(*proxy->texture); + const FlutterDesktopPixelBuffer* copy = + pixel_buffer.CopyPixelBuffer(*width, *height); + if (copy == nullptr) { + return TRUE; + } + *out_buffer = copy->buffer; + *width = copy->width; + *height = copy->height; + return TRUE; +} + +static FlTextureProxy* fl_texture_proxy_new(flutter::TextureVariant* texture) { + FlTextureProxy* proxy = + FL_TEXTURE_PROXY(g_object_new(fl_texture_proxy_get_type(), nullptr)); + proxy->texture = texture; + return proxy; +} + +static void fl_texture_proxy_class_init(FlTextureProxyClass* klass) { + FL_PIXEL_BUFFER_TEXTURE_CLASS(klass)->copy_pixels = + fl_texture_proxy_copy_pixels; +} + +static void fl_texture_proxy_init(FlTextureProxy* self) {} + +namespace flutter { + +// ========== binary_messenger_impl.h ========== + +namespace { +// Passes |message| to |user_data|, which must be a BinaryMessageHandler, along +// with a BinaryReply that will send a response on |message|'s response handle. 
+// +// This serves as an adaptor between the function-pointer-based message callback +// interface provided by the C API and the std::function-based message handler +// interface of BinaryMessenger. +static void ForwardToHandler(FlBinaryMessenger* messenger, + const gchar* channel, + GBytes* message, + FlBinaryMessengerResponseHandle* response_handle, + gpointer user_data) { + auto handler = g_object_ref(response_handle); + BinaryReply reply_handler = [messenger, handler](const uint8_t* reply, + size_t reply_size) mutable { + if (!handler) { + std::cerr << "Error: Response can be set only once. Ignoring " + "duplicate response." + << std::endl; + return; + } + + g_autoptr(GBytes) response = g_bytes_new(reply, reply_size); + GError* error = nullptr; + if (!fl_binary_messenger_send_response( + messenger, (FlBinaryMessengerResponseHandle*)handler, response, + &error)) { + g_warning("Failed to send binary response: %s", error->message); + } + }; + + const BinaryMessageHandler& message_handler = + *static_cast(user_data); + + if (user_data == nullptr) { + std::cerr << "Error: user_data is null" << std::endl; + return; + } + + message_handler( + static_cast(g_bytes_get_data(message, nullptr)), + g_bytes_get_size(message), std::move(reply_handler)); +} +} // namespace + +BinaryMessengerImpl::BinaryMessengerImpl(FlBinaryMessenger* core_messenger) + : messenger_(core_messenger) {} + +BinaryMessengerImpl::~BinaryMessengerImpl() = default; + +struct Captures { + BinaryReply reply; +}; + +static void message_reply_cb(GObject* object, + GAsyncResult* result, + gpointer user_data) { + g_autoptr(GError) error = nullptr; + auto captures = reinterpret_cast(user_data); + g_autoptr(GBytes) message = fl_binary_messenger_send_on_channel_finish( + FL_BINARY_MESSENGER(object), result, &error); + captures->reply( + static_cast(g_bytes_get_data(message, nullptr)), + g_bytes_get_size(message)); + delete captures; +}; + +void BinaryMessengerImpl::Send(const std::string& channel, + const 
uint8_t* message, + size_t message_size, + BinaryReply reply) const { + if (reply == nullptr) { + g_autoptr(GBytes) data = g_bytes_new(message, message_size); + fl_binary_messenger_send_on_channel(messenger_, channel.c_str(), data, + nullptr, nullptr, nullptr); + return; + } + + auto captures = new Captures(); + captures->reply = reply; + + g_autoptr(GBytes) data = g_bytes_new(message, message_size); + fl_binary_messenger_send_on_channel(messenger_, channel.c_str(), data, + nullptr, message_reply_cb, captures); +} + +void BinaryMessengerImpl::SetMessageHandler(const std::string& channel, + BinaryMessageHandler handler) { + if (!handler) { + handlers_.erase(channel); + fl_binary_messenger_set_message_handler_on_channel( + messenger_, channel.c_str(), nullptr, nullptr, nullptr); + return; + } + // Save the handler, to keep it alive. + handlers_[channel] = std::move(handler); + BinaryMessageHandler* message_handler = &handlers_[channel]; + // Set an adaptor callback that will invoke the handler. + fl_binary_messenger_set_message_handler_on_channel( + messenger_, channel.c_str(), ForwardToHandler, message_handler, nullptr); +} + +// ========== engine_method_result.h ========== + +namespace internal { + +ReplyManager::ReplyManager(BinaryReply reply_handler) + : reply_handler_(std::move(reply_handler)) { + assert(reply_handler_); +} + +ReplyManager::~ReplyManager() { + if (reply_handler_) { + // Warn, rather than send a not-implemented response, since the engine may + // no longer be valid at this point. + std::cerr + << "Warning: Failed to respond to a message. This is a memory leak." + << std::endl; + } +} + +void ReplyManager::SendResponseData(const std::vector* data) { + if (!reply_handler_) { + std::cerr + << "Error: Only one of Success, Error, or NotImplemented can be " + "called," + << " and it can be called exactly once. Ignoring duplicate result." + << std::endl; + return; + } + + const uint8_t* message = data && !data->empty() ? 
data->data() : nullptr; + size_t message_size = data ? data->size() : 0; + reply_handler_(message, message_size); + reply_handler_ = nullptr; +} + +} // namespace internal + +// ========== texture_registrar_impl.h ========== + +TextureRegistrarImpl::TextureRegistrarImpl( + FlTextureRegistrar* texture_registrar_ref) + : texture_registrar_ref_(texture_registrar_ref) {} + +TextureRegistrarImpl::~TextureRegistrarImpl() = default; + +int64_t TextureRegistrarImpl::RegisterTexture(TextureVariant* texture) { + auto texture_proxy = fl_texture_proxy_new(texture); + fl_texture_registrar_register_texture(texture_registrar_ref_, + FL_TEXTURE(texture_proxy)); + int64_t texture_id = reinterpret_cast(texture_proxy); + textures_[texture_id] = texture_proxy; + return texture_id; +} + +bool TextureRegistrarImpl::MarkTextureFrameAvailable(int64_t texture_id) { + auto it = textures_.find(texture_id); + if (it != textures_.end()) { + return fl_texture_registrar_mark_texture_frame_available( + texture_registrar_ref_, FL_TEXTURE(it->second)); + } + return false; +} + +bool TextureRegistrarImpl::UnregisterTexture(int64_t texture_id) { + auto it = textures_.find(texture_id); + if (it != textures_.end()) { + auto texture = it->second; + textures_.erase(it); + bool success = fl_texture_registrar_unregister_texture( + texture_registrar_ref_, FL_TEXTURE(texture)); + g_object_unref(texture); + return success; + } + return false; +} + +} // namespace flutter diff --git a/linux/flutter/include/flutter/basic_message_channel.h b/linux/flutter/include/flutter/basic_message_channel.h new file mode 100644 index 0000000000..c0819465c5 --- /dev/null +++ b/linux/flutter/include/flutter/basic_message_channel.h @@ -0,0 +1,110 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BASIC_MESSAGE_CHANNEL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BASIC_MESSAGE_CHANNEL_H_ + +#include +#include + +#include "binary_messenger.h" +#include "message_codec.h" + +namespace flutter { + +class EncodableValue; + +// A message reply callback. +// +// Used for submitting a reply back to a Flutter message sender. +template +using MessageReply = std::function; + +// A handler for receiving a message from the Flutter engine. +// +// Implementations must asynchronously call reply exactly once with the reply +// to the message. +template +using MessageHandler = + std::function& reply)>; + +// A channel for communicating with the Flutter engine by sending asynchronous +// messages. +template +class BasicMessageChannel { + public: + // Creates an instance that sends and receives method calls on the channel + // named |name|, encoded with |codec| and dispatched via |messenger|. + BasicMessageChannel(BinaryMessenger* messenger, + const std::string& name, + const MessageCodec* codec) + : messenger_(messenger), name_(name), codec_(codec) {} + + ~BasicMessageChannel() = default; + + // Prevent copying. + BasicMessageChannel(BasicMessageChannel const&) = delete; + BasicMessageChannel& operator=(BasicMessageChannel const&) = delete; + + // Sends a message to the Flutter engine on this channel. + void Send(const T& message) { + std::unique_ptr> raw_message = + codec_->EncodeMessage(message); + messenger_->Send(name_, raw_message->data(), raw_message->size()); + } + + // Sends a message to the Flutter engine on this channel expecting a reply. + void Send(const T& message, BinaryReply reply) { + std::unique_ptr> raw_message = + codec_->EncodeMessage(message); + messenger_->Send(name_, raw_message->data(), raw_message->size(), reply); + } + + // Registers a handler that should be called any time a message is + // received on this channel. 
A null handler will remove any previous handler. + // + // Note that the BasicMessageChannel does not own the handler, and will not + // unregister it on destruction, so the caller is responsible for + // unregistering explicitly if it should no longer be called. + void SetMessageHandler(const MessageHandler& handler) const { + if (!handler) { + messenger_->SetMessageHandler(name_, nullptr); + return; + } + const auto* codec = codec_; + std::string channel_name = name_; + BinaryMessageHandler binary_handler = [handler, codec, channel_name]( + const uint8_t* binary_message, + const size_t binary_message_size, + BinaryReply binary_reply) { + // Use this channel's codec to decode the message and build a reply + // handler. + std::unique_ptr message = + codec->DecodeMessage(binary_message, binary_message_size); + if (!message) { + std::cerr << "Unable to decode message on channel " << channel_name + << std::endl; + binary_reply(nullptr, 0); + return; + } + + MessageReply unencoded_reply = [binary_reply, + codec](const T& unencoded_response) { + auto binary_response = codec->EncodeMessage(unencoded_response); + binary_reply(binary_response->data(), binary_response->size()); + }; + handler(*message, std::move(unencoded_reply)); + }; + messenger_->SetMessageHandler(name_, std::move(binary_handler)); + } + + private: + BinaryMessenger* messenger_; + std::string name_; + const MessageCodec* codec_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BASIC_MESSAGE_CHANNEL_H_ diff --git a/linux/flutter/include/flutter/binary_messenger.h b/linux/flutter/include/flutter/binary_messenger.h new file mode 100644 index 0000000000..d552b499ee --- /dev/null +++ b/linux/flutter/include/flutter/binary_messenger.h @@ -0,0 +1,52 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BINARY_MESSENGER_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BINARY_MESSENGER_H_ + +#include +#include + +namespace flutter { + +// A binary message reply callback. +// +// Used for submitting a binary reply back to a Flutter message sender. +typedef std::function + BinaryReply; + +// A message handler callback. +// +// Used for receiving messages from Flutter and providing an asynchronous reply. +typedef std::function< + void(const uint8_t* message, size_t message_size, BinaryReply reply)> + BinaryMessageHandler; + +// A protocol for a class that handles communication of binary data on named +// channels to and from the Flutter engine. +class BinaryMessenger { + public: + virtual ~BinaryMessenger() = default; + + // Sends a binary message to the Flutter engine on the specified channel. + // + // If |reply| is provided, it will be called back with the response from the + // engine. + virtual void Send(const std::string& channel, + const uint8_t* message, + size_t message_size, + BinaryReply reply = nullptr) const = 0; + + // Registers a message handler for incoming binary messages from the Flutter + // side on the specified channel. + // + // Replaces any existing handler. Provide a null handler to unregister the + // existing handler. + virtual void SetMessageHandler(const std::string& channel, + BinaryMessageHandler handler) = 0; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BINARY_MESSENGER_H_ diff --git a/linux/flutter/include/flutter/byte_streams.h b/linux/flutter/include/flutter/byte_streams.h new file mode 100644 index 0000000000..3360bab945 --- /dev/null +++ b/linux/flutter/include/flutter/byte_streams.h @@ -0,0 +1,85 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BYTE_STREAMS_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BYTE_STREAMS_H_ + +// Interfaces for interacting with a stream of bytes, for use in codecs. + +namespace flutter { + +// An interface for a class that reads from a byte stream. +class ByteStreamReader { + public: + explicit ByteStreamReader() = default; + virtual ~ByteStreamReader() = default; + + // Reads and returns the next byte from the stream. + virtual uint8_t ReadByte() = 0; + + // Reads the next |length| bytes from the stream into |buffer|. The caller + // is responsible for ensuring that |buffer| is large enough. + virtual void ReadBytes(uint8_t* buffer, size_t length) = 0; + + // Advances the read cursor to the next multiple of |alignment| relative to + // the start of the stream, unless it is already aligned. + virtual void ReadAlignment(uint8_t alignment) = 0; + + // Reads and returns the next 32-bit integer from the stream. + int32_t ReadInt32() { + int32_t value = 0; + ReadBytes(reinterpret_cast(&value), 4); + return value; + } + + // Reads and returns the next 64-bit integer from the stream. + int64_t ReadInt64() { + int64_t value = 0; + ReadBytes(reinterpret_cast(&value), 8); + return value; + } + + // Reads and returns the next 64-bit floating point number from the stream. + double ReadDouble() { + double value = 0; + ReadBytes(reinterpret_cast(&value), 8); + return value; + } +}; + +// An interface for a class that writes to a byte stream. +class ByteStreamWriter { + public: + explicit ByteStreamWriter() = default; + virtual ~ByteStreamWriter() = default; + + // Writes |byte| to the stream. 
+ + virtual void WriteByte(uint8_t byte) = 0; + + // Writes the next |length| bytes from |bytes| to the stream + virtual void WriteBytes(const uint8_t* bytes, size_t length) = 0; + + // Writes 0s until the next multiple of |alignment| relative to the start + // of the stream, unless the write position is already aligned. + virtual void WriteAlignment(uint8_t alignment) = 0; + + // Writes the given 32-bit int to the stream. + void WriteInt32(int32_t value) { + WriteBytes(reinterpret_cast(&value), 4); + } + + // Writes the given 64-bit int to the stream. + void WriteInt64(int64_t value) { + WriteBytes(reinterpret_cast(&value), 8); + } + + // Writes the given 64-bit double to the stream. + void WriteDouble(double value) { + WriteBytes(reinterpret_cast(&value), 8); + } +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_BYTE_STREAMS_H_ diff --git a/linux/flutter/include/flutter/encodable_value.h b/linux/flutter/include/flutter/encodable_value.h new file mode 100644 index 0000000000..96490a2e12 --- /dev/null +++ b/linux/flutter/include/flutter/encodable_value.h @@ -0,0 +1,222 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENCODABLE_VALUE_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENCODABLE_VALUE_H_ + +#include +#include +#include +#include +#include +#include +#include +#include + +// Unless overridden, attempt to detect the RTTI state from the compiler. 
+#ifndef FLUTTER_ENABLE_RTTI +#if defined(_MSC_VER) +#ifdef _CPPRTTI +#define FLUTTER_ENABLE_RTTI 1 +#endif +#elif defined(__clang__) +#if __has_feature(cxx_rtti) +#define FLUTTER_ENABLE_RTTI 1 +#endif +#elif defined(__GNUC__) +#ifdef __GXX_RTTI +#define FLUTTER_ENABLE_RTTI 1 +#endif +#endif +#endif // #ifndef FLUTTER_ENABLE_RTTI + +namespace flutter { + +static_assert(sizeof(double) == 8, "EncodableValue requires a 64-bit double"); + +// A container for arbitrary types in EncodableValue. +// +// This is used in conjunction with StandardCodecExtension to allow using other +// types with a StandardMethodCodec/StandardMessageCodec. It is implicitly +// convertible to EncodableValue, so constructing an EncodableValue from a +// custom type can generally be written as: +// CustomEncodableValue(MyType(...)) +// rather than: +// EncodableValue(CustomEncodableValue(MyType(...))) +// +// For extracting received custom types, it is implicitly convertible to +// std::any. For example: +// const MyType& my_type_value = +// std::any_cast(std::get(value)); +// +// If RTTI is enabled, different extension types can be checked with type(): +// if (custom_value->type() == typeid(SomeData)) { ... } +// Clients that wish to disable RTTI would need to decide on another approach +// for distinguishing types (e.g., in StandardCodecExtension::WriteValueOfType) +// if multiple custom types are needed. For instance, wrapping all of the +// extension types in an EncodableValue-style variant, and only ever storing +// that variant in CustomEncodableValue. +class CustomEncodableValue { + public: + explicit CustomEncodableValue(const std::any& value) : value_(value) {} + ~CustomEncodableValue() = default; + + // Allow implicit conversion to std::any to allow direct use of any_cast. 
+ // NOLINTNEXTLINE(google-explicit-constructor) + operator std::any &() { return value_; } + // NOLINTNEXTLINE(google-explicit-constructor) + operator const std::any &() const { return value_; } + +#if defined(FLUTTER_ENABLE_RTTI) && FLUTTER_ENABLE_RTTI + // Passthrough to std::any's type(). + const std::type_info& type() const noexcept { return value_.type(); } +#endif + + // This operator exists only to provide a stable ordering for use as a + // std::map key, to satisfy the compiler requirements for EncodableValue. + // It does not attempt to provide useful ordering semantics, and using a + // custom value as a map key is not recommended. + bool operator<(const CustomEncodableValue& other) const { + return this < &other; + } + bool operator==(const CustomEncodableValue& other) const { + return this == &other; + } + + private: + std::any value_; +}; + +class EncodableValue; + +// Convenience type aliases. +using EncodableList = std::vector; +using EncodableMap = std::map; + +namespace internal { +// The base class for EncodableValue. Do not use this directly; it exists only +// for EncodableValue to inherit from. +// +// Do not change the order or indexes of the items here; see the comment on +// EncodableValue +using EncodableValueVariant = std::variant, + std::vector, + std::vector, + std::vector, + EncodableList, + EncodableMap, + CustomEncodableValue, + std::vector>; +} // namespace internal + +// An object that can contain any value or collection type supported by +// Flutter's standard method codec. 
+// +// For details, see: +// https://api.flutter.dev/flutter/services/StandardMessageCodec-class.html +// +// As an example, the following Dart structure: +// { +// 'flag': true, +// 'name': 'Thing', +// 'values': [1, 2.0, 4], +// } +// would correspond to: +// EncodableValue(EncodableMap{ +// {EncodableValue("flag"), EncodableValue(true)}, +// {EncodableValue("name"), EncodableValue("Thing")}, +// {EncodableValue("values"), EncodableValue(EncodableList{ +// EncodableValue(1), +// EncodableValue(2.0), +// EncodableValue(4), +// })}, +// }) +// +// The primary API surface for this object is std::variant. For instance, +// getting a string value from an EncodableValue, with type checking: +// if (std::holds_alternative(value)) { +// std::string some_string = std::get(value); +// } +// +// The order/indexes of the variant types is part of the API surface, and is +// guaranteed not to change. +// +// The variant types are mapped with Dart types in following ways: +// std::monostate -> null +// bool -> bool +// int32_t -> int +// int64_t -> int +// double -> double +// std::string -> String +// std::vector -> Uint8List +// std::vector -> Int32List +// std::vector -> Int64List +// std::vector -> Float32List +// std::vector -> Float64List +// EncodableList -> List +// EncodableMap -> Map +class EncodableValue : public internal::EncodableValueVariant { + public: + // Rely on std::variant for most of the constructors/operators. + using super = internal::EncodableValueVariant; + using super::super; + using super::operator=; + + explicit EncodableValue() = default; + + // Avoid the C++17 pitfall of conversion from char* to bool. Should not be + // needed for C++20. 
+ explicit EncodableValue(const char* string) : super(std::string(string)) {} + EncodableValue& operator=(const char* other) { + *this = std::string(other); + return *this; + } + + // Allow implicit conversion from CustomEncodableValue; the only reason to + // make a CustomEncodableValue (which can only be constructed explicitly) is + // to use it with EncodableValue, so the risk of unintended conversions is + // minimal, and it avoids the need for the verbose: + // EncodableValue(CustomEncodableValue(...)). + // NOLINTNEXTLINE(google-explicit-constructor) + EncodableValue(const CustomEncodableValue& v) : super(v) {} + + // Override the conversion constructors from std::variant to make them + // explicit, to avoid implicit conversion. + // + // While implicit conversion can be convenient in some cases, it can have very + // surprising effects. E.g., calling a function that takes an EncodableValue + // but accidentally passing an EncodableValue* would, instead of failing to + // compile, go through a pointer->bool->EncodableValue(bool) chain and + // silently call the function with a temp-constructed EncodableValue(true). + template + constexpr explicit EncodableValue(T&& t) noexcept : super(t) {} + + // Returns true if the value is null. Convenience wrapper since unlike the + // other types, std::monostate uses aren't self-documenting. + bool IsNull() const { return std::holds_alternative(*this); } + + // Convenience method to simplify handling objects received from Flutter + // where the values may be larger than 32-bit, since they have the same type + // on the Dart side, but will be either 32-bit or 64-bit here depending on + // the value. + // + // Calling this method if the value doesn't contain either an int32_t or an + // int64_t will throw an exception. 
+ int64_t LongValue() const { + if (std::holds_alternative(*this)) { + return std::get(*this); + } + return std::get(*this); + } +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENCODABLE_VALUE_H_ diff --git a/linux/flutter/include/flutter/engine_method_result.cc b/linux/flutter/include/flutter/engine_method_result.cc new file mode 100644 index 0000000000..65eaf5d435 --- /dev/null +++ b/linux/flutter/include/flutter/engine_method_result.cc @@ -0,0 +1,11 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is deprecated in favor of core_implementations.cc. This is a +// temporary forwarding implementation so that the switch to +// core_implementations.cc isn't an immediate breaking change, allowing for the +// template to be updated to include it and update the template version before +// removing this file. + +#include "core_implementations.cc" diff --git a/linux/flutter/include/flutter/engine_method_result.h b/linux/flutter/include/flutter/engine_method_result.h new file mode 100644 index 0000000000..3cc8b6a22c --- /dev/null +++ b/linux/flutter/include/flutter/engine_method_result.h @@ -0,0 +1,86 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_ + +#include +#include +#include + +#include "binary_messenger.h" +#include "method_codec.h" +#include "method_result.h" + +namespace flutter { + +namespace internal { +// Manages the one-time sending of response data. 
This is an internal helper +// class for EngineMethodResult, separated out since the implementation doesn't +// vary based on the template type. +class ReplyManager { + public: + explicit ReplyManager(BinaryReply reply_handler_); + ~ReplyManager(); + + // Prevent copying. + ReplyManager(ReplyManager const&) = delete; + ReplyManager& operator=(ReplyManager const&) = delete; + + // Sends the given response data (which must either be nullptr, which + // indicates an unhandled method, or a response serialized with |codec_|) to + // the engine. + void SendResponseData(const std::vector* data); + + private: + BinaryReply reply_handler_; +}; +} // namespace internal + +// Implementation of MethodResult that sends a response to the Flutter engine +// exactly once, encoded using a given codec. +template +class EngineMethodResult : public MethodResult { + public: + // Creates a result object that will send results to |reply_handler|, encoded + // using |codec|. The |codec| pointer must remain valid for as long as this + // object exists. 
+ EngineMethodResult(BinaryReply reply_handler, const MethodCodec* codec) + : reply_manager_( + std::make_unique(std::move(reply_handler))), + codec_(codec) {} + + ~EngineMethodResult() = default; + + protected: + // |flutter::MethodResult| + void SuccessInternal(const T* result) override { + std::unique_ptr> data = + codec_->EncodeSuccessEnvelope(result); + reply_manager_->SendResponseData(data.get()); + } + + // |flutter::MethodResult| + void ErrorInternal(const std::string& error_code, + const std::string& error_message, + const T* error_details) override { + std::unique_ptr> data = + codec_->EncodeErrorEnvelope(error_code, error_message, error_details); + reply_manager_->SendResponseData(data.get()); + } + + // |flutter::MethodResult| + void NotImplementedInternal() override { + reply_manager_->SendResponseData(nullptr); + } + + private: + std::unique_ptr reply_manager_; + + const MethodCodec* codec_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_ENGINE_METHOD_RESULT_H_ diff --git a/linux/flutter/include/flutter/event_channel.h b/linux/flutter/include/flutter/event_channel.h new file mode 100644 index 0000000000..fe0e1414d6 --- /dev/null +++ b/linux/flutter/include/flutter/event_channel.h @@ -0,0 +1,173 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_ + +#include +#include +#include + +#include "binary_messenger.h" +#include "engine_method_result.h" +#include "event_sink.h" +#include "event_stream_handler.h" + +namespace flutter { + +class EncodableValue; + +// A named channel for communicating with the Flutter application using +// asynchronous event streams. 
Incoming requests for event stream setup are +// decoded from binary on receipt, and C++ responses and events are encoded into +// binary before being transmitted back to Flutter. The MethodCodec used must be +// compatible with the one used by the Flutter application. This can be achieved +// by creating an EventChannel +// ("https://api.flutter.dev/flutter/services/EventChannel-class.html") +// counterpart of this channel on the Dart side. +// The C++ type of stream configuration arguments, events, and error details are +// templated, but only values supported by the specified MethodCodec can be +// used. +template +class EventChannel { + public: + // Creates an instance that sends and receives event handler on the channel + // named |name|, encoded with |codec| and dispatched via |messenger|. + EventChannel(BinaryMessenger* messenger, + const std::string& name, + const MethodCodec* codec) + : messenger_(messenger), name_(name), codec_(codec) {} + ~EventChannel() = default; + + // Prevent copying. + EventChannel(EventChannel const&) = delete; + EventChannel& operator=(EventChannel const&) = delete; + + // Registers a stream handler on this channel. + // If no handler has been registered, any incoming stream setup requests will + // be handled silently by providing an empty stream. + void SetStreamHandler(std::unique_ptr> handler) { + if (!handler) { + messenger_->SetMessageHandler(name_, nullptr); + is_listening_ = false; + return; + } + + // std::function requires a copyable lambda, so convert to a shared pointer. + // This is safe since only one copy of the shared_pointer will ever be + // accessed. 
+ std::shared_ptr> shared_handler(handler.release()); + const MethodCodec* codec = codec_; + const std::string channel_name = name_; + const BinaryMessenger* messenger = messenger_; + BinaryMessageHandler binary_handler = [shared_handler, codec, channel_name, + messenger, + this](const uint8_t* message, + const size_t message_size, + BinaryReply reply) { + constexpr char kOnListenMethod[] = "listen"; + constexpr char kOnCancelMethod[] = "cancel"; + + std::unique_ptr> method_call = + codec->DecodeMethodCall(message, message_size); + if (!method_call) { + std::cerr << "Unable to construct method call from message on channel: " + << channel_name << std::endl; + reply(nullptr, 0); + return; + } + + const std::string& method = method_call->method_name(); + if (method.compare(kOnListenMethod) == 0) { + if (is_listening_) { + std::unique_ptr> error = + shared_handler->OnCancel(nullptr); + if (error) { + std::cerr << "Failed to cancel existing stream: " + << (error->error_code) << ", " << (error->error_message) + << ", " << (error->error_details); + } + } + is_listening_ = true; + + std::unique_ptr> result; + auto sink = std::make_unique( + messenger, channel_name, codec); + std::unique_ptr> error = + shared_handler->OnListen(method_call->arguments(), std::move(sink)); + if (error) { + result = codec->EncodeErrorEnvelope( + error->error_code, error->error_message, error->error_details); + } else { + result = codec->EncodeSuccessEnvelope(); + } + reply(result->data(), result->size()); + } else if (method.compare(kOnCancelMethod) == 0) { + std::unique_ptr> result; + if (is_listening_) { + std::unique_ptr> error = + shared_handler->OnCancel(method_call->arguments()); + if (error) { + result = codec->EncodeErrorEnvelope( + error->error_code, error->error_message, error->error_details); + } else { + result = codec->EncodeSuccessEnvelope(); + } + is_listening_ = false; + } else { + result = codec->EncodeErrorEnvelope( + "error", "No active stream to cancel", nullptr); + } + 
reply(result->data(), result->size()); + } else { + reply(nullptr, 0); + } + }; + messenger_->SetMessageHandler(name_, std::move(binary_handler)); + } + + private: + class EventSinkImplementation : public EventSink { + public: + EventSinkImplementation(const BinaryMessenger* messenger, + const std::string& name, + const MethodCodec* codec) + : messenger_(messenger), name_(name), codec_(codec) {} + ~EventSinkImplementation() = default; + + // Prevent copying. + EventSinkImplementation(EventSinkImplementation const&) = delete; + EventSinkImplementation& operator=(EventSinkImplementation const&) = delete; + + private: + const BinaryMessenger* messenger_; + const std::string name_; + const MethodCodec* codec_; + + protected: + void SuccessInternal(const T* event = nullptr) override { + auto result = codec_->EncodeSuccessEnvelope(event); + messenger_->Send(name_, result->data(), result->size()); + } + + void ErrorInternal(const std::string& error_code, + const std::string& error_message, + const T* error_details) override { + auto result = + codec_->EncodeErrorEnvelope(error_code, error_message, error_details); + messenger_->Send(name_, result->data(), result->size()); + } + + void EndOfStreamInternal() override { messenger_->Send(name_, nullptr, 0); } + }; + + BinaryMessenger* messenger_; + const std::string name_; + const MethodCodec* codec_; + bool is_listening_ = false; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_CHANNEL_H_ diff --git a/linux/flutter/include/flutter/event_sink.h b/linux/flutter/include/flutter/event_sink.h new file mode 100644 index 0000000000..789be1eb96 --- /dev/null +++ b/linux/flutter/include/flutter/event_sink.h @@ -0,0 +1,62 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_ + +namespace flutter { + +class EncodableValue; + +// Event callback. Events to be sent to Flutter application +// act as clients of this interface for sending events. +template +class EventSink { + public: + EventSink() = default; + virtual ~EventSink() = default; + + // Prevent copying. + EventSink(EventSink const&) = delete; + EventSink& operator=(EventSink const&) = delete; + + // Consumes a successful event + void Success(const T& event) { SuccessInternal(&event); } + + // Consumes a successful event. + void Success() { SuccessInternal(nullptr); } + + // Consumes an error event. + void Error(const std::string& error_code, + const std::string& error_message, + const T& error_details) { + ErrorInternal(error_code, error_message, &error_details); + } + + // Consumes an error event. + void Error(const std::string& error_code, + const std::string& error_message = "") { + ErrorInternal(error_code, error_message, nullptr); + } + + // Consumes end of stream. Ensuing calls to Success() or + // Error(), if any, are ignored. + void EndOfStream() { EndOfStreamInternal(); } + + protected: + // Implementation of the public interface, to be provided by subclasses. + virtual void SuccessInternal(const T* event = nullptr) = 0; + + // Implementation of the public interface, to be provided by subclasses. + virtual void ErrorInternal(const std::string& error_code, + const std::string& error_message, + const T* error_details) = 0; + + // Implementation of the public interface, to be provided by subclasses. 
+ virtual void EndOfStreamInternal() = 0; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_ diff --git a/linux/flutter/include/flutter/event_stream_handler.h b/linux/flutter/include/flutter/event_stream_handler.h new file mode 100644 index 0000000000..9eced6cf70 --- /dev/null +++ b/linux/flutter/include/flutter/event_stream_handler.h @@ -0,0 +1,74 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_H_ + +#include "event_sink.h" + +namespace flutter { + +class EncodableValue; + +template +struct StreamHandlerError { + const std::string& error_code; + const std::string& error_message; + const T* error_details; + + StreamHandlerError(const std::string& error_code, + const std::string& error_message, + const T* error_details) + : error_code(error_code), + error_message(error_message), + error_details(error_details) {} +}; + +// Handler for stream setup and teardown requests. +// Implementations must be prepared to accept sequences of alternating calls to +// OnListen() and OnCancel(). Implementations should ideally consume no +// resources when the last such call is not OnListen(). In typical situations, +// this means that the implementation should register itself with +// platform-specific event sources OnListen() and deregister again OnCancel(). +template +class StreamHandler { + public: + StreamHandler() = default; + virtual ~StreamHandler() = default; + + // Prevent copying. + StreamHandler(StreamHandler const&) = delete; + StreamHandler& operator=(StreamHandler const&) = delete; + + // Handles a request to set up an event stream. Returns nullptr on success, + // or an error on failure. 
+ // |arguments| is stream configuration arguments and + // |events| is an EventSink for emitting events to the Flutter receiver. + std::unique_ptr> OnListen( + const T* arguments, + std::unique_ptr>&& events) { + return OnListenInternal(arguments, std::move(events)); + } + + // Handles a request to tear down the most recently created event stream. + // Returns nullptr on success, or an error on failure. + // |arguments| is stream configuration arguments. + std::unique_ptr> OnCancel(const T* arguments) { + return OnCancelInternal(arguments); + } + + protected: + // Implementation of the public interface, to be provided by subclasses. + virtual std::unique_ptr> OnListenInternal( + const T* arguments, + std::unique_ptr>&& events) = 0; + + // Implementation of the public interface, to be provided by subclasses. + virtual std::unique_ptr> OnCancelInternal( + const T* arguments) = 0; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_H_ diff --git a/linux/flutter/include/flutter/event_stream_handler_functions.h b/linux/flutter/include/flutter/event_stream_handler_functions.h new file mode 100644 index 0000000000..fde4ce4410 --- /dev/null +++ b/linux/flutter/include/flutter/event_stream_handler_functions.h @@ -0,0 +1,78 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_FUNCTIONS_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_FUNCTIONS_H_ + +#include + +#include "event_sink.h" +#include "event_stream_handler.h" + +namespace flutter { + +class EncodableValue; + +// Handler types for each of the StreamHandler setup and teardown +// requests. 
+template +using StreamHandlerListen = + std::function>( + const T* arguments, + std::unique_ptr>&& events)>; + +template +using StreamHandlerCancel = + std::function>(const T* arguments)>; + +// An implementation of StreamHandler that passes calls through to +// provided function objects. +template +class StreamHandlerFunctions : public StreamHandler { + public: + // Creates a handler object that calls the provided functions + // for the corresponding StreamHandler outcomes. + StreamHandlerFunctions(StreamHandlerListen on_listen, + StreamHandlerCancel on_cancel) + : on_listen_(on_listen), on_cancel_(on_cancel) {} + + virtual ~StreamHandlerFunctions() = default; + + // Prevent copying. + StreamHandlerFunctions(StreamHandlerFunctions const&) = delete; + StreamHandlerFunctions& operator=(StreamHandlerFunctions const&) = delete; + + protected: + // |flutter::StreamHandler| + std::unique_ptr> OnListenInternal( + const T* arguments, + std::unique_ptr>&& events) override { + if (on_listen_) { + return on_listen_(arguments, std::move(events)); + } + + auto error = std::make_unique>( + "error", "No OnListen handler set", nullptr); + return std::move(error); + } + + // |flutter::StreamHandler| + std::unique_ptr> OnCancelInternal( + const T* arguments) override { + if (on_cancel_) { + return on_cancel_(arguments); + } + + auto error = std::make_unique>( + "error", "No OnCancel handler set", nullptr); + return std::move(error); + } + + StreamHandlerListen on_listen_; + StreamHandlerCancel on_cancel_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_STREAM_HANDLER_FUNCTIONS_H_ diff --git a/linux/flutter/include/flutter/message_codec.h b/linux/flutter/include/flutter/message_codec.h new file mode 100644 index 0000000000..c84d25f241 --- /dev/null +++ b/linux/flutter/include/flutter/message_codec.h @@ -0,0 +1,62 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_MESSAGE_CODEC_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_MESSAGE_CODEC_H_ + +#include +#include +#include + +namespace flutter { + +// Translates between a binary message and higher-level method call and +// response/error objects. +template +class MessageCodec { + public: + MessageCodec() = default; + + virtual ~MessageCodec() = default; + + // Prevent copying. + MessageCodec(MessageCodec const&) = delete; + MessageCodec& operator=(MessageCodec const&) = delete; + + // Returns the message encoded in |binary_message|, or nullptr if it cannot be + // decoded by this codec. + std::unique_ptr DecodeMessage(const uint8_t* binary_message, + const size_t message_size) const { + return std::move(DecodeMessageInternal(binary_message, message_size)); + } + + // Returns the message encoded in |binary_message|, or nullptr if it cannot be + // decoded by this codec. + std::unique_ptr DecodeMessage( + const std::vector& binary_message) const { + size_t size = binary_message.size(); + const uint8_t* data = size > 0 ? &binary_message[0] : nullptr; + return std::move(DecodeMessageInternal(data, size)); + } + + // Returns a binary encoding of the given |message|, or nullptr if the + // message cannot be serialized by this codec. + std::unique_ptr> EncodeMessage(const T& message) const { + return std::move(EncodeMessageInternal(message)); + } + + protected: + // Implementation of the public interface, to be provided by subclasses. + virtual std::unique_ptr DecodeMessageInternal( + const uint8_t* binary_message, + const size_t message_size) const = 0; + + // Implementation of the public interface, to be provided by subclasses. 
+ virtual std::unique_ptr> EncodeMessageInternal( + const T& message) const = 0; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_MESSAGE_CODEC_H_ diff --git a/linux/flutter/include/flutter/method_call.h b/linux/flutter/include/flutter/method_call.h new file mode 100644 index 0000000000..f9a9c287ed --- /dev/null +++ b/linux/flutter/include/flutter/method_call.h @@ -0,0 +1,43 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TYPED_METHOD_CALL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TYPED_METHOD_CALL_H_ + +#include +#include + +namespace flutter { + +class EncodableValue; + +// An object encapsulating a method call from Flutter whose arguments are of +// type T. +template +class MethodCall { + public: + // Creates a MethodCall with the given name and arguments. + MethodCall(const std::string& method_name, std::unique_ptr arguments) + : method_name_(method_name), arguments_(std::move(arguments)) {} + + virtual ~MethodCall() = default; + + // Prevent copying. + MethodCall(MethodCall const&) = delete; + MethodCall& operator=(MethodCall const&) = delete; + + // The name of the method being called. + const std::string& method_name() const { return method_name_; } + + // The arguments to the method call, or NULL if there are none. 
+  const T* arguments() const { return arguments_.get(); }
+
+ private:
+  std::string method_name_;
+  std::unique_ptr<T> arguments_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TYPED_METHOD_CALL_H_
diff --git a/linux/flutter/include/flutter/method_channel.h b/linux/flutter/include/flutter/method_channel.h
new file mode 100644
index 0000000000..e9ed6161c4
--- /dev/null
+++ b/linux/flutter/include/flutter/method_channel.h
@@ -0,0 +1,132 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CHANNEL_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CHANNEL_H_
+
+#include <iostream>
+#include <string>
+
+#include "binary_messenger.h"
+#include "engine_method_result.h"
+#include "method_call.h"
+#include "method_codec.h"
+#include "method_result.h"
+
+namespace flutter {
+
+class EncodableValue;
+
+// A handler for receiving a method call from the Flutter engine.
+//
+// Implementations must asynchronously call exactly one of the methods on
+// |result| to indicate the result of the method call.
+template <typename T>
+using MethodCallHandler =
+    std::function<void(const MethodCall<T>& call,
+                       std::unique_ptr<MethodResult<T>> result)>;
+
+// A channel for communicating with the Flutter engine using invocation of
+// asynchronous methods.
+template <typename T>
+class MethodChannel {
+ public:
+  // Creates an instance that sends and receives method calls on the channel
+  // named |name|, encoded with |codec| and dispatched via |messenger|.
+  MethodChannel(BinaryMessenger* messenger,
+                const std::string& name,
+                const MethodCodec<T>* codec)
+      : messenger_(messenger), name_(name), codec_(codec) {}
+
+  ~MethodChannel() = default;
+
+  // Prevent copying.
+  MethodChannel(MethodChannel const&) = delete;
+  MethodChannel& operator=(MethodChannel const&) = delete;
+
+  // Sends a message to the Flutter engine on this channel.
+  //
+  // If |result| is provided, one of its methods will be invoked with the
+  // response from the engine.
+  void InvokeMethod(const std::string& method,
+                    std::unique_ptr<T> arguments,
+                    std::unique_ptr<MethodResult<T>> result = nullptr) {
+    MethodCall<T> method_call(method, std::move(arguments));
+    std::unique_ptr<std::vector<uint8_t>> message =
+        codec_->EncodeMethodCall(method_call);
+    if (!result) {
+      messenger_->Send(name_, message->data(), message->size(), nullptr);
+      return;
+    }
+
+    // std::function requires a copyable lambda, so convert to a shared pointer.
+    // This is safe since only one copy of the shared_pointer will ever be
+    // accessed.
+    std::shared_ptr<MethodResult<T>> shared_result(result.release());
+    const auto* codec = codec_;
+    std::string channel_name = name_;
+    BinaryReply reply_handler = [shared_result, codec, channel_name](
+                                    const uint8_t* reply, size_t reply_size) {
+      if (reply_size == 0) {
+        shared_result->NotImplemented();
+        return;
+      }
+      // Use this channel's codec to decode and handle the
+      // reply.
+      bool decoded = codec->DecodeAndProcessResponseEnvelope(
+          reply, reply_size, shared_result.get());
+      if (!decoded) {
+        std::cerr << "Unable to decode reply to method "
+                     "invocation on channel "
+                  << channel_name << std::endl;
+        shared_result->NotImplemented();
+      }
+    };
+
+    messenger_->Send(name_, message->data(), message->size(),
+                     std::move(reply_handler));
+  }
+
+  // Registers a handler that should be called any time a method call is
+  // received on this channel. A null handler will remove any previous handler.
+  //
+  // Note that the MethodChannel does not own the handler, and will not
+  // unregister it on destruction, so the caller is responsible for
+  // unregistering explicitly if it should no longer be called.
+  void SetMethodCallHandler(MethodCallHandler<T> handler) const {
+    if (!handler) {
+      messenger_->SetMessageHandler(name_, nullptr);
+      return;
+    }
+    const auto* codec = codec_;
+    std::string channel_name = name_;
+    BinaryMessageHandler binary_handler = [handler, codec, channel_name](
+                                              const uint8_t* message,
+                                              size_t message_size,
+                                              BinaryReply reply) {
+      // Use this channel's codec to decode the call and build a result handler.
+      auto result =
+          std::make_unique<EngineMethodResult<T>>(std::move(reply), codec);
+      std::unique_ptr<MethodCall<T>> method_call =
+          codec->DecodeMethodCall(message, message_size);
+      if (!method_call) {
+        std::cerr << "Unable to construct method call from message on channel "
+                  << channel_name << std::endl;
+        result->NotImplemented();
+        return;
+      }
+      handler(*method_call, std::move(result));
+    };
+    messenger_->SetMessageHandler(name_, std::move(binary_handler));
+  }
+
+ private:
+  BinaryMessenger* messenger_;
+  std::string name_;
+  const MethodCodec<T>* codec_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CHANNEL_H_
diff --git a/linux/flutter/include/flutter/method_codec.h b/linux/flutter/include/flutter/method_codec.h
new file mode 100644
index 0000000000..b40fa640c9
--- /dev/null
+++ b/linux/flutter/include/flutter/method_codec.h
@@ -0,0 +1,111 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CODEC_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CODEC_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "method_call.h"
+#include "method_result.h"
+
+namespace flutter {
+
+// Translates between a binary message and higher-level method call and
+// response/error objects.
+template <typename T>
+class MethodCodec {
+ public:
+  MethodCodec() = default;
+
+  virtual ~MethodCodec() = default;
+
+  // Prevent copying.
+  MethodCodec(MethodCodec<T> const&) = delete;
+  MethodCodec& operator=(MethodCodec<T> const&) = delete;
+
+  // Returns the MethodCall encoded in |message|, or nullptr if it cannot be
+  // decoded.
+  std::unique_ptr<MethodCall<T>> DecodeMethodCall(const uint8_t* message,
+                                                  size_t message_size) const {
+    return std::move(DecodeMethodCallInternal(message, message_size));
+  }
+
+  // Returns the MethodCall encoded in |message|, or nullptr if it cannot be
+  // decoded.
+  std::unique_ptr<MethodCall<T>> DecodeMethodCall(
+      const std::vector<uint8_t>& message) const {
+    size_t size = message.size();
+    const uint8_t* data = size > 0 ? &message[0] : nullptr;
+    return std::move(DecodeMethodCallInternal(data, size));
+  }
+
+  // Returns a binary encoding of the given |method_call|, or nullptr if the
+  // method call cannot be serialized by this codec.
+  std::unique_ptr<std::vector<uint8_t>> EncodeMethodCall(
+      const MethodCall<T>& method_call) const {
+    return std::move(EncodeMethodCallInternal(method_call));
+  }
+
+  // Returns a binary encoding of |result|. |result| must be a type supported
+  // by the codec.
+  std::unique_ptr<std::vector<uint8_t>> EncodeSuccessEnvelope(
+      const T* result = nullptr) const {
+    return std::move(EncodeSuccessEnvelopeInternal(result));
+  }
+
+  // Returns a binary encoding of |error|. The |error_details| must be a type
+  // supported by the codec.
+  std::unique_ptr<std::vector<uint8_t>> EncodeErrorEnvelope(
+      const std::string& error_code,
+      const std::string& error_message = "",
+      const T* error_details = nullptr) const {
+    return std::move(
+        EncodeErrorEnvelopeInternal(error_code, error_message, error_details));
+  }
+
+  // Decodes the response envelope encoded in |response|, calling the
+  // appropriate method on |result|.
+  //
+  // Returns false if |response| cannot be decoded. In that case the caller is
+  // responsible for calling a |result| method.
+  bool DecodeAndProcessResponseEnvelope(const uint8_t* response,
+                                        size_t response_size,
+                                        MethodResult<T>* result) const {
+    return DecodeAndProcessResponseEnvelopeInternal(response, response_size,
+                                                    result);
+  }
+
+ protected:
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<MethodCall<T>> DecodeMethodCallInternal(
+      const uint8_t* message,
+      size_t message_size) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<std::vector<uint8_t>> EncodeMethodCallInternal(
+      const MethodCall<T>& method_call) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<std::vector<uint8_t>> EncodeSuccessEnvelopeInternal(
+      const T* result) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual std::unique_ptr<std::vector<uint8_t>> EncodeErrorEnvelopeInternal(
+      const std::string& error_code,
+      const std::string& error_message,
+      const T* error_details) const = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual bool DecodeAndProcessResponseEnvelopeInternal(
+      const uint8_t* response,
+      size_t response_size,
+      MethodResult<T>* result) const = 0;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_CODEC_H_
diff --git a/linux/flutter/include/flutter/method_result.h b/linux/flutter/include/flutter/method_result.h
new file mode 100644
index 0000000000..05c7fe9687
--- /dev/null
+++ b/linux/flutter/include/flutter/method_result.h
@@ -0,0 +1,76 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_H_
+
+#include <string>
+
+namespace flutter {
+
+class EncodableValue;
+
+// Encapsulates a result returned from a MethodCall. Only one method should be
+// called on any given instance.
+template <typename T>
+class MethodResult {
+ public:
+  MethodResult() = default;
+
+  virtual ~MethodResult() = default;
+
+  // Prevent copying.
+  MethodResult(MethodResult const&) = delete;
+  MethodResult& operator=(MethodResult const&) = delete;
+
+  // Sends a success response, indicating that the call completed successfully
+  // with the given result.
+  void Success(const T& result) { SuccessInternal(&result); }
+
+  // Sends a success response, indicating that the call completed successfully
+  // with no result.
+  void Success() { SuccessInternal(nullptr); }
+
+  // Sends an error response, indicating that the call was understood but
+  // handling failed in some way.
+  //
+  // error_code: A string error code describing the error.
+  // error_message: A user-readable error message.
+  // error_details: Arbitrary extra details about the error.
+  void Error(const std::string& error_code,
+             const std::string& error_message,
+             const T& error_details) {
+    ErrorInternal(error_code, error_message, &error_details);
+  }
+
+  // Sends an error response, indicating that the call was understood but
+  // handling failed in some way.
+  //
+  // error_code: A string error code describing the error.
+  // error_message: A user-readable error message (optional).
+  void Error(const std::string& error_code,
+             const std::string& error_message = "") {
+    ErrorInternal(error_code, error_message, nullptr);
+  }
+
+  // Sends a not-implemented response, indicating that the method either was not
+  // recognized, or has not been implemented.
+  void NotImplemented() { NotImplementedInternal(); }
+
+ protected:
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void SuccessInternal(const T* result) = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void ErrorInternal(const std::string& error_code,
+                             const std::string& error_message,
+                             const T* error_details) = 0;
+
+  // Implementation of the public interface, to be provided by subclasses.
+  virtual void NotImplementedInternal() = 0;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_H_
diff --git a/linux/flutter/include/flutter/method_result_functions.h b/linux/flutter/include/flutter/method_result_functions.h
new file mode 100644
index 0000000000..a19cc349d1
--- /dev/null
+++ b/linux/flutter/include/flutter/method_result_functions.h
@@ -0,0 +1,79 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
+#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
+
+#include <functional>
+#include <string>
+
+#include "method_result.h"
+
+namespace flutter {
+
+class EncodableValue;
+
+// Handler types for each of the MethodResult outcomes.
+template <typename T>
+using ResultHandlerSuccess = std::function<void(const T* result)>;
+template <typename T>
+using ResultHandlerError = std::function<void(const std::string& error_code,
+                                              const std::string& error_message,
+                                              const T* error_details)>;
+template <typename T>
+using ResultHandlerNotImplemented = std::function<void()>;
+
+// An implementation of MethodResult that pass calls through to provided
+// function objects, for ease of constructing one-off result handlers.
+template <typename T>
+class MethodResultFunctions : public MethodResult<T> {
+ public:
+  // Creates a result object that calls the provided functions for the
+  // corresponding MethodResult outcomes.
+  MethodResultFunctions(ResultHandlerSuccess<T> on_success,
+                        ResultHandlerError<T> on_error,
+                        ResultHandlerNotImplemented<T> on_not_implemented)
+      : on_success_(on_success),
+        on_error_(on_error),
+        on_not_implemented_(on_not_implemented) {}
+
+  virtual ~MethodResultFunctions() = default;
+
+  // Prevent copying.
+  MethodResultFunctions(MethodResultFunctions const&) = delete;
+  MethodResultFunctions& operator=(MethodResultFunctions const&) = delete;
+
+ protected:
+  // |flutter::MethodResult|
+  void SuccessInternal(const T* result) override {
+    if (on_success_) {
+      on_success_(result);
+    }
+  }
+
+  // |flutter::MethodResult|
+  void ErrorInternal(const std::string& error_code,
+                     const std::string& error_message,
+                     const T* error_details) override {
+    if (on_error_) {
+      on_error_(error_code, error_message, error_details);
+    }
+  }
+
+  // |flutter::MethodResult|
+  void NotImplementedInternal() override {
+    if (on_not_implemented_) {
+      on_not_implemented_();
+    }
+  }
+
+ private:
+  ResultHandlerSuccess<T> on_success_;
+  ResultHandlerError<T> on_error_;
+  ResultHandlerNotImplemented<T> on_not_implemented_;
+};
+
+}  // namespace flutter
+
+#endif  // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
diff --git a/linux/flutter/include/flutter/plugin_registrar.h b/linux/flutter/include/flutter/plugin_registrar.h
new file mode 100644
index 0000000000..26a6c3b7ed
--- /dev/null
+++ b/linux/flutter/include/flutter/plugin_registrar.h
@@ -0,0 +1,131 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_PLUGIN_REGISTRAR_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_PLUGIN_REGISTRAR_H_ + +#include + +#include +#include +#include +#include + +#include "binary_messenger.h" +#include "texture_registrar.h" + +namespace flutter { + +class Plugin; + +// A object managing the registration of a plugin for various events. +// +// Currently this class has very limited functionality, but is expected to +// expand over time to more closely match the functionality of +// the Flutter mobile plugin APIs' plugin registrars. +class PluginRegistrar { + public: + // Creates a new PluginRegistrar. |core_registrar| and the messenger it + // provides must remain valid as long as this object exists. + explicit PluginRegistrar(FlPluginRegistrar* core_registrar); + + virtual ~PluginRegistrar(); + + // Prevent copying. + PluginRegistrar(PluginRegistrar const&) = delete; + PluginRegistrar& operator=(PluginRegistrar const&) = delete; + + // Returns the messenger to use for creating channels to communicate with the + // Flutter engine. + // + // This pointer will remain valid for the lifetime of this instance. + BinaryMessenger* messenger() { return messenger_.get(); } + + // Returns the texture registrar to use for the plugin to render a pixel + // buffer. + TextureRegistrar* texture_registrar() { return texture_registrar_.get(); } + + // Takes ownership of |plugin|. + // + // Plugins are not required to call this method if they have other lifetime + // management, but this is a convient place for plugins to be owned to ensure + // that they stay valid for any registered callbacks. + void AddPlugin(std::unique_ptr plugin); + + protected: + FlPluginRegistrar* registrar() { return registrar_; } + + // Destroys all owned plugins. 
Subclasses should call this at the beginning of + // their destructors to prevent the possibility of an owned plugin trying to + // access destroyed state during its own destruction. + void ClearPlugins(); + + private: + // Handle for interacting with the C API's registrar. + FlPluginRegistrar* registrar_; + + std::unique_ptr messenger_; + + std::unique_ptr texture_registrar_; + + // Plugins registered for ownership. + std::set> plugins_; +}; + +// A plugin that can be registered for ownership by a PluginRegistrar. +class Plugin { + public: + virtual ~Plugin() = default; +}; + +// A singleton to own PluginRegistrars. This is intended for use in plugins, +// where there is no higher-level object to own a PluginRegistrar that can +// own plugin instances and ensure that they live as long as the engine they +// are registered with. +class PluginRegistrarManager { + public: + static PluginRegistrarManager* GetInstance(); + + // Prevent copying. + PluginRegistrarManager(PluginRegistrarManager const&) = delete; + PluginRegistrarManager& operator=(PluginRegistrarManager const&) = delete; + + // Returns a plugin registrar wrapper of type T, which must be a kind of + // PluginRegistrar, creating it if necessary. The returned registrar will + // live as long as the underlying FlutterDesktopPluginRegistrarRef, so + // can be used to own plugin instances. + // + // Calling this multiple times for the same registrar_ref with different + // template types results in undefined behavior. + template + T* GetRegistrar(FlPluginRegistrar* registrar_ref) { + auto insert_result = + registrars_.emplace(registrar_ref, std::make_unique(registrar_ref)); + auto& registrar_pair = *(insert_result.first); + FlutterDesktopPluginRegistrarSetDestructionHandler(registrar_pair.first, + OnRegistrarDestroyed); + return static_cast(registrar_pair.second.get()); + } + + // Destroys all registrar wrappers created by the manager. + // + // This is intended primarily for use in tests. 
+ void Reset() { registrars_.clear(); } + + private: + PluginRegistrarManager(); + + using WrapperMap = + std::map>; + + static void OnRegistrarDestroyed(FlPluginRegistrar* registrar); + + WrapperMap* registrars() { return ®istrars_; } + + WrapperMap registrars_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_PLUGIN_REGISTRAR_H_ diff --git a/linux/flutter/include/flutter/standard_codec_serializer.h b/linux/flutter/include/flutter/standard_codec_serializer.h new file mode 100644 index 0000000000..a6001037f9 --- /dev/null +++ b/linux/flutter/include/flutter/standard_codec_serializer.h @@ -0,0 +1,76 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_CODEC_SERIALIZER_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_CODEC_SERIALIZER_H_ + +#include "byte_streams.h" +#include "encodable_value.h" + +namespace flutter { + +// Encapsulates the logic for encoding/decoding EncodableValues to/from the +// standard codec binary representation. +// +// This can be subclassed to extend the standard codec with support for new +// types. +class StandardCodecSerializer { + public: + virtual ~StandardCodecSerializer(); + + // Returns the shared serializer instance. + static const StandardCodecSerializer& GetInstance(); + + // Prevent copying. + StandardCodecSerializer(StandardCodecSerializer const&) = delete; + StandardCodecSerializer& operator=(StandardCodecSerializer const&) = delete; + + // Reads and returns the next value from |stream|. + EncodableValue ReadValue(ByteStreamReader* stream) const; + + // Writes the encoding of |value| to |stream|, including the initial type + // discrimination byte. + // + // Can be overridden by a subclass to extend the codec. 
+ virtual void WriteValue(const EncodableValue& value, + ByteStreamWriter* stream) const; + + protected: + // Codecs require long-lived serializers, so clients should always use + // GetInstance(). + StandardCodecSerializer(); + + // Reads and returns the next value from |stream|, whose discrimination byte + // was |type|. + // + // The discrimination byte will already have been read from the stream when + // this is called. + // + // Can be overridden by a subclass to extend the codec. + virtual EncodableValue ReadValueOfType(uint8_t type, + ByteStreamReader* stream) const; + + // Reads the variable-length size from the current position in |stream|. + size_t ReadSize(ByteStreamReader* stream) const; + + // Writes the variable-length size encoding to |stream|. + void WriteSize(size_t size, ByteStreamWriter* stream) const; + + private: + // Reads a fixed-type list whose values are of type T from the current + // position in |stream|, and returns it as the corresponding EncodableValue. + // |T| must correspond to one of the supported list value types of + // EncodableValue. + template + EncodableValue ReadVector(ByteStreamReader* stream) const; + + // Writes |vector| to |stream| as a fixed-type list. |T| must correspond to + // one of the supported list value types of EncodableValue. + template + void WriteVector(const std::vector vector, ByteStreamWriter* stream) const; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_CODEC_SERIALIZER_H_ diff --git a/linux/flutter/include/flutter/standard_message_codec.h b/linux/flutter/include/flutter/standard_message_codec.h new file mode 100644 index 0000000000..568b3917b3 --- /dev/null +++ b/linux/flutter/include/flutter/standard_message_codec.h @@ -0,0 +1,57 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_MESSAGE_CODEC_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_MESSAGE_CODEC_H_ + +#include + +#include "encodable_value.h" +#include "message_codec.h" +#include "standard_codec_serializer.h" + +namespace flutter { + +// A binary message encoding/decoding mechanism for communications to/from the +// Flutter engine via message channels. +class StandardMessageCodec : public MessageCodec { + public: + // Returns an instance of the codec, optionally using a custom serializer to + // add support for more types. + // + // If provided, |serializer| must be long-lived. If no serializer is provided, + // the default will be used. + // + // The instance returned for a given |serializer| will be shared, and + // any instance returned from this will be long-lived, and can be safely + // passed to, e.g., channel constructors. + static const StandardMessageCodec& GetInstance( + const StandardCodecSerializer* serializer = nullptr); + + ~StandardMessageCodec(); + + // Prevent copying. + StandardMessageCodec(StandardMessageCodec const&) = delete; + StandardMessageCodec& operator=(StandardMessageCodec const&) = delete; + + protected: + // |flutter::MessageCodec| + std::unique_ptr DecodeMessageInternal( + const uint8_t* binary_message, + const size_t message_size) const override; + + // |flutter::MessageCodec| + std::unique_ptr> EncodeMessageInternal( + const EncodableValue& message) const override; + + private: + // Instances should be obtained via GetInstance. 
+ explicit StandardMessageCodec(const StandardCodecSerializer* serializer); + + const StandardCodecSerializer* serializer_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_MESSAGE_CODEC_H_ diff --git a/linux/flutter/include/flutter/standard_method_codec.h b/linux/flutter/include/flutter/standard_method_codec.h new file mode 100644 index 0000000000..b6db35ec97 --- /dev/null +++ b/linux/flutter/include/flutter/standard_method_codec.h @@ -0,0 +1,73 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_METHOD_CODEC_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_METHOD_CODEC_H_ + +#include + +#include "encodable_value.h" +#include "method_call.h" +#include "method_codec.h" +#include "standard_codec_serializer.h" + +namespace flutter { + +// An implementation of MethodCodec that uses a binary serialization. +class StandardMethodCodec : public MethodCodec { + public: + // Returns an instance of the codec, optionally using a custom serializer to + // add support for more types. + // + // If provided, |serializer| must be long-lived. If no serializer is provided, + // the default will be used. + // + // The instance returned for a given |extension| will be shared, and + // any instance returned from this will be long-lived, and can be safely + // passed to, e.g., channel constructors. + static const StandardMethodCodec& GetInstance( + const StandardCodecSerializer* serializer = nullptr); + + ~StandardMethodCodec(); + + // Prevent copying. 
+ StandardMethodCodec(StandardMethodCodec const&) = delete; + StandardMethodCodec& operator=(StandardMethodCodec const&) = delete; + + protected: + // |flutter::MethodCodec| + std::unique_ptr> DecodeMethodCallInternal( + const uint8_t* message, + size_t message_size) const override; + + // |flutter::MethodCodec| + std::unique_ptr> EncodeMethodCallInternal( + const MethodCall& method_call) const override; + + // |flutter::MethodCodec| + std::unique_ptr> EncodeSuccessEnvelopeInternal( + const EncodableValue* result) const override; + + // |flutter::MethodCodec| + std::unique_ptr> EncodeErrorEnvelopeInternal( + const std::string& error_code, + const std::string& error_message, + const EncodableValue* error_details) const override; + + // |flutter::MethodCodec| + bool DecodeAndProcessResponseEnvelopeInternal( + const uint8_t* response, + size_t response_size, + MethodResult* result) const override; + + private: + // Instances should be obtained via GetInstance. + explicit StandardMethodCodec(const StandardCodecSerializer* serializer); + + const StandardCodecSerializer* serializer_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_STANDARD_METHOD_CODEC_H_ diff --git a/linux/flutter/include/flutter/texture_registrar.h b/linux/flutter/include/flutter/texture_registrar.h new file mode 100644 index 0000000000..10d0111769 --- /dev/null +++ b/linux/flutter/include/flutter/texture_registrar.h @@ -0,0 +1,89 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_ + +//#include + +#include +#include +#include +#include + +// An image buffer object. +typedef struct { + // The pixel data buffer. 
+ const uint8_t* buffer; + // Width of the pixel buffer. + size_t width; + // Height of the pixel buffer. + size_t height; + // An optional callback that gets invoked when the |buffer| can be released. + void (*release_callback)(void* release_context); + // Opaque data passed to |release_callback|. + void* release_context; +} FlutterDesktopPixelBuffer; + +namespace flutter { + +// A pixel buffer texture. +class PixelBufferTexture { + public: + // A callback used for retrieving pixel buffers. + typedef std::function + CopyBufferCallback; + + // Creates a pixel buffer texture that uses the provided |copy_buffer_cb| to + // retrieve the buffer. + // As the callback is usually invoked from the render thread, the callee must + // take care of proper synchronization. It also needs to be ensured that the + // returned buffer isn't released prior to unregistering this texture. + explicit PixelBufferTexture(CopyBufferCallback copy_buffer_callback) + : copy_buffer_callback_(copy_buffer_callback) {} + + // Returns the callback-provided FlutterDesktopPixelBuffer that contains the + // actual pixel data. The intended surface size is specified by |width| and + // |height|. + const FlutterDesktopPixelBuffer* CopyPixelBuffer(size_t width, + size_t height) const { + return copy_buffer_callback_(width, height); + } + + private: + const CopyBufferCallback copy_buffer_callback_; +}; + +// The available texture variants. +// Only PixelBufferTexture is currently implemented. +// Other variants are expected to be added in the future. +typedef std::variant TextureVariant; + +// An object keeping track of external textures. +// +// Thread safety: +// It's safe to call the member methods from any thread. +class TextureRegistrar { + public: + virtual ~TextureRegistrar() = default; + + // Registers a |texture| object and returns the ID for that texture. 
+ virtual int64_t RegisterTexture(TextureVariant* texture) = 0; + + // Notifies the flutter engine that the texture object corresponding + // to |texure_id| needs to render a new frame. + // + // For PixelBufferTextures, this will effectively make the engine invoke + // the callback that was provided upon creating the texture. + virtual bool MarkTextureFrameAvailable(int64_t texture_id) = 0; + + // Unregisters an existing Texture object. + // Textures must not be unregistered while they're in use. + virtual bool UnregisterTexture(int64_t texture_id) = 0; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_TEXTURE_REGISTRAR_H_ diff --git a/linux/flutter/plugin_registrar.cc b/linux/flutter/plugin_registrar.cc new file mode 100644 index 0000000000..5e61d9042c --- /dev/null +++ b/linux/flutter/plugin_registrar.cc @@ -0,0 +1,60 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "include/flutter/plugin_registrar.h" + +#include +#include + +#include "binary_messenger_impl.h" +#include "include/flutter/engine_method_result.h" +#include "include/flutter/method_channel.h" +#include "texture_registrar_impl.h" + +namespace flutter { + +// ===== PluginRegistrar ===== + +PluginRegistrar::PluginRegistrar(FlPluginRegistrar* registrar) + : registrar_(registrar) { + auto core_messenger = fl_plugin_registrar_get_messenger(registrar); + messenger_ = std::make_unique(core_messenger); + auto texture_registrar = fl_plugin_registrar_get_texture_registrar(registrar); + texture_registrar_ = + std::make_unique(texture_registrar); +} + +PluginRegistrar::~PluginRegistrar() { + // This must always be the first call. + ClearPlugins(); + + // Explicitly cleared to facilitate testing of destruction order. 
+ messenger_.reset(); +} + +void PluginRegistrar::AddPlugin(std::unique_ptr plugin) { + plugins_.insert(std::move(plugin)); +} + +void PluginRegistrar::ClearPlugins() { + plugins_.clear(); +} + +// ===== PluginRegistrarManager ===== + +// static +PluginRegistrarManager* PluginRegistrarManager::GetInstance() { + static PluginRegistrarManager* instance = new PluginRegistrarManager(); + return instance; +} + +PluginRegistrarManager::PluginRegistrarManager() = default; + +// static +void PluginRegistrarManager::OnRegistrarDestroyed( + FlPluginRegistrar* registrar) { + GetInstance()->registrars()->erase(registrar); +} + +} // namespace flutter diff --git a/linux/flutter/standard_codec.cc b/linux/flutter/standard_codec.cc new file mode 100644 index 0000000000..807e06816b --- /dev/null +++ b/linux/flutter/standard_codec.cc @@ -0,0 +1,466 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file contains what would normally be standard_codec_serializer.cc, +// standard_message_codec.cc, and standard_method_codec.cc. They are grouped +// together to simplify use of the client wrapper, since the common case is +// that any client that needs one of these files needs all three. + +#include +#include +#include +#include +#include +#include + +#include "byte_buffer_streams.h" +#include "include/flutter/standard_codec_serializer.h" +#include "include/flutter/standard_message_codec.h" +#include "include/flutter/standard_method_codec.h" + +namespace flutter { + +// ===== standard_codec_serializer.h ===== + +namespace { + +// The order/values here must match the constants in message_codecs.dart. +enum class EncodedType { + kNull = 0, + kTrue, + kFalse, + kInt32, + kInt64, + kLargeInt, // No longer used. If encountered, treat as kString. 
+ kFloat64, + kString, + kUInt8List, + kInt32List, + kInt64List, + kFloat64List, + kList, + kMap, + kFloat32List, +}; + +// Returns the encoded type that should be written when serializing |value|. +EncodedType EncodedTypeForValue(const EncodableValue& value) { + switch (value.index()) { + case 0: + return EncodedType::kNull; + case 1: + return std::get(value) ? EncodedType::kTrue : EncodedType::kFalse; + case 2: + return EncodedType::kInt32; + case 3: + return EncodedType::kInt64; + case 4: + return EncodedType::kFloat64; + case 5: + return EncodedType::kString; + case 6: + return EncodedType::kUInt8List; + case 7: + return EncodedType::kInt32List; + case 8: + return EncodedType::kInt64List; + case 9: + return EncodedType::kFloat64List; + case 10: + return EncodedType::kList; + case 11: + return EncodedType::kMap; + case 13: + return EncodedType::kFloat32List; + } + assert(false); + return EncodedType::kNull; +} + +} // namespace + +StandardCodecSerializer::StandardCodecSerializer() = default; + +StandardCodecSerializer::~StandardCodecSerializer() = default; + +const StandardCodecSerializer& StandardCodecSerializer::GetInstance() { + static StandardCodecSerializer sInstance; + return sInstance; +}; + +EncodableValue StandardCodecSerializer::ReadValue( + ByteStreamReader* stream) const { + uint8_t type = stream->ReadByte(); + return ReadValueOfType(type, stream); +} + +void StandardCodecSerializer::WriteValue(const EncodableValue& value, + ByteStreamWriter* stream) const { + stream->WriteByte(static_cast(EncodedTypeForValue(value))); + // TODO: Consider replacing this this with a std::visitor. + switch (value.index()) { + case 0: + case 1: + // Null and bool are encoded directly in the type. 
+ break; + case 2: + stream->WriteInt32(std::get(value)); + break; + case 3: + stream->WriteInt64(std::get(value)); + break; + case 4: + stream->WriteAlignment(8); + stream->WriteDouble(std::get(value)); + break; + case 5: { + const auto& string_value = std::get(value); + size_t size = string_value.size(); + WriteSize(size, stream); + if (size > 0) { + stream->WriteBytes( + reinterpret_cast(string_value.data()), size); + } + break; + } + case 6: + WriteVector(std::get>(value), stream); + break; + case 7: + WriteVector(std::get>(value), stream); + break; + case 8: + WriteVector(std::get>(value), stream); + break; + case 9: + WriteVector(std::get>(value), stream); + break; + case 10: { + const auto& list = std::get(value); + WriteSize(list.size(), stream); + for (const auto& item : list) { + WriteValue(item, stream); + } + break; + } + case 11: { + const auto& map = std::get(value); + WriteSize(map.size(), stream); + for (const auto& pair : map) { + WriteValue(pair.first, stream); + WriteValue(pair.second, stream); + } + break; + } + case 12: + std::cerr + << "Unhandled custom type in StandardCodecSerializer::WriteValue. " + << "Custom types require codec extensions." 
<< std::endl; + break; + case 13: { + WriteVector(std::get>(value), stream); + break; + } + } +} + +EncodableValue StandardCodecSerializer::ReadValueOfType( + uint8_t type, + ByteStreamReader* stream) const { + switch (static_cast(type)) { + case EncodedType::kNull: + return EncodableValue(); + case EncodedType::kTrue: + return EncodableValue(true); + case EncodedType::kFalse: + return EncodableValue(false); + case EncodedType::kInt32: + return EncodableValue(stream->ReadInt32()); + case EncodedType::kInt64: + return EncodableValue(stream->ReadInt64()); + case EncodedType::kFloat64: + stream->ReadAlignment(8); + return EncodableValue(stream->ReadDouble()); + case EncodedType::kLargeInt: + case EncodedType::kString: { + size_t size = ReadSize(stream); + std::string string_value; + string_value.resize(size); + stream->ReadBytes(reinterpret_cast(&string_value[0]), size); + return EncodableValue(string_value); + } + case EncodedType::kUInt8List: + return ReadVector(stream); + case EncodedType::kInt32List: + return ReadVector(stream); + case EncodedType::kInt64List: + return ReadVector(stream); + case EncodedType::kFloat64List: + return ReadVector(stream); + case EncodedType::kList: { + size_t length = ReadSize(stream); + EncodableList list_value; + list_value.reserve(length); + for (size_t i = 0; i < length; ++i) { + list_value.push_back(ReadValue(stream)); + } + return EncodableValue(list_value); + } + case EncodedType::kMap: { + size_t length = ReadSize(stream); + EncodableMap map_value; + for (size_t i = 0; i < length; ++i) { + EncodableValue key = ReadValue(stream); + EncodableValue value = ReadValue(stream); + map_value.emplace(std::move(key), std::move(value)); + } + return EncodableValue(map_value); + } + case EncodedType::kFloat32List: { + return ReadVector(stream); + } + } + std::cerr << "Unknown type in StandardCodecSerializer::ReadValueOfType: " + << static_cast(type) << std::endl; + return EncodableValue(); +} + +size_t 
StandardCodecSerializer::ReadSize(ByteStreamReader* stream) const { + uint8_t byte = stream->ReadByte(); + if (byte < 254) { + return byte; + } else if (byte == 254) { + uint16_t value = 0; + stream->ReadBytes(reinterpret_cast(&value), 2); + return value; + } else { + uint32_t value = 0; + stream->ReadBytes(reinterpret_cast(&value), 4); + return value; + } +} + +void StandardCodecSerializer::WriteSize(size_t size, + ByteStreamWriter* stream) const { + if (size < 254) { + stream->WriteByte(static_cast(size)); + } else if (size <= 0xffff) { + stream->WriteByte(254); + uint16_t value = static_cast(size); + stream->WriteBytes(reinterpret_cast(&value), 2); + } else { + stream->WriteByte(255); + uint32_t value = static_cast(size); + stream->WriteBytes(reinterpret_cast(&value), 4); + } +} + +template +EncodableValue StandardCodecSerializer::ReadVector( + ByteStreamReader* stream) const { + size_t count = ReadSize(stream); + std::vector vector; + vector.resize(count); + uint8_t type_size = static_cast(sizeof(T)); + if (type_size > 1) { + stream->ReadAlignment(type_size); + } + stream->ReadBytes(reinterpret_cast(vector.data()), + count * type_size); + return EncodableValue(vector); +} + +template +void StandardCodecSerializer::WriteVector(const std::vector vector, + ByteStreamWriter* stream) const { + size_t count = vector.size(); + WriteSize(count, stream); + if (count == 0) { + return; + } + uint8_t type_size = static_cast(sizeof(T)); + if (type_size > 1) { + stream->WriteAlignment(type_size); + } + stream->WriteBytes(reinterpret_cast(vector.data()), + count * type_size); +} + +// ===== standard_message_codec.h ===== + +// static +const StandardMessageCodec& StandardMessageCodec::GetInstance( + const StandardCodecSerializer* serializer) { + if (!serializer) { + serializer = &StandardCodecSerializer::GetInstance(); + } + static auto* sInstances = new std::map>; + auto it = sInstances->find(serializer); + if (it == sInstances->end()) { + // Uses new due to private 
constructor (to prevent API clients from + // accidentally passing temporary codec instances to channels). + auto emplace_result = sInstances->emplace( + serializer, std::unique_ptr( + new StandardMessageCodec(serializer))); + it = emplace_result.first; + } + return *(it->second); +} + +StandardMessageCodec::StandardMessageCodec( + const StandardCodecSerializer* serializer) + : serializer_(serializer) {} + +StandardMessageCodec::~StandardMessageCodec() = default; + +std::unique_ptr StandardMessageCodec::DecodeMessageInternal( + const uint8_t* binary_message, + size_t message_size) const { + if (!binary_message) { + return std::make_unique(); + } + ByteBufferStreamReader stream(binary_message, message_size); + return std::make_unique(serializer_->ReadValue(&stream)); +} + +std::unique_ptr> +StandardMessageCodec::EncodeMessageInternal( + const EncodableValue& message) const { + auto encoded = std::make_unique>(); + ByteBufferStreamWriter stream(encoded.get()); + serializer_->WriteValue(message, &stream); + return encoded; +} + +// ===== standard_method_codec.h ===== + +// static +const StandardMethodCodec& StandardMethodCodec::GetInstance( + const StandardCodecSerializer* serializer) { + if (!serializer) { + serializer = &StandardCodecSerializer::GetInstance(); + } + static auto* sInstances = new std::map>; + auto it = sInstances->find(serializer); + if (it == sInstances->end()) { + // Uses new due to private constructor (to prevent API clients from + // accidentally passing temporary codec instances to channels). 
+ auto emplace_result = sInstances->emplace( + serializer, std::unique_ptr( + new StandardMethodCodec(serializer))); + it = emplace_result.first; + } + return *(it->second); +} + +StandardMethodCodec::StandardMethodCodec( + const StandardCodecSerializer* serializer) + : serializer_(serializer) {} + +StandardMethodCodec::~StandardMethodCodec() = default; + +std::unique_ptr> +StandardMethodCodec::DecodeMethodCallInternal(const uint8_t* message, + size_t message_size) const { + ByteBufferStreamReader stream(message, message_size); + EncodableValue method_name_value = serializer_->ReadValue(&stream); + const auto* method_name = std::get_if(&method_name_value); + if (!method_name) { + std::cerr << "Invalid method call; method name is not a string." + << std::endl; + return nullptr; + } + auto arguments = + std::make_unique(serializer_->ReadValue(&stream)); + return std::make_unique>(*method_name, + std::move(arguments)); +} + +std::unique_ptr> +StandardMethodCodec::EncodeMethodCallInternal( + const MethodCall& method_call) const { + auto encoded = std::make_unique>(); + ByteBufferStreamWriter stream(encoded.get()); + serializer_->WriteValue(EncodableValue(method_call.method_name()), &stream); + if (method_call.arguments()) { + serializer_->WriteValue(*method_call.arguments(), &stream); + } else { + serializer_->WriteValue(EncodableValue(), &stream); + } + return encoded; +} + +std::unique_ptr> +StandardMethodCodec::EncodeSuccessEnvelopeInternal( + const EncodableValue* result) const { + auto encoded = std::make_unique>(); + ByteBufferStreamWriter stream(encoded.get()); + stream.WriteByte(0); + if (result) { + serializer_->WriteValue(*result, &stream); + } else { + serializer_->WriteValue(EncodableValue(), &stream); + } + return encoded; +} + +std::unique_ptr> +StandardMethodCodec::EncodeErrorEnvelopeInternal( + const std::string& error_code, + const std::string& error_message, + const EncodableValue* error_details) const { + auto encoded = std::make_unique>(); + 
ByteBufferStreamWriter stream(encoded.get()); + stream.WriteByte(1); + serializer_->WriteValue(EncodableValue(error_code), &stream); + if (error_message.empty()) { + serializer_->WriteValue(EncodableValue(), &stream); + } else { + serializer_->WriteValue(EncodableValue(error_message), &stream); + } + if (error_details) { + serializer_->WriteValue(*error_details, &stream); + } else { + serializer_->WriteValue(EncodableValue(), &stream); + } + return encoded; +} + +bool StandardMethodCodec::DecodeAndProcessResponseEnvelopeInternal( + const uint8_t* response, + size_t response_size, + MethodResult* result) const { + ByteBufferStreamReader stream(response, response_size); + uint8_t flag = stream.ReadByte(); + switch (flag) { + case 0: { + EncodableValue value = serializer_->ReadValue(&stream); + if (value.IsNull()) { + result->Success(); + } else { + result->Success(value); + } + return true; + } + case 1: { + EncodableValue code = serializer_->ReadValue(&stream); + EncodableValue message = serializer_->ReadValue(&stream); + EncodableValue details = serializer_->ReadValue(&stream); + const std::string& message_string = + message.IsNull() ? "" : std::get(message); + if (details.IsNull()) { + result->Error(std::get(code), message_string); + } else { + result->Error(std::get(code), message_string, details); + } + return true; + } + default: + return false; + } +} + +} // namespace flutter diff --git a/linux/flutter/texture_registrar_impl.h b/linux/flutter/texture_registrar_impl.h new file mode 100644 index 0000000000..8659b73e65 --- /dev/null +++ b/linux/flutter/texture_registrar_impl.h @@ -0,0 +1,42 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_TEXTURE_REGISTRAR_IMPL_H_ +#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_TEXTURE_REGISTRAR_IMPL_H_ + +#include "include/flutter/texture_registrar.h" + +struct FlTextureProxy; + +namespace flutter { + +// Wrapper around a FlTextureRegistrar that implements the +// TextureRegistrar API. +class TextureRegistrarImpl : public TextureRegistrar { + public: + explicit TextureRegistrarImpl(FlTextureRegistrar* texture_registrar_ref); + virtual ~TextureRegistrarImpl(); + + // Prevent copying. + TextureRegistrarImpl(TextureRegistrarImpl const&) = delete; + TextureRegistrarImpl& operator=(TextureRegistrarImpl const&) = delete; + + // |flutter::TextureRegistrar| + int64_t RegisterTexture(TextureVariant* texture) override; + + // |flutter::TextureRegistrar| + bool MarkTextureFrameAvailable(int64_t texture_id) override; + + // |flutter::TextureRegistrar| + bool UnregisterTexture(int64_t texture_id) override; + + private: + // Handle for interacting with the C API. 
+ FlTextureRegistrar* texture_registrar_ref_; + std::map textures_; +}; + +} // namespace flutter + +#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_TEXTURE_REGISTRAR_IMPL_H_ diff --git a/linux/flutter_webrtc/flutter_web_r_t_c_plugin.h b/linux/flutter_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..cee9ce7b68 --- /dev/null +++ b/linux/flutter_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,31 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include +G_BEGIN_DECLS + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default"))) +#else +#define FLUTTER_PLUGIN_EXPORT +#endif + +namespace flutter_webrtc_plugin { +class FlutterWebRTC; +} // namespace flutter_webrtc_plugin + +typedef struct _FlutterWebrtcPlugin FlutterWebrtcPlugin; +typedef struct { + GObjectClass parent_class; +} FlutterWebrtcPluginClass; + +FLUTTER_PLUGIN_EXPORT GType flutter_webrtc_plugin_get_type(); + +FLUTTER_PLUGIN_EXPORT void flutter_web_r_t_c_plugin_register_with_registrar( + FlPluginRegistrar* registrar); + +FLUTTER_PLUGIN_EXPORT flutter_webrtc_plugin::FlutterWebRTC* flutter_webrtc_plugin_get_shared_instance(); + +G_END_DECLS + +#endif // PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ diff --git a/linux/flutter_webrtc_plugin.cc b/linux/flutter_webrtc_plugin.cc new file mode 100644 index 0000000000..b2a2a847fe --- /dev/null +++ b/linux/flutter_webrtc_plugin.cc @@ -0,0 +1,83 @@ +#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" + +#include "flutter_common.h" +#include "flutter_webrtc.h" +#include "task_runner_linux.h" + +const char* kChannelName = "FlutterWebRTC.Method"; +static flutter_webrtc_plugin::FlutterWebRTC* g_shared_instance = nullptr; +//#if defined(_WINDOWS) + +namespace flutter_webrtc_plugin { + +// A webrtc plugin for windows/linux. 
+class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. + std::unique_ptr plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + TaskRunner* task_runner() { return task_runner_.get(); } + + private: + // Creates a plugin that communicates on the given channel. + FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()), + task_runner_(std::make_unique()) { + webrtc_ = std::make_unique(this); + g_shared_instance = webrtc_.get(); + } + + // Called when a method is called on |channel_|; + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr result) { + // handle method call and forward to webrtc native sdk. 
+ auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr channel_; + std::unique_ptr webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; + std::unique_ptr task_runner_; +}; + +} // namespace flutter_webrtc_plugin + +void flutter_web_r_t_c_plugin_register_with_registrar( + FlPluginRegistrar* registrar) { + static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + plugin_registrar); +} + +flutter_webrtc_plugin::FlutterWebRTC* flutter_webrtc_plugin_get_shared_instance() { + return g_shared_instance; +} \ No newline at end of file diff --git a/linux/task_runner_linux.cc b/linux/task_runner_linux.cc new file mode 100644 index 0000000000..1c8a3f3240 --- /dev/null +++ b/linux/task_runner_linux.cc @@ -0,0 +1,31 @@ +#include "task_runner_linux.h" + +#include + +namespace flutter_webrtc_plugin { + +void TaskRunnerLinux::EnqueueTask(TaskClosure task) { + { + std::lock_guard lock(tasks_mutex_); + tasks_.push(std::move(task)); + } + + GMainContext* context = g_main_context_default(); + if (context) { + g_main_context_invoke( + context, + [](gpointer user_data) -> gboolean { + TaskRunnerLinux* runner = static_cast(user_data); + std::lock_guard lock(runner->tasks_mutex_); + while (!runner->tasks_.empty()) { + TaskClosure task = std::move(runner->tasks_.front()); + runner->tasks_.pop(); + task(); + } + return G_SOURCE_REMOVE; + }, + this); + } +} + +} // namespace flutter_webrtc_plugin diff --git a/linux/task_runner_linux.h b/linux/task_runner_linux.h new file mode 100644 index 0000000000..cff94639f7 --- /dev/null +++ b/linux/task_runner_linux.h @@ -0,0 +1,26 @@ +#ifndef PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ +#define PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ + +#include +#include +#include +#include 
"task_runner.h" + +namespace flutter_webrtc_plugin { + +class TaskRunnerLinux : public TaskRunner { + public: + TaskRunnerLinux() = default; + ~TaskRunnerLinux() override = default; + + // TaskRunner implementation. + void EnqueueTask(TaskClosure task) override; + + private: + std::mutex tasks_mutex_; + std::queue tasks_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ \ No newline at end of file diff --git a/macos/Classes/AudioManager.h b/macos/Classes/AudioManager.h new file mode 120000 index 0000000000..1c21c3e586 --- /dev/null +++ b/macos/Classes/AudioManager.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.h \ No newline at end of file diff --git a/macos/Classes/AudioManager.m b/macos/Classes/AudioManager.m new file mode 120000 index 0000000000..bbcfb519ed --- /dev/null +++ b/macos/Classes/AudioManager.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioManager.m \ No newline at end of file diff --git a/macos/Classes/AudioProcessingAdapter.h b/macos/Classes/AudioProcessingAdapter.h new file mode 120000 index 0000000000..f3048db2f1 --- /dev/null +++ b/macos/Classes/AudioProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.h \ No newline at end of file diff --git a/macos/Classes/AudioProcessingAdapter.m b/macos/Classes/AudioProcessingAdapter.m new file mode 120000 index 0000000000..803efdda50 --- /dev/null +++ b/macos/Classes/AudioProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioProcessingAdapter.m \ No newline at end of file diff --git a/macos/Classes/AudioUtils.h b/macos/Classes/AudioUtils.h new file mode 120000 index 0000000000..efc6c758c8 --- /dev/null +++ b/macos/Classes/AudioUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/AudioUtils.h \ No newline at end of file diff --git a/macos/Classes/AudioUtils.m b/macos/Classes/AudioUtils.m new file mode 120000 index 0000000000..5023efe9fd --- /dev/null +++ b/macos/Classes/AudioUtils.m @@ -0,0 +1 
@@ +../../common/darwin/Classes/AudioUtils.m \ No newline at end of file diff --git a/macos/Classes/CameraUtils.h b/macos/Classes/CameraUtils.h new file mode 120000 index 0000000000..a31c2baab2 --- /dev/null +++ b/macos/Classes/CameraUtils.h @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.h \ No newline at end of file diff --git a/macos/Classes/CameraUtils.m b/macos/Classes/CameraUtils.m new file mode 120000 index 0000000000..336e1ea963 --- /dev/null +++ b/macos/Classes/CameraUtils.m @@ -0,0 +1 @@ +../../common/darwin/Classes/CameraUtils.m \ No newline at end of file diff --git a/macos/Classes/FlutterRPScreenRecorder.h b/macos/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 0000000000..a34a3193c9 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of file diff --git a/macos/Classes/FlutterRPScreenRecorder.m b/macos/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.h b/macos/Classes/FlutterRTCDataChannel.h new file mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.m b/macos/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDesktopCapturer.h b/macos/Classes/FlutterRTCDesktopCapturer.h new file mode 120000 index 0000000000..eff4773160 --- /dev/null +++ b/macos/Classes/FlutterRTCDesktopCapturer.h @@ -0,0 +1 @@ 
+../../common/darwin/Classes/FlutterRTCDesktopCapturer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDesktopCapturer.m b/macos/Classes/FlutterRTCDesktopCapturer.m new file mode 120000 index 0000000000..5388e628f4 --- /dev/null +++ b/macos/Classes/FlutterRTCDesktopCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDesktopCapturer.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.h b/macos/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.m b/macos/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCryptor.h b/macos/Classes/FlutterRTCFrameCryptor.h new file mode 120000 index 0000000000..ad3e0de33e --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCryptor.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCryptor.m b/macos/Classes/FlutterRTCFrameCryptor.m new file mode 120000 index 0000000000..bd62d3db56 --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCryptor.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCryptor.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.h b/macos/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.m b/macos/Classes/FlutterRTCMediaStream.m new file mode 
120000 index 0000000000..2e988ad614 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.h b/macos/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.m b/macos/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.h b/macos/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.m b/macos/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.h b/macos/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.m b/macos/Classes/FlutterWebRTCPlugin.m new file mode 120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline at end of file diff --git 
a/macos/Classes/LocalAudioTrack.h b/macos/Classes/LocalAudioTrack.h new file mode 120000 index 0000000000..421b56b2af --- /dev/null +++ b/macos/Classes/LocalAudioTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.h \ No newline at end of file diff --git a/macos/Classes/LocalAudioTrack.m b/macos/Classes/LocalAudioTrack.m new file mode 120000 index 0000000000..71fa724d15 --- /dev/null +++ b/macos/Classes/LocalAudioTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalAudioTrack.m \ No newline at end of file diff --git a/macos/Classes/LocalTrack.h b/macos/Classes/LocalTrack.h new file mode 120000 index 0000000000..7d41789949 --- /dev/null +++ b/macos/Classes/LocalTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalTrack.h \ No newline at end of file diff --git a/macos/Classes/LocalVideoTrack.h b/macos/Classes/LocalVideoTrack.h new file mode 120000 index 0000000000..5069f7dd17 --- /dev/null +++ b/macos/Classes/LocalVideoTrack.h @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.h \ No newline at end of file diff --git a/macos/Classes/LocalVideoTrack.m b/macos/Classes/LocalVideoTrack.m new file mode 120000 index 0000000000..182490a4fb --- /dev/null +++ b/macos/Classes/LocalVideoTrack.m @@ -0,0 +1 @@ +../../common/darwin/Classes/LocalVideoTrack.m \ No newline at end of file diff --git a/macos/Classes/VideoProcessingAdapter.h b/macos/Classes/VideoProcessingAdapter.h new file mode 120000 index 0000000000..d93141230c --- /dev/null +++ b/macos/Classes/VideoProcessingAdapter.h @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.h \ No newline at end of file diff --git a/macos/Classes/VideoProcessingAdapter.m b/macos/Classes/VideoProcessingAdapter.m new file mode 120000 index 0000000000..c80ad1ca73 --- /dev/null +++ b/macos/Classes/VideoProcessingAdapter.m @@ -0,0 +1 @@ +../../common/darwin/Classes/VideoProcessingAdapter.m \ No newline at end of file diff --git a/macos/flutter_webrtc.podspec b/macos/flutter_webrtc.podspec new file 
mode 100644 index 0000000000..7baf7ea75f --- /dev/null +++ b/macos/flutter_webrtc.podspec @@ -0,0 +1,20 @@ +# +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# +Pod::Spec.new do |s| + s.name = 'flutter_webrtc' + s.version = '0.14.0' + s.summary = 'Flutter WebRTC plugin for macOS.' + s.description = <<-DESC +A new flutter plugin project. + DESC + s.homepage = 'https://github.com/cloudwebrtc/flutter-webrtc' + s.license = { :file => '../LICENSE' } + s.author = { 'CloudWebRTC' => 'duanweiwei1982@gmail.com' } + s.source = { :path => '.' } + s.source_files = ['Classes/**/*'] + + s.dependency 'FlutterMacOS' + s.dependency 'WebRTC-SDK', '125.6422.07' + s.osx.deployment_target = '10.14' +end diff --git a/pubspec.yaml b/pubspec.yaml index 8ac3a77848..a0c02d06ee 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,53 +1,41 @@ -name: webrtc -description: Flutter WebRTC plugin for iOS/Android. -version: 0.0.1 -author: cloudwebrtc +name: flutter_webrtc +description: Flutter WebRTC plugin for iOS/Android/Desktop/Web, based on GoogleWebRTC. +version: 0.14.2 homepage: https://github.com/cloudwebrtc/flutter-webrtc +environment: + sdk: ">=3.3.0 <4.0.0" + flutter: ">=1.22.0" dependencies: + collection: ^1.17.0 + dart_webrtc: ^1.5.3+hotfix.2 flutter: sdk: flutter + path_provider: ^2.0.2 + web: ^1.0.0 + webrtc_interface: ^1.2.2+hotfix.2 -# For information on the generic Dart part of this file, see the -# following page: https://www.dartlang.org/tools/pub/pubspec +dev_dependencies: + flutter_test: + sdk: flutter + import_sorter: ^4.6.0 + lints: ^4.0.0 + pedantic: ^1.11.1 + test: any -# The following section is specific to Flutter.
flutter: plugin: - androidPackage: com.cloudwebrtc.webrtc - pluginClass: FlutterWebRTCPlugin - -environment: - sdk: ">=2.0.0-dev.35.0 <3.0.0" - flutter: ">=0.2.3 <2.0.0" - - # To add assets to your plugin package, add an assets section, like this: - # assets: - # - images/a_dot_burr.jpeg - # - images/a_dot_ham.jpeg - # - # For details regarding assets in packages, see - # https://flutter.io/assets-and-images/#from-packages - # - # An image asset can refer to one or more resolution-specific "variants", see - # https://flutter.io/assets-and-images/#resolution-aware. - - # To add custom fonts to your plugin package, add a fonts section here, - # in this "flutter" section. Each entry in this list should have a - # "family" key with the font family name, and a "fonts" key with a - # list giving the asset and other descriptors for the font. For - # example: - # fonts: - # - family: Schyler - # fonts: - # - asset: fonts/Schyler-Regular.ttf - # - asset: fonts/Schyler-Italic.ttf - # style: italic - # - family: Trajan Pro - # fonts: - # - asset: fonts/TrajanPro.ttf - # - asset: fonts/TrajanPro_Bold.ttf - # weight: 700 - # - # For details regarding fonts in packages, see - # https://flutter.io/custom-fonts/#from-packages + platforms: + android: + package: com.cloudwebrtc.webrtc + pluginClass: FlutterWebRTCPlugin + ios: + pluginClass: FlutterWebRTCPlugin + macos: + pluginClass: FlutterWebRTCPlugin + windows: + pluginClass: FlutterWebRTCPlugin + linux: + pluginClass: FlutterWebRTCPlugin + elinux: + pluginClass: FlutterWebRTCPlugin diff --git a/renovate.json b/renovate.json new file mode 100644 index 0000000000..f45d8f110c --- /dev/null +++ b/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": [ + "config:base" + ] +} diff --git a/test/unit/rtc_peerconnection_test.dart b/test/unit/rtc_peerconnection_test.dart new file mode 100644 index 0000000000..d7f86d9302 --- /dev/null +++ b/test/unit/rtc_peerconnection_test.dart @@ -0,0 +1,84 @@ +import 'package:flutter/services.dart'; + +import 
'package:flutter_test/flutter_test.dart'; + +import 'package:flutter_webrtc/src/native/rtc_data_channel_impl.dart'; +import 'package:flutter_webrtc/src/native/rtc_peerconnection_impl.dart'; + +void main() { + TestWidgetsFlutterBinding.ensureInitialized(); + final channel = MethodChannel('FlutterWebRTC.Method'); + setUp(() { + channel.setMockMethodCallHandler((MethodCall methodCall) async { + await ServicesBinding.instance.defaultBinaryMessenger + .handlePlatformMessage( + 'FlutterWebRTC/peerConnectionEvent', null, (ByteData? data) {}); + await ServicesBinding.instance.defaultBinaryMessenger + .handlePlatformMessage( + 'FlutterWebRTC/dataChannelEvent', null, (ByteData? data) {}); + }); + }); + + tearDown(() { + channel.setMockMethodCallHandler(null); + }); + + test( + 'Validate that not setting any public delegate this will not break the implementation by throwing NPE', + () { + final pc = RTCPeerConnectionNative('', {}); + final events = [ + 'signalingState', + 'iceGatheringState', + 'iceConnectionState', + 'onCandidate', + 'onAddStream', + 'onRemoveStream', + 'onAddTrack', + 'onRemoveTrack', + 'didOpenDataChannel', + 'onRenegotiationNeeded' + ]; + + pc.onDataChannel = (dc) { + final channel = dc as RTCDataChannelNative; + channel.eventListener({ + 'event': 'dataChannelStateChanged', + 'id': 0, + 'flutterId': '', + 'state': 'open' + }); + }; + + for (var event in events) { + pc.eventListener({ + 'event': event, + + //Minimum values for signalingState, iceGatheringState, iceConnectionState + 'state': 'stable', // just picking one valid value from the list + + //Minimum values for onCandidate + 'candidate': {'candidate': '', 'sdpMid': '', 'sdpMLineIndex': 1}, + + //Minimum values for onAddStream + 'streamId': '', + 'audioTracks': [], + 'videoTracks': [], + + //Minimum values for onRemoveTrack + 'trackId': '', + + //Minimum values for onAddTrack + 'track': { + 'id': '', + 'label': '', + 'kind': '', + 'enabled': false, + }, + 'id': 0, + 'label': '', + 'flutterId': '', 
+ }); + } + }); +} diff --git a/test/unit/web/rtc_videw_view_test.dart b/test/unit/web/rtc_videw_view_test.dart new file mode 100644 index 0000000000..9b451cf77c --- /dev/null +++ b/test/unit/web/rtc_videw_view_test.dart @@ -0,0 +1,16 @@ +@TestOn('browser') +library; + +import 'package:flutter_test/flutter_test.dart'; + +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +void main() { + // TODO(wer-mathurin): should revisit after this bug is resolved, https://github.com/flutter/flutter/issues/66045. + test('should complete succesfully', () async { + var renderer = RTCVideoRenderer(); + await renderer.initialize(); + renderer.srcObject = await MediaDevices.getUserMedia({}); + await renderer.dispose(); + }); +} diff --git a/third_party/libwebrtc/include/base/atomicops.h b/third_party/libwebrtc/include/base/atomicops.h new file mode 100644 index 0000000000..4a3b79916a --- /dev/null +++ b/third_party/libwebrtc/include/base/atomicops.h @@ -0,0 +1,75 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef LIB_WEBRTC_ATOMICOPS_H_ +#define LIB_WEBRTC_ATOMICOPS_H_ + +#if defined(WIN32) || defined(_WINDOWS) +// Include winsock2.h before including to maintain consistency with +// win32.h. We can't include win32.h directly here since it pulls in +// headers such as basictypes.h which causes problems in Chromium where webrtc +// exists as two separate projects, webrtc and libjingle. +#include +#endif // defined(WIN32) + +namespace libwebrtc { +class AtomicOps { + public: +#if defined(WIN32) || defined(_WINDOWS) + // Assumes sizeof(int) == sizeof(LONG), which it is on Win32 and Win64. 
+ static int Increment(volatile int* i) { + return ::InterlockedIncrement(reinterpret_cast(i)); + } + static int Decrement(volatile int* i) { + return ::InterlockedDecrement(reinterpret_cast(i)); + } + static int AcquireLoad(volatile const int* i) { return *i; } + static void ReleaseStore(volatile int* i, int value) { *i = value; } + static int CompareAndSwap(volatile int* i, int old_value, int new_value) { + return ::InterlockedCompareExchange(reinterpret_cast(i), + new_value, old_value); + } + // Pointer variants. + template + static T* AcquireLoadPtr(T* volatile* ptr) { + return *ptr; + } + template + static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) { + return static_cast(::InterlockedCompareExchangePointer( + reinterpret_cast(ptr), new_value, old_value)); + } +#else + static int Increment(volatile int* i) { return __sync_add_and_fetch(i, 1); } + static int Decrement(volatile int* i) { return __sync_sub_and_fetch(i, 1); } + static int AcquireLoad(volatile const int* i) { + return __atomic_load_n(i, __ATOMIC_ACQUIRE); + } + static void ReleaseStore(volatile int* i, int value) { + __atomic_store_n(i, value, __ATOMIC_RELEASE); + } + static int CompareAndSwap(volatile int* i, int old_value, int new_value) { + return __sync_val_compare_and_swap(i, old_value, new_value); + } + // Pointer variants. 
+ template + static T* AcquireLoadPtr(T* volatile* ptr) { + return __atomic_load_n(ptr, __ATOMIC_ACQUIRE); + } + template + static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) { + return __sync_val_compare_and_swap(ptr, old_value, new_value); + } +#endif +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_ATOMICOPS_H_ diff --git a/third_party/libwebrtc/include/base/fixed_size_function.h b/third_party/libwebrtc/include/base/fixed_size_function.h new file mode 100644 index 0000000000..1070b45232 --- /dev/null +++ b/third_party/libwebrtc/include/base/fixed_size_function.h @@ -0,0 +1,296 @@ +// +// Copyright (c) 2014-2016 Pavel Medvedev. All rights reserved. +// +// Distributed under the MIT software license, see the accompanying +// file LICENSE + +#ifndef FIXED_SIZE_FUNCTION_HPP_INCLUDED +#define FIXED_SIZE_FUNCTION_HPP_INCLUDED + +#include +#include +#include +#include + +enum class construct_type { + none, + copy, + move, + copy_and_move, +}; + +namespace details { + +// V-table implementation +template +struct fixed_function_vtable_base { + Ret (*call)(void*, Args&&...) 
= nullptr; + void (*destroy)(void*) = nullptr; +}; + +template +struct fixed_function_vtable; + +template +struct fixed_function_vtable + : fixed_function_vtable_base {}; + +template +struct fixed_function_vtable + : fixed_function_vtable_base { + void (*copy)(const void*, void*) = nullptr; +}; + +template +struct fixed_function_vtable + : fixed_function_vtable_base { + void (*move)(void*, void*) = nullptr; +}; + +template +struct fixed_function_vtable + : fixed_function_vtable_base { + void (*copy)(const void*, void*) = nullptr; + void (*move)(void*, void*) = nullptr; +}; + +} // namespace details + +template +class fixed_size_function; + +template +class fixed_size_function { + public: + // Compile-time information + + using is_copyable = + std::integral_constant; + using is_movable = + std::integral_constant; + + using result_type = Ret; + + static const std::size_t arity = sizeof...(Args); + + template + struct argument { + static_assert(N < arity, "invalid argument index"); + using type = typename std::tuple_element>::type; + }; + + public: + template + fixed_size_function(fixed_size_function const&) = delete; + template + fixed_size_function(fixed_size_function&) = delete; + template + fixed_size_function(fixed_size_function&&) = delete; + template + fixed_size_function& operator=(fixed_size_function const&) = delete; + template + fixed_size_function& operator=(fixed_size_function&) = delete; + template + fixed_size_function& operator=(fixed_size_function&&) = delete; + template + void assign(fixed_size_function const&) = delete; + template + void assign(fixed_size_function&) = delete; + template + void assign(fixed_size_function&&) = delete; + + fixed_size_function() {} + + ~fixed_size_function() { reset(); } + + fixed_size_function(std::nullptr_t) {} + + fixed_size_function& operator=(std::nullptr_t) { + reset(); + return *this; + } + + fixed_size_function(fixed_size_function const& src) { copy(src); } + + fixed_size_function& operator=(fixed_size_function 
const& src) { + assign(src); + return *this; + } + + fixed_size_function(fixed_size_function& src) { copy(src); } + + fixed_size_function& operator=(fixed_size_function& src) { + assign(src); + return *this; + } + + fixed_size_function(fixed_size_function&& src) { + move(std::move(src), is_movable()); + } + + fixed_size_function& operator=(fixed_size_function&& src) { + assign(std::move(src)); + return *this; + } + + template + fixed_size_function(Functor&& f) { + create(std::forward(f)); + } + + template + fixed_size_function& operator=(Functor&& f) { + assign(std::forward(f)); + return *this; + } + + void assign(fixed_size_function const& src) { + reset(); + copy(src); + } + + void assign(fixed_size_function& src) { + reset(); + copy(src); + } + + void assign(fixed_size_function&& src) { + reset(); + move(std::move(src), is_movable()); + } + + template + void assign(Functor&& f) { + reset(); + create(std::forward(f)); + } + + void reset() { + auto destroy = vtable_.destroy; + if (destroy) { + vtable_ = vtable(); + destroy(&storage_); + } + } + + explicit operator bool() const { return vtable_.call != nullptr; } + + Ret operator()(Args... args) { + return vtable_.call ? vtable_.call(&storage_, std::forward(args)...) 
+ : (Ret) nullptr; + } + + void swap(fixed_size_function& other) { + fixed_size_function tmp = std::move(other); + other = std::move(*this); + *this = std::move(tmp); + } + + friend void swap(fixed_size_function& lhs, fixed_size_function& rhs) { + lhs.swap(rhs); + } + + friend bool operator==(std::nullptr_t, fixed_size_function const& f) { + return !f; + } + + friend bool operator==(fixed_size_function const& f, std::nullptr_t) { + return !f; + } + + friend bool operator!=(std::nullptr_t, fixed_size_function const& f) { + return f; + } + + friend bool operator!=(fixed_size_function const& f, std::nullptr_t) { + return f; + } + + private: + template + void create(Functor&& f) { + using functor_type = typename std::decay::type; + static_assert(sizeof(functor_type) <= StorageSize, + "Functor must be smaller than storage buffer"); + + new (&storage_) functor_type(std::forward(f)); + + vtable_.call = &call_impl; + vtable_.destroy = &destroy_impl; + init_copy(is_copyable()); + init_move(is_movable()); + } + + void copy(fixed_size_function const& src) { + if (src.vtable_.copy) { + src.vtable_.copy(&src.storage_, &storage_); + vtable_ = src.vtable_; + } + } + + void move(fixed_size_function&& src, std::true_type movable) { + if (src.vtable_.move) { + src.vtable_.move(&src.storage_, &storage_); + vtable_ = src.vtable_; + src.reset(); + } + } + + void move(fixed_size_function const& src, std::false_type movable) { + copy(src); + } + + private: + template + static Ret call_impl(void* functor, Args&&... 
args) { + return (*static_cast(functor))(std::forward(args)...); + } + + template + static void destroy_impl(void* functor) { + static_cast(functor)->~Functor(); + } + + template + static void copy_impl(void const* functor, void* dest) { + new (dest) Functor(*static_cast(functor)); + } + + template + static void move_impl(void* functor, void* dest) { + new (dest) Functor(std::move(*static_cast(functor))); + } + + template + void init_copy(std::true_type /*copyable*/) { + vtable_.copy = ©_impl; + } + + template + void init_copy(std::false_type /*copyable*/) {} + + template + void init_move(std::true_type /*movable*/) { + vtable_.move = &move_impl; + } + + template + void init_move(std::false_type /*movable*/) {} + + private: + using vtable = + details::fixed_function_vtable; + static const size_t StorageSize = MaxSize - sizeof(vtable); + using storage = typename std::aligned_storage::type; + + vtable vtable_; + storage storage_; +}; + +#endif // FIXED_SIZE_FUNCTION_HPP_INCLUDED \ No newline at end of file diff --git a/third_party/libwebrtc/include/base/portable.h b/third_party/libwebrtc/include/base/portable.h new file mode 100644 index 0000000000..b403af9cd3 --- /dev/null +++ b/third_party/libwebrtc/include/base/portable.h @@ -0,0 +1,439 @@ +#ifndef INFINISPAN_HOTROD_PORTABLE_H +#define INFINISPAN_HOTROD_PORTABLE_H + +#ifdef LIB_WEBRTC_API_EXPORTS +#define LIB_PORTABLE_API __declspec(dllexport) +#elif defined(LIB_WEBRTC_API_DLL) +#define LIB_PORTABLE_API __declspec(dllimport) +#elif !defined(WIN32) +#define LIB_PORTABLE_API __attribute__((visibility("default"))) +#else +#define LIB_PORTABLE_API +#endif + +#include +#include +#include +#include +#include + +/** + * This file defines structures that can be passed across shared library/DLL + * boundary. + * + * Besides memory layout, the class must be destroyed in the same library as + * created. None of these classes is thread-safe. The classes are not optimized + * for performance. 
+ */ + +namespace portable { + +#ifdef _MSC_VER +#define strncpy_safe strncpy_s +#else +#ifndef _TRUNCATE +#define _TRUNCATE ((size_t)-1) +#endif // _TRUNCATE +#endif + +#define PORTABLE_STRING_BUF_SIZE 48 + +class string { + private: + char m_buf[PORTABLE_STRING_BUF_SIZE]; + char* m_dynamic; + size_t m_length; + + public: + LIB_PORTABLE_API string(); + LIB_PORTABLE_API void init(const char* str, size_t len); + LIB_PORTABLE_API void destroy(); + + inline string(const char* str) { init(str, strlen(str)); } + + inline string(const std::string& str) { init(str.c_str(), str.length()); } + + inline string(const string& o) { + init(o.m_dynamic == 0 ? o.m_buf : o.m_dynamic, o.m_length); + } + + inline string& operator=(const string& o) { + destroy(); + init(o.m_dynamic == 0 ? o.m_buf : o.m_dynamic, o.m_length); + return *this; + } + + LIB_PORTABLE_API ~string(); + + inline string& operator=(const std::string& str) { + destroy(); + init(str.c_str(), str.length()); + return *this; + } + + inline size_t size() { return m_length; } + + inline const char* c_string() const { + return m_dynamic == 0 ? m_buf : m_dynamic; + } + + inline std::string std_string() const { + return std::string(m_dynamic == 0 ? 
m_buf : m_dynamic, m_length); + } +}; + +inline std::string to_std_string(const string& str) { return str.std_string(); } + +template +class identity { + T operator()(const T& x) { return x; } +}; + +template +class vector { + protected: + using raw_type = typename std::aligned_storage::type; + + private: + T* m_array; + size_t m_size; + + public: + class move_ref { + friend class vector; + + private: + vector& m_ref; + move_ref(vector& ref) : m_ref(ref) {} + }; + + vector() : m_array(0), m_size(0) {} + vector(T* array, size_t s) : m_array(array), m_size(s) {} + + template + vector(const Iterable& v) { + m_size = v.size(); + if (v.size() == 0) { + m_array = 0; + } else { + m_array = new T[v.size()]; + size_t i = 0; + for (typename Iterable::const_iterator it = v.begin(); it != v.end(); + ++it) { + m_array[i++] = *it; + } + } + } + + template + vector(const Iterable& v, Converter convert) { + m_size = v.size(); + if (v.size() == 0) { + m_array = 0; + } else { + m_array = new T[v.size()]; + size_t i = 0; + for (typename Iterable::const_iterator it = v.begin(); it != v.end(); + ++it) { + m_array[i++] = convert(*it); + } + } + } + + vector(const vector& o) { + m_size = o.m_size; + if (m_size != 0) { + m_array = new T[o.m_size]; + for (size_t i = 0; i < o.m_size; ++i) { + m_array[i] = o.m_array[i]; + } + } + } + + ~vector() { destroy_all(); } + + vector& operator=(const vector& o) { + if (m_size < o.m_size) { + destroy_all(); + m_array = new T[o.m_size]; + } else if (o.m_size == 0 && m_size != 0) { + destroy_all(); + } + m_size = o.m_size; + for (size_t i = 0; i < o.m_size; ++i) { + m_array[i] = o.m_array[i]; + } + return *this; + } + + vector(move_ref mr) : m_array(mr.m_ref.m_array), m_size(mr.m_ref.m_size) {} + vector& operator=(move_ref mr) { + if (m_size != 0) { + destroy_all(); + } + m_size = mr.m_ref.m_size; + m_array = mr.m_ref.m_array; + mr.m_ref.m_size = 0; + mr.m_ref.m_array = 0; + return *this; + } + /** + * Not really safe - can't be used as 
vector(something).move(), + * but vector tmp(something); other = tmp.move(); + */ + move_ref move() { return move_ref(*this); } + + std::vector std_vector() const { + std::vector v; + v.reserve(m_size); + for (size_t i = 0; i < m_size; ++i) { + v.push_back(m_array[i]); + } + return v; + } + + const T* data() const { return m_array; } + + size_t size() const { return m_size; } + + T& operator[](size_t i) { return m_array[i]; } + + const T& operator[](size_t i) const { return m_array[i]; } + + void clear() { destroy_all(); } + + protected: + void destroy(T* rt) { reinterpret_cast(rt)->~T(); } + + void destroy_all() { + for (size_t i = 0; i < m_size; ++i) { + destroy(&m_array[i]); + } + m_size = 0; + } +}; + +template +class pair { + public: + K key; + V value; +}; + +template +class map { + private: + typedef pair my_pair; + + vector m_vec; + + /*template + static pair *to_array(const std::map &m, + K (*convertKey)(const K2 &), + V (*convertValue)(const V2 &)) + { + my_pair *data = new my_pair[m.size()]; + my_pair *dp = data; + for (std::map::const_iterator it = m.begin(); it != m.end(); ++it) + { dp->key = convertKey(it->first); dp->value = convertValue(it->second); + ++dp; + } + return data; + }*/ + + template + static my_pair* to_array(const std::map& m, KC convertKey, + VC convertValue) { + my_pair* data = new my_pair[m.size()]; + my_pair* dp = data; + for (typename std::map::const_iterator it = m.begin(); + it != m.end(); ++it) { + dp->key = convertKey(it->first); + dp->value = convertValue(it->second); + ++dp; + } + return data; + } + + public: + class move_ref { + friend class map; + + private: + map& m_ref; + move_ref(map& ref) : m_ref(ref) {} + }; + + map() {} + + /* template map(const std::map &m, + K (*convertKey)(const K2 &) = identity, + V (*convertValue)(const V2 &) = identity): + m_vec(to_array(m, convertKey, convertValue), m.size()) {}*/ + + map(const std::map& m) + : m_vec(to_array(m, identity(), identity()), m.size()) {} + + template + map(const 
std::map& m, KC convertKey = identity(), + VC convertValue = identity()) + : m_vec(to_array(m, convertKey, convertValue), m.size()) {} + + map(const map& o) { m_vec = o.m_vec; } + + map& operator=(const map& o) { + m_vec = o.m_vec; + return *this; + } + + map(move_ref mr) : m_vec(mr.m_ref.m_vec.move()) {} + map& operator=(move_ref mr) { + m_vec = mr.m_ref.m_vec.move(); + return *this; + } + move_ref move() { return move_ref(*this); } + + std::map std_map() const { + std::map m; + for (size_t i = 0; i < m_vec.size(); ++i) { + const my_pair* dp = m_vec.data() + i; + m[dp->key] = dp->value; + } + return m; + } + + template + std::map std_map(KC convertKey, VC convertValue) const { + std::map m; + for (size_t i = 0; i < m_vec.size(); ++i) { + const my_pair* dp = m_vec.data() + i; + m[convertKey(dp->key)] = convertValue(dp->value); + } + return m; + } + + template + const my_pair* get(K2 key, int (*cmp)(K2, const K&)) const { + for (size_t i = 0; i < m_vec.size(); ++i) { + const my_pair* dp = m_vec.data() + i; + if (!cmp(key, dp->key)) return dp; + } + return 0; + } + + const my_pair* data() const { return m_vec.data(); } + + size_t size() const { return m_vec.size(); } +}; + +/* Invasive reference counting */ +template +class counting_ptr; + +class counted_object { + template + friend class counting_ptr; + + private: + int m_counter; + + public: + counted_object() : m_counter(0) {} + virtual ~counted_object() {} +}; + +template +class counted_wrapper : public counted_object { + private: + T m_object; + + public: + counted_wrapper(const T& o) : m_object(o) {} + T& operator()() { return m_object; } +}; + +template +class counting_ptr { + public: + typedef void (*destroy)(T*); + + private: + counted_object* m_ptr; + destroy m_destroy; + + inline void dec_and_destroy() { + if (m_ptr != 0 && --(m_ptr->m_counter) == 0) { + if (m_destroy == 0) { + delete m_ptr; + } else { + m_destroy((T*)m_ptr); + } + } + } + + public: + counting_ptr() : m_ptr(0), m_destroy(0) {} + 
counting_ptr(T* obj, destroy d = 0) : m_ptr(obj), m_destroy(d) { + counted_object* rc = obj; // no cast required + if (rc != 0) { + rc->m_counter++; + } + } + ~counting_ptr() { dec_and_destroy(); } + counting_ptr(const counting_ptr& o) : m_ptr(o.m_ptr), m_destroy(o.m_destroy) { + if (m_ptr != 0) { + m_ptr->m_counter++; + } + } + counting_ptr& operator=(const counting_ptr& o) { + dec_and_destroy(); + m_ptr = o.m_ptr; + m_destroy = o.m_destroy; + if (m_ptr != 0) { + m_ptr->m_counter++; + } + return *this; + } + counting_ptr& operator=(T* rc) { return reset(rc, 0); } + counting_ptr& reset(T* rc, destroy d) { + dec_and_destroy(); + m_ptr = rc; + m_destroy = d; + if (rc != 0) { + rc->m_counter++; + } + return *this; + } + T* get() { return (T*)m_ptr; } + const T* get() const { return (T*)m_ptr; } + T* operator->() { return (T*)m_ptr; } + const T* operator->() const { return (const T*)m_ptr; } +}; + +template +class local_ptr { + private: + typedef void (*destroy)(T*); + T* m_ptr; + destroy m_destroy; + + public: + local_ptr() : m_ptr(0), m_destroy(0) {} + local_ptr(const local_ptr&) + : m_ptr(0), m_destroy(0) {} // copying does not persist value + local_ptr& operator=(const local_ptr&) { return *this; } + ~local_ptr() { + if (m_ptr) m_destroy(m_ptr); + } + const T* get() const { return m_ptr; } + T* get() { return m_ptr; } + void set(T* ptr, void (*dtor)(T*)) { + if (m_ptr) m_destroy(m_ptr); + m_ptr = ptr; + m_destroy = dtor; + } +}; + +} // namespace portable + +#endif // INFINISPAN_HOTROD_PORTABLE_H \ No newline at end of file diff --git a/third_party/libwebrtc/include/base/refcount.h b/third_party/libwebrtc/include/base/refcount.h new file mode 100644 index 0000000000..6e3360e87b --- /dev/null +++ b/third_party/libwebrtc/include/base/refcount.h @@ -0,0 +1,29 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef LIB_WEBRTC_REFCOUNT_H_ +#define LIB_WEBRTC_REFCOUNT_H_ + +#include "refcountedobject.h" + +namespace libwebrtc { + +// Reference count interface. +class RefCountInterface { + public: + virtual int AddRef() const = 0; + virtual int Release() const = 0; + + protected: + virtual ~RefCountInterface() {} +}; + +} // namespace libwebrtc + +#endif // WEBRTC_BASE_REFCOUNT_H_ diff --git a/third_party/libwebrtc/include/base/refcountedobject.h b/third_party/libwebrtc/include/base/refcountedobject.h new file mode 100644 index 0000000000..9beea333f2 --- /dev/null +++ b/third_party/libwebrtc/include/base/refcountedobject.h @@ -0,0 +1,60 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef LIB_WEBRTC_REFCOUNTEDOBJECT_H_ +#define LIB_WEBRTC_REFCOUNTEDOBJECT_H_ + +#include + +#include "atomicops.h" + +namespace libwebrtc { + +template +class RefCountedObject : public T { + public: + RefCountedObject() {} + + template + explicit RefCountedObject(P0&& p0) : T(std::forward(p0)) {} + + template + RefCountedObject(P0&& p0, P1&& p1, Args&&... args) + : T(std::forward(p0), std::forward(p1), + std::forward(args)...) 
{} + + virtual int AddRef() const { return AtomicOps::Increment(&ref_count_); } + + virtual int Release() const { + int count = AtomicOps::Decrement(&ref_count_); + if (!count) { + delete this; + } + return count; + } + + // Return whether the reference count is one. If the reference count is used + // in the conventional way, a reference count of 1 implies that the current + // thread owns the reference and no other thread shares it. This call + // performs the test for a reference count of one, and performs the memory + // barrier needed for the owning thread to act on the object, knowing that it + // has exclusive access to the object. + virtual bool HasOneRef() const { + return AtomicOps::AcquireLoad(&ref_count_) == 1; + } + + protected: + virtual ~RefCountedObject() {} + + mutable volatile int ref_count_ = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_REFCOUNTEDOBJECT_H_ diff --git a/third_party/libwebrtc/include/base/scoped_ref_ptr.h b/third_party/libwebrtc/include/base/scoped_ref_ptr.h new file mode 100644 index 0000000000..9f00839cbe --- /dev/null +++ b/third_party/libwebrtc/include/base/scoped_ref_ptr.h @@ -0,0 +1,155 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Originally these classes are from Chromium. +// http://src.chromium.org/viewvc/chrome/trunk/src/base/memory/ref_counted.h?view=markup + +// +// A smart pointer class for reference counted objects. Use this class instead +// of calling AddRef and Release manually on a reference counted object to +// avoid common memory leaks caused by forgetting to Release an object +// reference. 
Sample usage: +// +// class MyFoo : public RefCounted { +// ... +// }; +// +// void some_function() { +// scoped_refptr foo = new MyFoo(); +// foo->Method(param); +// // |foo| is released when this function returns +// } +// +// void some_other_function() { +// scoped_refptr foo = new MyFoo(); +// ... +// foo = NULL; // explicitly releases |foo| +// ... +// if (foo) +// foo->Method(param); +// } +// +// The above examples show how scoped_refptr acts like a pointer to T. +// Given two scoped_refptr classes, it is also possible to exchange +// references between the two objects, like so: +// +// { +// scoped_refptr a = new MyFoo(); +// scoped_refptr b; +// +// b.swap(a); +// // now, |b| references the MyFoo object, and |a| references NULL. +// } +// +// To make both |a| and |b| in the above example reference the same MyFoo +// object, simply use the assignment operator: +// +// { +// scoped_refptr a = new MyFoo(); +// scoped_refptr b; +// +// b = a; +// // now, |a| and |b| each own a reference to the same MyFoo object. +// } +// + +#ifndef LIB_WEBRTC_SCOPED_REF_PTR_H_ +#define LIB_WEBRTC_SCOPED_REF_PTR_H_ + +#include + +namespace libwebrtc { + +template +class scoped_refptr { + public: + scoped_refptr() : ptr_(NULL) {} + + scoped_refptr(T* p) : ptr_(p) { + if (ptr_) ptr_->AddRef(); + } + + scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) { + if (ptr_) ptr_->AddRef(); + } + + template + scoped_refptr(const scoped_refptr& r) : ptr_(r.get()) { + if (ptr_) ptr_->AddRef(); + } + + // Move constructors. + scoped_refptr(scoped_refptr&& r) : ptr_(r.release()) {} + + template + scoped_refptr(scoped_refptr&& r) : ptr_(r.release()) {} + + ~scoped_refptr() { + if (ptr_) ptr_->Release(); + } + + T* get() const { return ptr_; } + operator T*() const { return ptr_; } + T* operator->() const { return ptr_; } + + // Release a pointer. + // The return value is the current pointer held by this object. + // If this object holds a NULL pointer, the return value is NULL. 
+ // After this operation, this object will hold a NULL pointer, + // and will not own the object any more. + T* release() { + T* retVal = ptr_; + ptr_ = NULL; + return retVal; + } + + scoped_refptr& operator=(T* p) { + // AddRef first so that self assignment should work + if (p) p->AddRef(); + if (ptr_) ptr_->Release(); + ptr_ = p; + return *this; + } + + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.ptr_; + } + + template + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.get(); + } + + scoped_refptr& operator=(scoped_refptr&& r) { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + + template + scoped_refptr& operator=(scoped_refptr&& r) { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + + void swap(T** pp) { + T* p = ptr_; + ptr_ = *pp; + *pp = p; + } + + void swap(scoped_refptr& r) { swap(&r.ptr_); } + + protected: + T* ptr_; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_SCOPED_REF_PTR_H_ diff --git a/third_party/libwebrtc/include/helper.h b/third_party/libwebrtc/include/helper.h new file mode 100644 index 0000000000..aa467afa91 --- /dev/null +++ b/third_party/libwebrtc/include/helper.h @@ -0,0 +1,22 @@ +#ifndef HELPER_HXX +#define HELPER_HXX + +#include "rtc_types.h" + +namespace libwebrtc { +/** + * @brief A helper class with static methods for generating random UUIDs. + * + */ +class Helper { + public: + /** + * @brief Generates a random UUID string. + * + * @return The generated UUID string. 
+ */ + LIB_WEBRTC_API static string CreateRandomUuid(); +}; +} // namespace libwebrtc + +#endif // HELPER_HXX diff --git a/third_party/libwebrtc/include/libwebrtc.h b/third_party/libwebrtc/include/libwebrtc.h new file mode 100644 index 0000000000..f17af394c0 --- /dev/null +++ b/third_party/libwebrtc/include/libwebrtc.h @@ -0,0 +1,60 @@ +#ifndef LIB_WEBRTC_HXX +#define LIB_WEBRTC_HXX + +#include "rtc_peerconnection_factory.h" +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * @class LibWebRTC + * @brief Provides static methods for initializing, creating and terminating + * the WebRTC PeerConnectionFactory and threads. + * + * This class provides static methods for initializing, creating and terminating + * the WebRTC PeerConnectionFactory and threads. These methods are thread-safe + * and can be called from any thread. This class is not meant to be + * instantiated. + * + */ +class LibWebRTC { + public: + /** + * @brief Initializes the WebRTC PeerConnectionFactory and threads. + * + * Initializes the WebRTC PeerConnectionFactory and threads. This method is + * thread-safe and can be called from any thread. It initializes SSL and + * creates three threads: worker_thread, signaling_thread and network_thread. + * + * @return true if initialization is successful, false otherwise. + */ + LIB_WEBRTC_API static bool Initialize(); + + /** + * @brief Creates a new WebRTC PeerConnectionFactory. + * + * Creates a new WebRTC PeerConnectionFactory. This method is thread-safe and + * can be called from any thread. It creates a new instance of the + * RTCPeerConnectionFactoryImpl class and initializes it. + * + * @return A scoped_refptr object that points to the newly created + * RTCPeerConnectionFactory. + */ + LIB_WEBRTC_API static scoped_refptr + CreateRTCPeerConnectionFactory(); + + /** + * @brief Terminates the WebRTC PeerConnectionFactory and threads. + * + * Terminates the WebRTC PeerConnectionFactory and threads. 
This method is + * thread-safe and can be called from any thread. It cleans up SSL and stops + * and destroys the three threads: worker_thread, signaling_thread and + * network_thread. + * + */ + LIB_WEBRTC_API static void Terminate(); +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_HXX diff --git a/third_party/libwebrtc/include/rtc_audio_device.h b/third_party/libwebrtc/include/rtc_audio_device.h new file mode 100644 index 0000000000..cff7b4a6ac --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_device.h @@ -0,0 +1,99 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_DEVICE_HXX +#define LIB_WEBRTC_RTC_AUDIO_DEVICE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioDevice class is an abstract class used for managing the audio + * devices used by WebRTC. It provides methods for device enumeration and + * selection. + */ +class RTCAudioDevice : public RefCountInterface { + public: + typedef fixed_size_function OnDeviceChangeCallback; + + public: + static const int kAdmMaxDeviceNameSize = 128; + static const int kAdmMaxFileNameSize = 512; + static const int kAdmMaxGuidSize = 128; + + public: + /** + * Returns the number of playout devices available. + * + * @return int16_t - The number of playout devices available. + */ + virtual int16_t PlayoutDevices() = 0; + + /** + * Returns the number of recording devices available. + * + * @return int16_t - The number of recording devices available. + */ + virtual int16_t RecordingDevices() = 0; + + /** + * Retrieves the name and GUID of the specified playout device. + * + * @param index - The index of the device. + * @param name - The device name. + * @param guid - The device GUID. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) = 0; + + /** + * Retrieves the name and GUID of the specified recording device. + * + * @param index - The index of the device. 
+ * @param name - The device name. + * @param guid - The device GUID. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t RecordingDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) = 0; + + /** + * Sets the playout device to use. + * + * @param index - The index of the device. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t SetPlayoutDevice(uint16_t index) = 0; + + /** + * Sets the recording device to use. + * + * @param index - The index of the device. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t SetRecordingDevice(uint16_t index) = 0; + + /** + * Registers a listener to be called when audio devices are added or removed. + * + * @param listener - The callback function to register. + * @return int32_t - 0 if successful, otherwise an error code. + */ + virtual int32_t OnDeviceChange(OnDeviceChangeCallback listener) = 0; + + virtual int32_t SetMicrophoneVolume(uint32_t volume) = 0; + + virtual int32_t MicrophoneVolume(uint32_t& volume) = 0; + + virtual int32_t SetSpeakerVolume(uint32_t volume) = 0; + + virtual int32_t SpeakerVolume(uint32_t& volume) = 0; + + protected: + virtual ~RTCAudioDevice() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_DEVICE_HXX diff --git a/third_party/libwebrtc/include/rtc_audio_frame.h b/third_party/libwebrtc/include/rtc_audio_frame.h new file mode 100644 index 0000000000..7774f0c787 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_frame.h @@ -0,0 +1,103 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_FRAME_HXX +#define LIB_WEBRTC_RTC_AUDIO_FRAME_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class AudioFrame : public RefCountInterface { + public: + /** + * @brief Creates a new instance of AudioFrame. + * @return AudioFrame*: a pointer to the newly created AudioFrame. 
+ */ + LIB_WEBRTC_API static AudioFrame* Create(); + + /** + * @brief Creates a new instance of AudioFrame with specified parameters. + * @param id: the unique identifier of the frame. + * @param timestamp: the timestamp of the frame. + * @param data: a pointer to the audio data buffer. + * @param samples_per_channel: the number of samples per channel. + * @param sample_rate_hz: the sample rate in Hz. + * @param num_channels: the number of audio channels. + * @return AudioFrame*: a pointer to the newly created AudioFrame. + */ + LIB_WEBRTC_API static AudioFrame* Create(int id, uint32_t timestamp, + const int16_t* data, + size_t samples_per_channel, + int sample_rate_hz, + size_t num_channels = 1); + + public: + /** + * @brief Updates the audio frame with specified parameters. + * @param id: the unique identifier of the frame. + * @param timestamp: the timestamp of the frame. + * @param data: a pointer to the audio data buffer. + * @param samples_per_channel: the number of samples per channel. + * @param sample_rate_hz: the sample rate in Hz. + * @param num_channels: the number of audio channels. + */ + virtual void UpdateFrame(int id, uint32_t timestamp, const int16_t* data, + size_t samples_per_channel, int sample_rate_hz, + size_t num_channels = 1) = 0; + + /** + * @brief Copies the contents of another AudioFrame. + * @param src: the source AudioFrame to copy from. + */ + virtual void CopyFrom(const AudioFrame& src) = 0; + + /** + * @brief Adds another AudioFrame to this one. + * @param frame_to_add: the AudioFrame to add. + */ + virtual void Add(const AudioFrame& frame_to_add) = 0; + + /** + * @brief Mutes the audio data in this AudioFrame. + */ + virtual void Mute() = 0; + + /** + * @brief Returns a pointer to the audio data buffer. + * @return const int16_t*: a pointer to the audio data buffer. + */ + virtual const int16_t* data() = 0; + + /** + * @brief Returns the number of samples per channel. + * @return size_t: the number of samples per channel. 
+ */ + virtual size_t samples_per_channel() = 0; + + /** + * @brief Returns the sample rate in Hz. + * @return int: the sample rate in Hz. + */ + virtual int sample_rate_hz() = 0; + + /** + * @brief Returns the number of audio channels. + * @return size_t: the number of audio channels. + */ + virtual size_t num_channels() = 0; + + /** + * @brief Returns the timestamp of the AudioFrame. + * @return uint32_t: the timestamp of the AudioFrame. + */ + virtual uint32_t timestamp() = 0; + + /** + * @brief Returns the unique identifier of the AudioFrame. + * @return int: the unique identifier of the AudioFrame. + */ + + virtual int id() = 0; +}; + +}; // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_FRAME_HXX diff --git a/third_party/libwebrtc/include/rtc_audio_processing.h b/third_party/libwebrtc/include/rtc_audio_processing.h new file mode 100644 index 0000000000..908a16a380 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_processing.h @@ -0,0 +1,35 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_PROCESSING_HXX +#define LIB_WEBRTC_RTC_AUDIO_PROCESSING_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCAudioProcessing : public RefCountInterface { + public: + class CustomProcessing { + public: + virtual void Initialize(int sample_rate_hz, int num_channels) = 0; + + virtual void Process(int num_bands, int num_frames, int buffer_size, + float* buffer) = 0; + + virtual void Reset(int new_rate) = 0; + + virtual void Release() = 0; + + protected: + virtual ~CustomProcessing() {} + }; + + public: + virtual void SetCapturePostProcessing( + CustomProcessing* capture_post_processing) = 0; + + virtual void SetRenderPreProcessing( + CustomProcessing* render_pre_processing) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_PROCESSING_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_audio_source.h b/third_party/libwebrtc/include/rtc_audio_source.h new file mode 100644 index 0000000000..e2f98fca0f --- /dev/null +++ 
b/third_party/libwebrtc/include/rtc_audio_source.h @@ -0,0 +1,35 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX +#define LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioSource class is a base class for audio sources in WebRTC. + * Audio sources represent the source of audio data in WebRTC, such as a + * microphone or a file. This class provides a base interface for audio + * sources to implement, allowing them to be used with WebRTC's audio + * processing and transmission mechanisms. + */ +class RTCAudioSource : public RefCountInterface { + public: + enum SourceType { kMicrophone, kCustom }; + + public: + virtual void CaptureFrame(const void* audio_data, int bits_per_sample, + int sample_rate, size_t number_of_channels, + size_t number_of_frames) = 0; + + virtual SourceType GetSourceType() const = 0; + + protected: + /** + * The destructor for the RTCAudioSource class. + */ + virtual ~RTCAudioSource() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX diff --git a/third_party/libwebrtc/include/rtc_audio_track.h b/third_party/libwebrtc/include/rtc_audio_track.h new file mode 100644 index 0000000000..82459005d1 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_audio_track.h @@ -0,0 +1,32 @@ +#ifndef LIB_WEBRTC_RTC_AUDIO_TRACK_HXX +#define LIB_WEBRTC_RTC_AUDIO_TRACK_HXX + +#include "rtc_media_track.h" +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioTrack class represents an audio track in WebRTC. + * Audio tracks are used to transmit audio data over a WebRTC peer connection. + * This class is a subclass of the RTCMediaTrack class, which provides a base + * interface for all media tracks in WebRTC. 
+ */ +class RTCAudioTrack : public RTCMediaTrack { + public: + // volume in [0-10] + virtual void SetVolume(double volume) = 0; + + virtual void AddSink(AudioTrackSink* sink) = 0; + + virtual void RemoveSink(AudioTrackSink* sink) = 0; + + protected: + /** + * The destructor for the RTCAudioTrack class. + */ + virtual ~RTCAudioTrack() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX diff --git a/third_party/libwebrtc/include/rtc_data_channel.h b/third_party/libwebrtc/include/rtc_data_channel.h new file mode 100644 index 0000000000..e1351959f2 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_data_channel.h @@ -0,0 +1,124 @@ +#ifndef LIB_WEBRTC_RTC_DATA_CHANNEL_HXX +#define LIB_WEBRTC_RTC_DATA_CHANNEL_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCDataChannelState enum represents the possible states of a WebRTC data + * channel. Data channels are used to transmit non-audio/video data over a + * WebRTC peer connection. The possible states are: connecting, open, closing, + * and closed. + */ +enum RTCDataChannelState { + RTCDataChannelConnecting, + RTCDataChannelOpen, + RTCDataChannelClosing, + RTCDataChannelClosed, +}; + +/** + * The RTCDataChannelInit struct represents the configuration options for a + * WebRTC data channel. These options include whether the channel is ordered and + * reliable, the maximum retransmit time and number of retransmits, the protocol + * to use (sctp or quic), whether the channel is negotiated, and the channel ID. + */ +struct RTCDataChannelInit { + bool ordered = true; + bool reliable = true; + int maxRetransmitTime = -1; + int maxRetransmits = -1; + string protocol = {"sctp"}; // sctp | quic + bool negotiated = false; + int id = 0; +}; + +/** + * The RTCDataChannelObserver class is an interface for receiving events related + * to a WebRTC data channel. These events include changes in the channel's state + * and incoming messages. 
+ */ +class RTCDataChannelObserver { + public: + /** + * Called when the state of the data channel changes. + * The new state is passed as a parameter. + */ + virtual void OnStateChange(RTCDataChannelState state) = 0; + + /** + * Called when a message is received on the data channel. + * The message buffer, its length, and a boolean indicating whether the + * message is binary are passed as parameters. + */ + virtual void OnMessage(const char* buffer, int length, bool binary) = 0; + + protected: + /** + * The destructor for the RTCDataChannelObserver class. + */ + virtual ~RTCDataChannelObserver() = default; +}; + +/** + * The RTCDataChannel class represents a data channel in WebRTC. + * Data channels are used to transmit non-audio/video data over a WebRTC peer + * connection. This class provides a base interface for data channels to + * implement, allowing them to be used with WebRTC's data channel mechanisms. + */ +class RTCDataChannel : public RefCountInterface { + public: + /** + * Sends data over the data channel. + * The data buffer, its size, and a boolean indicating whether the data is + * binary are passed as parameters. + */ + virtual void Send(const uint8_t* data, uint32_t size, + bool binary = false) = 0; + + /** + * Closes the data channel. + */ + virtual void Close() = 0; + + /** + * Registers an observer for events related to the data channel. + * The observer object is passed as a parameter. + */ + virtual void RegisterObserver(RTCDataChannelObserver* observer) = 0; + + /** + * Unregisters the current observer for the data channel. + */ + virtual void UnregisterObserver() = 0; + + /** + * Returns the label of the data channel. + */ + virtual const string label() const = 0; + + /** + * Returns the ID of the data channel. + */ + virtual int id() const = 0; + + /** + * Returns the amount of data buffered in the data channel. + * + * @return uint64_t + */ + virtual uint64_t buffered_amount() const = 0; + + /** + * Returns the state of the data channel. 
+ */ + virtual RTCDataChannelState state() = 0; + + protected: + virtual ~RTCDataChannel() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_DATA_CHANNEL_HXX diff --git a/third_party/libwebrtc/include/rtc_desktop_capturer.h b/third_party/libwebrtc/include/rtc_desktop_capturer.h new file mode 100644 index 0000000000..9d2e955ab3 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_desktop_capturer.h @@ -0,0 +1,138 @@ +/** + * @file rtc_desktop_capturer.h + * This header file defines the interface for capturing desktop media. + */ + +#ifndef LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX +#define LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX + +#include "rtc_desktop_media_list.h" +#include "rtc_types.h" +#include "rtc_video_device.h" + +namespace libwebrtc { + +class DesktopCapturerObserver; + +/** + * @brief The interface for capturing desktop media. + * + * This interface defines methods for registering and deregistering observer + * for desktop capture events, starting and stopping desktop capture, and + * retrieving the current capture state and media source. + */ +class RTCDesktopCapturer : public RefCountInterface { + public: + /** + * @brief Enumeration for the possible states of desktop capture. + */ + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED }; + + public: + /** + * @brief Registers the given observer for desktop capture events. + * + * @param observer Pointer to the observer to be registered. + */ + virtual void RegisterDesktopCapturerObserver( + DesktopCapturerObserver* observer) = 0; + + /** + * @brief Deregisters the currently registered desktop capture observer. + */ + virtual void DeRegisterDesktopCapturerObserver() = 0; + + /** + * @brief Starts desktop capture with the given frame rate. + * + * @param fps The desired frame rate. + * + * @return The current capture state after attempting to start capture. 
+ */ + virtual CaptureState Start(uint32_t fps) = 0; + + /** + * @brief Starts desktop capture with the given frame rate and capture + * dimensions. + * + * @param fps The desired frame rate. + * @param x The left-most pixel coordinate of the capture region. + * @param y The top-most pixel coordinate of the capture region. + * @param w The width of the capture region. + * @param h The height of the capture region. + * + * @return The current capture state after attempting to start capture. + */ + virtual CaptureState Start(uint32_t fps, uint32_t x, uint32_t y, uint32_t w, + uint32_t h) = 0; + + /** + * @brief Stops desktop capture. + */ + virtual void Stop() = 0; + + /** + * @brief Checks if desktop capture is currently running. + * + * @return True if capture is running, false otherwise. + */ + virtual bool IsRunning() = 0; + + /** + * @brief Retrieves the media source for the current desktop capture. + * + * @return A scoped_refptr representing the current capture + * media source. + */ + virtual scoped_refptr source() = 0; + + /** + * @brief Destroys the RTCDesktopCapturer object. + */ + virtual ~RTCDesktopCapturer() {} +}; + +/** + * @brief Observer interface for desktop capturer events. + * + * This class defines the interface for an observer of the DesktopCapturer + * class, allowing clients to be notified of events such as when capturing + * begins or ends, and when an error occurs. + */ +class DesktopCapturerObserver { + public: + /** + * @brief Called when desktop capture starts. + * + * @param capturer A reference to the capturer that started capturing. + */ + virtual void OnStart(scoped_refptr capturer) = 0; + + /** + * @brief Called when desktop capture is paused. + * + * @param capturer A reference to the capturer that paused capturing. + */ + virtual void OnPaused(scoped_refptr capturer) = 0; + + /** + * @brief Called when desktop capture stops. + * + * @param capturer A reference to the capturer that stopped capturing. 
+ */ + virtual void OnStop(scoped_refptr capturer) = 0; + + /** + * @brief Called when an error occurs during desktop capture. + * + * @param capturer A reference to the capturer that encountered an error. + */ + virtual void OnError(scoped_refptr capturer) = 0; + + protected: + ~DesktopCapturerObserver() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX diff --git a/third_party/libwebrtc/include/rtc_desktop_device.h b/third_party/libwebrtc/include/rtc_desktop_device.h new file mode 100644 index 0000000000..e3e4c6fab1 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_desktop_device.h @@ -0,0 +1,25 @@ +#ifndef LIB_WEBRTC_RTC_DESKTOP_DEVICE_HXX +#define LIB_WEBRTC_RTC_DESKTOP_DEVICE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class MediaSource; +class RTCDesktopCapturer; +class RTCDesktopMediaList; + +class RTCDesktopDevice : public RefCountInterface { + public: + virtual scoped_refptr CreateDesktopCapturer( + scoped_refptr source) = 0; + virtual scoped_refptr GetDesktopMediaList( + DesktopType type) = 0; + + protected: + virtual ~RTCDesktopDevice() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_desktop_media_list.h b/third_party/libwebrtc/include/rtc_desktop_media_list.h new file mode 100644 index 0000000000..48faf2a959 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_desktop_media_list.h @@ -0,0 +1,66 @@ +#ifndef LIB_WEBRTC_RTC_DESKTOP_MEDIA_LIST_HXX +#define LIB_WEBRTC_RTC_DESKTOP_MEDIA_LIST_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class MediaSource : public RefCountInterface { + public: + // source id + virtual string id() const = 0; + + // source name + virtual string name() const = 0; + + // Returns the thumbnail of the source, jpeg format. 
+ virtual portable::vector thumbnail() const = 0; + + virtual DesktopType type() const = 0; + + virtual bool UpdateThumbnail() = 0; + + protected: + virtual ~MediaSource() {} +}; + +class MediaListObserver { + public: + virtual void OnMediaSourceAdded(scoped_refptr source) = 0; + + virtual void OnMediaSourceRemoved(scoped_refptr source) = 0; + + virtual void OnMediaSourceNameChanged(scoped_refptr source) = 0; + + virtual void OnMediaSourceThumbnailChanged( + scoped_refptr source) = 0; + + protected: + virtual ~MediaListObserver() {} +}; + +class RTCDesktopMediaList : public RefCountInterface { + public: + virtual void RegisterMediaListObserver(MediaListObserver* observer) = 0; + + virtual void DeRegisterMediaListObserver() = 0; + + virtual DesktopType type() const = 0; + + virtual int32_t UpdateSourceList(bool force_reload = false, + bool get_thumbnail = true) = 0; + + virtual int GetSourceCount() const = 0; + + virtual scoped_refptr GetSource(int index) = 0; + + virtual bool GetThumbnail(scoped_refptr source, + bool notify = false) = 0; + + protected: + ~RTCDesktopMediaList() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_DESKTOP_MEDIA_LIST_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_dtls_transport.h b/third_party/libwebrtc/include/rtc_dtls_transport.h new file mode 100644 index 0000000000..fe7adad7f0 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_dtls_transport.h @@ -0,0 +1,51 @@ +#ifndef LIB_WEBRTC_DTLS_TRANSPORT_H_ +#define LIB_WEBRTC_DTLS_TRANSPORT_H_ + +#include "base/refcount.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCDtlsTransportInformation : public RefCountInterface { + public: + enum class RTCDtlsTransportState { + kNew, // Has not started negotiating yet. + kConnecting, // In the process of negotiating a secure connection. + kConnected, // Completed negotiation and verified fingerprints. + kClosed, // Intentionally closed. 
+ kFailed, // Failure due to an error or failing to verify a remote + // fingerprint. + kNumValues + }; + virtual RTCDtlsTransportInformation& operator=( + scoped_refptr c) = 0; + + virtual RTCDtlsTransportState state() const = 0; + virtual int ssl_cipher_suite() const = 0; + virtual int srtp_cipher_suite() const = 0; +}; + +class RTCDtlsTransportObserver { + public: + virtual void OnStateChange(RTCDtlsTransportInformation info) = 0; + + virtual void OnError(const int type, const char* message) = 0; + + protected: + virtual ~RTCDtlsTransportObserver() = default; +}; + +class RTCDtlsTransport : public RefCountInterface { + LIB_WEBRTC_API static scoped_refptr Create(); + + public: + virtual scoped_refptr GetInformation() = 0; + + virtual void RegisterObserver(RTCDtlsTransportObserver* observer) = 0; + + virtual void UnregisterObserver() = 0; +}; + +} // namespace libwebrtc + +#endif // API_DTLS_TRANSPORT_INTERFACE_H_ diff --git a/third_party/libwebrtc/include/rtc_dtmf_sender.h b/third_party/libwebrtc/include/rtc_dtmf_sender.h new file mode 100644 index 0000000000..7f42cc56b5 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_dtmf_sender.h @@ -0,0 +1,47 @@ + +#ifndef LIB_WEBRTC_DTMF_SENDER__H_ +#define LIB_WEBRTC_DTMF_SENDER__H_ + +#include "base/refcount.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCDtmfSenderObserver { + public: + virtual void OnToneChange(const string tone, const string tone_buffer) = 0; + + virtual void OnToneChange(const string tone) = 0; + + protected: + virtual ~RTCDtmfSenderObserver() = default; +}; + +class RTCDtmfSender : public RefCountInterface { + public: + static const int kDtmfDefaultCommaDelayMs = 2000; + + virtual void RegisterObserver(RTCDtmfSenderObserver* observer) = 0; + + virtual void UnregisterObserver() = 0; + + virtual bool InsertDtmf(const string tones, int duration, + int inter_tone_gap) = 0; + + virtual bool InsertDtmf(const string tones, int duration, int inter_tone_gap, + int comma_delay) = 0; + + 
virtual bool CanInsertDtmf() = 0; + + virtual const string tones() const = 0; + + virtual int duration() const = 0; + + virtual int inter_tone_gap() const = 0; + + virtual int comma_delay() const = 0; +}; + +} // namespace libwebrtc + +#endif // API_DTMF_SENDER__H_ diff --git a/third_party/libwebrtc/include/rtc_frame_cryptor.h b/third_party/libwebrtc/include/rtc_frame_cryptor.h new file mode 100644 index 0000000000..47f3a409b7 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_frame_cryptor.h @@ -0,0 +1,139 @@ +#ifndef LIB_RTC_FRAME_CYRPTOR_H_ +#define LIB_RTC_FRAME_CYRPTOR_H_ + +#include "base/refcount.h" +#include "rtc_peerconnection_factory.h" +#include "rtc_rtp_receiver.h" +#include "rtc_rtp_sender.h" +#include "rtc_types.h" + +namespace libwebrtc { + +enum class Algorithm { + kAesGcm = 0, + kAesCbc, +}; + +#define DEFAULT_KEYRING_SIZE 16 +#define MAX_KEYRING_SIZE 255 + +struct KeyProviderOptions { + bool shared_key; + vector ratchet_salt; + vector uncrypted_magic_bytes; + int ratchet_window_size; + int failure_tolerance; + // The size of the key ring. between 1 and 255. + int key_ring_size; + bool discard_frame_when_cryptor_not_ready; + KeyProviderOptions() + : shared_key(false), + ratchet_salt(vector()), + ratchet_window_size(0), + failure_tolerance(-1), + key_ring_size(DEFAULT_KEYRING_SIZE), + discard_frame_when_cryptor_not_ready(false) {} + KeyProviderOptions(KeyProviderOptions& copy) + : shared_key(copy.shared_key), + ratchet_salt(copy.ratchet_salt), + ratchet_window_size(copy.ratchet_window_size), + failure_tolerance(copy.failure_tolerance), + key_ring_size(copy.key_ring_size) {} +}; + +/// Shared secret key for frame encryption. 
+class KeyProvider : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create(KeyProviderOptions*); + + virtual bool SetSharedKey(int index, vector key) = 0; + + virtual vector RatchetSharedKey(int key_index) = 0; + + virtual vector ExportSharedKey(int key_index) = 0; + + /// Set the key at the given index. + virtual bool SetKey(const string participant_id, int index, + vector key) = 0; + + virtual vector RatchetKey(const string participant_id, + int key_index) = 0; + + virtual vector ExportKey(const string participant_id, + int key_index) = 0; + + virtual void SetSifTrailer(vector trailer) = 0; + + protected: + virtual ~KeyProvider() {} +}; + +enum RTCFrameCryptionState { + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +}; + +class RTCFrameCryptorObserver : public RefCountInterface { + public: + virtual void OnFrameCryptionStateChanged(const string participant_id, + RTCFrameCryptionState state) = 0; + + protected: + virtual ~RTCFrameCryptorObserver() {} +}; + +/// Frame encryption/decryption. +/// +class RTCFrameCryptor : public RefCountInterface { + public: + /// Enable/Disable frame crypto for the sender or receiver. + virtual bool SetEnabled(bool enabled) = 0; + + /// Get the enabled state for the sender or receiver. + virtual bool enabled() const = 0; + + /// Set the key index for the sender or receiver. + /// If the key index is not set, the key index will be set to 0. + virtual bool SetKeyIndex(int index) = 0; + + /// Get the key index for the sender or receiver. + virtual int key_index() const = 0; + + virtual const string participant_id() const = 0; + + virtual void RegisterRTCFrameCryptorObserver( + scoped_refptr observer) = 0; + + virtual void DeRegisterRTCFrameCryptorObserver() = 0; + + protected: + virtual ~RTCFrameCryptor() {} +}; + +class FrameCryptorFactory { + public: + /// Create a frame cyrptor for [RTCRtpSender]. 
+ LIB_WEBRTC_API static scoped_refptr + frameCryptorFromRtpSender(scoped_refptr factory, + const string participant_id, + scoped_refptr sender, + Algorithm algorithm, + scoped_refptr key_provider); + + /// Create a frame cyrptor for [RTCRtpReceiver]. + LIB_WEBRTC_API static scoped_refptr + frameCryptorFromRtpReceiver(scoped_refptr factory, + const string participant_id, + scoped_refptr receiver, + Algorithm algorithm, + scoped_refptr key_provider); +}; + +} // namespace libwebrtc + +#endif // LIB_RTC_FRAME_CYRPTOR_H_ \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_ice_candidate.h b/third_party/libwebrtc/include/rtc_ice_candidate.h new file mode 100644 index 0000000000..82da46edb4 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_ice_candidate.h @@ -0,0 +1,29 @@ +#ifndef LIB_WEBRTC_RTC_ICE_CANDIDATE_HXX +#define LIB_WEBRTC_RTC_ICE_CANDIDATE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCIceCandidate : public RefCountInterface { + public: + static LIB_WEBRTC_API scoped_refptr Create( + const string sdp, const string sdp_mid, int sdp_mline_index, + SdpParseError* error); + + public: + virtual const string candidate() const = 0; + + virtual const string sdp_mid() const = 0; + + virtual int sdp_mline_index() const = 0; + + virtual bool ToString(string& out) = 0; + + protected: + virtual ~RTCIceCandidate() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_ICE_CANDIDATE_HXX diff --git a/third_party/libwebrtc/include/rtc_ice_transport.h b/third_party/libwebrtc/include/rtc_ice_transport.h new file mode 100644 index 0000000000..645f305b2d --- /dev/null +++ b/third_party/libwebrtc/include/rtc_ice_transport.h @@ -0,0 +1,75 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef LIB_WEBRTC_RTC_ICE_TRANSPORT_H_ +#define LIB_WEBRTC_RTC_ICE_TRANSPORT_H_ + +#include + +#include "api/async_dns_resolver.h" +#include "api/async_resolver_factory.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" + +namespace libwebrtc { + +class IceTransport : public RefCountInterface { + public: + virtual IceTransport* internal() = 0; +}; + +class IceTransportInit final { + public: + IceTransportInit() = default; + IceTransportInit(const IceTransportInit&) = delete; + IceTransportInit(IceTransportInit&&) = default; + IceTransportInit& operator=(const IceTransportInit&) = delete; + IceTransportInit& operator=(IceTransportInit&&) = default; + + cricket::PortAllocator* port_allocator() { return port_allocator_; } + void set_port_allocator(cricket::PortAllocator* port_allocator) { + port_allocator_ = port_allocator; + } + + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory() { + return async_dns_resolver_factory_; + } + void set_async_dns_resolver_factory( + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory) { + RTC_DCHECK(!async_resolver_factory_); + async_dns_resolver_factory_ = async_dns_resolver_factory; + } + AsyncResolverFactory* async_resolver_factory() { + return async_resolver_factory_; + } + ABSL_DEPRECATED("bugs.webrtc.org/12598") + void set_async_resolver_factory( + AsyncResolverFactory* async_resolver_factory) { + RTC_DCHECK(!async_dns_resolver_factory_); + async_resolver_factory_ = async_resolver_factory; + } + + RtcEventLog* event_log() { return event_log_; } + void set_event_log(RtcEventLog* event_log) { event_log_ = event_log; } +}; + +class IceTransportFactory { + public: + virtual ~IceTransportFactory() = default; + + virtual 
scoped_refptr CreateIceTransport( + const std::string& transport_name, int component, + IceTransportInit init) = 0; +}; + +} // namespace libwebrtc +#endif // API_ICE_TRANSPORT_INTERFACE_H_ diff --git a/third_party/libwebrtc/include/rtc_media_stream.h b/third_party/libwebrtc/include/rtc_media_stream.h new file mode 100644 index 0000000000..17c04d1ba4 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_media_stream.h @@ -0,0 +1,42 @@ +#ifndef LIB_WEBRTC_RTC_MEDIA_STREAM_HXX +#define LIB_WEBRTC_RTC_MEDIA_STREAM_HXX + +#include "rtc_audio_track.h" +#include "rtc_types.h" +#include "rtc_video_track.h" + +namespace libwebrtc { + +class RTCMediaStream : public RefCountInterface { + public: + virtual bool AddTrack(scoped_refptr track) = 0; + + virtual bool AddTrack(scoped_refptr track) = 0; + + virtual bool RemoveTrack(scoped_refptr track) = 0; + + virtual bool RemoveTrack(scoped_refptr track) = 0; + + virtual vector> audio_tracks() = 0; + + virtual vector> video_tracks() = 0; + + virtual vector> tracks() = 0; + + virtual scoped_refptr FindAudioTrack( + const string track_id) = 0; + + virtual scoped_refptr FindVideoTrack( + const string track_id) = 0; + + virtual const string label() = 0; + + virtual const string id() = 0; + + protected: + ~RTCMediaStream() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_STREAM_HXX diff --git a/third_party/libwebrtc/include/rtc_media_track.h b/third_party/libwebrtc/include/rtc_media_track.h new file mode 100644 index 0000000000..d971c89d27 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_media_track.h @@ -0,0 +1,43 @@ +#ifndef LIB_WEBRTC_RTC_MEDIA_TRACK_HXX +#define LIB_WEBRTC_RTC_MEDIA_TRACK_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class AudioTrackSink { + public: + virtual void OnData(const void* audio_data, int bits_per_sample, + int sample_rate, size_t number_of_channels, + size_t number_of_frames) = 0; + protected: + virtual ~AudioTrackSink() {} +}; + +/*Media Track interface*/ +class 
RTCMediaTrack : public RefCountInterface { + public: + enum RTCTrackState { + kLive, + kEnded, + }; + virtual RTCTrackState state() const = 0; + + /*track type: audio/video*/ + virtual const string kind() const = 0; + + /*track id*/ + virtual const string id() const = 0; + + virtual bool enabled() const = 0; + + /*mute track*/ + virtual bool set_enabled(bool enable) = 0; + + protected: + ~RTCMediaTrack() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_TRACK_HXX diff --git a/third_party/libwebrtc/include/rtc_mediaconstraints.h b/third_party/libwebrtc/include/rtc_mediaconstraints.h new file mode 100644 index 0000000000..2596e6153f --- /dev/null +++ b/third_party/libwebrtc/include/rtc_mediaconstraints.h @@ -0,0 +1,79 @@ +#ifndef LIB_WEBRTC_RTC_MEDIA_CONSTRAINTS_HXX +#define LIB_WEBRTC_RTC_MEDIA_CONSTRAINTS_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCMediaConstraints : public RefCountInterface { + public: + // These keys are google specific. + LIB_WEBRTC_API static const char* + kGoogEchoCancellation; // googEchoCancellation + + LIB_WEBRTC_API static const char* + kExtendedFilterEchoCancellation; // googEchoCancellation2 + LIB_WEBRTC_API static const char* + kDAEchoCancellation; // googDAEchoCancellation + LIB_WEBRTC_API static const char* kAutoGainControl; // googAutoGainControl + LIB_WEBRTC_API static const char* kNoiseSuppression; // googNoiseSuppression + LIB_WEBRTC_API static const char* kHighpassFilter; // googHighpassFilter + LIB_WEBRTC_API static const char* kAudioMirroring; // googAudioMirroring + LIB_WEBRTC_API static const char* + kAudioNetworkAdaptorConfig; // goodAudioNetworkAdaptorConfig + + // Constraint keys for CreateOffer / CreateAnswer + // Specified by the W3C PeerConnection spec + LIB_WEBRTC_API static const char* + kOfferToReceiveVideo; // OfferToReceiveVideo + LIB_WEBRTC_API static const char* + kOfferToReceiveAudio; // OfferToReceiveAudio + LIB_WEBRTC_API static const char* + kVoiceActivityDetection; 
// VoiceActivityDetection + LIB_WEBRTC_API static const char* kIceRestart; // IceRestart + // These keys are google specific. + LIB_WEBRTC_API static const char* kUseRtpMux; // googUseRtpMUX + + // Constraints values. + LIB_WEBRTC_API static const char* kValueTrue; // true + LIB_WEBRTC_API static const char* kValueFalse; // false + + // PeerConnection constraint keys. + // Temporary pseudo-constraints used to enable DataChannels + LIB_WEBRTC_API static const char* + kEnableRtpDataChannels; // Enable RTP DataChannels + // Google-specific constraint keys. + // Temporary pseudo-constraint for enabling DSCP through JS. + LIB_WEBRTC_API static const char* kEnableDscp; // googDscp + // Constraint to enable IPv6 through JS. + LIB_WEBRTC_API static const char* kEnableIPv6; // googIPv6 + // Temporary constraint to enable suspend below min bitrate feature. + LIB_WEBRTC_API static const char* kEnableVideoSuspendBelowMinBitrate; + // googSuspendBelowMinBitrate + // Constraint to enable combined audio+video bandwidth estimation. + //LIB_WEBRTC_API static const char* + // kCombinedAudioVideoBwe; // googCombinedAudioVideoBwe + LIB_WEBRTC_API static const char* + kScreencastMinBitrate; // googScreencastMinBitrate + LIB_WEBRTC_API static const char* + kCpuOveruseDetection; // googCpuOveruseDetection + + // Specifies number of simulcast layers for all video tracks + // with a Plan B offer/answer + // (see RTCOfferAnswerOptions::num_simulcast_layers). 
+ LIB_WEBRTC_API static const char* kNumSimulcastLayers; + + public: + LIB_WEBRTC_API static scoped_refptr Create(); + + virtual void AddMandatoryConstraint(const string key, const string value) = 0; + + virtual void AddOptionalConstraint(const string key, const string value) = 0; + + protected: + virtual ~RTCMediaConstraints() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_CONSTRAINTS_HXX diff --git a/third_party/libwebrtc/include/rtc_peerconnection.h b/third_party/libwebrtc/include/rtc_peerconnection.h new file mode 100644 index 0000000000..35ea2ad3ea --- /dev/null +++ b/third_party/libwebrtc/include/rtc_peerconnection.h @@ -0,0 +1,273 @@ +#ifndef LIB_WEBRTC_RTC_PEERCONNECTION_HXX +#define LIB_WEBRTC_RTC_PEERCONNECTION_HXX + +#include "rtc_audio_track.h" +#include "rtc_data_channel.h" +#include "rtc_ice_candidate.h" +#include "rtc_media_stream.h" +#include "rtc_mediaconstraints.h" +#include "rtc_rtp_sender.h" +#include "rtc_rtp_transceiver.h" +#include "rtc_session_description.h" +#include "rtc_video_source.h" +#include "rtc_video_track.h" + +namespace libwebrtc { + +enum SessionDescriptionErrorType { + kPeerConnectionInitFailed = 0, + kCreatePeerConnectionFailed, + kSDPParseFailed, +}; + +enum RTCPeerConnectionState { + RTCPeerConnectionStateNew, + RTCPeerConnectionStateConnecting, + RTCPeerConnectionStateConnected, + RTCPeerConnectionStateDisconnected, + RTCPeerConnectionStateFailed, + RTCPeerConnectionStateClosed, +}; + +enum RTCSignalingState { + RTCSignalingStateStable, + RTCSignalingStateHaveLocalOffer, + RTCSignalingStateHaveRemoteOffer, + RTCSignalingStateHaveLocalPrAnswer, + RTCSignalingStateHaveRemotePrAnswer, + RTCSignalingStateClosed +}; + +enum RTCIceGatheringState { + RTCIceGatheringStateNew, + RTCIceGatheringStateGathering, + RTCIceGatheringStateComplete +}; + +enum RTCIceConnectionState { + RTCIceConnectionStateNew, + RTCIceConnectionStateChecking, + RTCIceConnectionStateCompleted, + RTCIceConnectionStateConnected, + 
RTCIceConnectionStateFailed, + RTCIceConnectionStateDisconnected, + RTCIceConnectionStateClosed, + RTCIceConnectionStateMax, +}; + +class RTCStatsMember : public RefCountInterface { + public: + // Member value types. + enum Type { + kBool, // bool + kInt32, // int32_t + kUint32, // uint32_t + kInt64, // int64_t + kUint64, // uint64_t + kDouble, // double + kString, // std::string + + kSequenceBool, // std::vector + kSequenceInt32, // std::vector + kSequenceUint32, // std::vector + kSequenceInt64, // std::vector + kSequenceUint64, // std::vector + kSequenceDouble, // std::vector + kSequenceString, // std::vector + + kMapStringUint64, // std::map + kMapStringDouble, // std::map + }; + + public: + virtual string GetName() const = 0; + virtual Type GetType() const = 0; + virtual bool IsDefined() const = 0; + + virtual bool ValueBool() const = 0; + virtual int32_t ValueInt32() const = 0; + virtual uint32_t ValueUint32() const = 0; + virtual int64_t ValueInt64() const = 0; + virtual uint64_t ValueUint64() const = 0; + virtual double ValueDouble() const = 0; + virtual string ValueString() const = 0; + virtual vector ValueSequenceBool() const = 0; + virtual vector ValueSequenceInt32() const = 0; + virtual vector ValueSequenceUint32() const = 0; + virtual vector ValueSequenceInt64() const = 0; + virtual vector ValueSequenceUint64() const = 0; + virtual vector ValueSequenceDouble() const = 0; + virtual vector ValueSequenceString() const = 0; + virtual map ValueMapStringUint64() const = 0; + virtual map ValueMapStringDouble() const = 0; + + protected: + virtual ~RTCStatsMember() {} +}; + +class MediaRTCStats : public RefCountInterface { + public: + virtual const string id() = 0; + + virtual const string type() = 0; + + virtual int64_t timestamp_us() = 0; + + virtual const string ToJson() = 0; + + virtual const vector> Members() = 0; +}; + +typedef fixed_size_function> reports)> + OnStatsCollectorSuccess; + +typedef fixed_size_function OnStatsCollectorFailure; + +typedef 
fixed_size_function + OnSdpCreateSuccess; + +typedef fixed_size_function OnSdpCreateFailure; + +typedef fixed_size_function OnSetSdpSuccess; + +typedef fixed_size_function OnSetSdpFailure; + +typedef fixed_size_function + OnGetSdpSuccess; + +typedef fixed_size_function OnGetSdpFailure; + +class RTCPeerConnectionObserver { + public: + virtual void OnSignalingState(RTCSignalingState state) = 0; + + virtual void OnPeerConnectionState(RTCPeerConnectionState state) = 0; + + virtual void OnIceGatheringState(RTCIceGatheringState state) = 0; + + virtual void OnIceConnectionState(RTCIceConnectionState state) = 0; + + virtual void OnIceCandidate(scoped_refptr candidate) = 0; + + virtual void OnAddStream(scoped_refptr stream) = 0; + + virtual void OnRemoveStream(scoped_refptr stream) = 0; + + virtual void OnDataChannel(scoped_refptr data_channel) = 0; + + virtual void OnRenegotiationNeeded() = 0; + + virtual void OnTrack(scoped_refptr transceiver) = 0; + + virtual void OnAddTrack(vector> streams, + scoped_refptr receiver) = 0; + + virtual void OnRemoveTrack(scoped_refptr receiver) = 0; + + protected: + virtual ~RTCPeerConnectionObserver() {} +}; + +class RTCPeerConnection : public RefCountInterface { + public: + virtual int AddStream(scoped_refptr stream) = 0; + + virtual int RemoveStream(scoped_refptr stream) = 0; + + virtual scoped_refptr CreateLocalMediaStream( + const string stream_id) = 0; + + virtual scoped_refptr CreateDataChannel( + const string label, RTCDataChannelInit* dataChannelDict) = 0; + + virtual void CreateOffer(OnSdpCreateSuccess success, + OnSdpCreateFailure failure, + scoped_refptr constraints) = 0; + + virtual void CreateAnswer(OnSdpCreateSuccess success, + OnSdpCreateFailure failure, + scoped_refptr constraints) = 0; + + virtual void RestartIce() = 0; + + virtual void Close() = 0; + + virtual void SetLocalDescription(const string sdp, const string type, + OnSetSdpSuccess success, + OnSetSdpFailure failure) = 0; + + virtual void 
SetRemoteDescription(const string sdp, const string type, + OnSetSdpSuccess success, + OnSetSdpFailure failure) = 0; + + virtual void GetLocalDescription(OnGetSdpSuccess success, + OnGetSdpFailure failure) = 0; + + virtual void GetRemoteDescription(OnGetSdpSuccess success, + OnGetSdpFailure failure) = 0; + + virtual void AddCandidate(const string mid, int mid_mline_index, + const string candiate) = 0; + + virtual void RegisterRTCPeerConnectionObserver( + RTCPeerConnectionObserver* observer) = 0; + + virtual void DeRegisterRTCPeerConnectionObserver() = 0; + + virtual vector> local_streams() = 0; + + virtual vector> remote_streams() = 0; + + virtual bool GetStats(scoped_refptr sender, + OnStatsCollectorSuccess success, + OnStatsCollectorFailure failure) = 0; + + virtual bool GetStats(scoped_refptr receiver, + OnStatsCollectorSuccess success, + OnStatsCollectorFailure failure) = 0; + + virtual void GetStats(OnStatsCollectorSuccess success, + OnStatsCollectorFailure failure) = 0; + + virtual scoped_refptr AddTransceiver( + scoped_refptr track, + scoped_refptr init) = 0; + + virtual scoped_refptr AddTransceiver( + scoped_refptr track) = 0; + + virtual scoped_refptr AddTrack( + scoped_refptr track, const vector streamIds) = 0; + + virtual scoped_refptr AddTransceiver( + RTCMediaType media_type) = 0; + + virtual scoped_refptr AddTransceiver( + RTCMediaType media_type, scoped_refptr init) = 0; + + virtual bool RemoveTrack(scoped_refptr render) = 0; + + virtual vector> senders() = 0; + + virtual vector> transceivers() = 0; + + virtual vector> receivers() = 0; + + virtual RTCSignalingState signaling_state() = 0; + + virtual RTCIceConnectionState ice_connection_state() = 0; + + virtual RTCIceConnectionState standardized_ice_connection_state() = 0; + + virtual RTCPeerConnectionState peer_connection_state() = 0; + + virtual RTCIceGatheringState ice_gathering_state() = 0; + + protected: + virtual ~RTCPeerConnection() {} +}; + +} // namespace libwebrtc + +#endif // 
LIB_WEBRTC_RTC_PEERCONNECTION_HXX diff --git a/third_party/libwebrtc/include/rtc_peerconnection_factory.h b/third_party/libwebrtc/include/rtc_peerconnection_factory.h new file mode 100644 index 0000000000..1b6b8c1aec --- /dev/null +++ b/third_party/libwebrtc/include/rtc_peerconnection_factory.h @@ -0,0 +1,75 @@ +#ifndef LIB_WEBRTC_RTC_PEERCONNECTION_FACTORY_HXX +#define LIB_WEBRTC_RTC_PEERCONNECTION_FACTORY_HXX + +#include "rtc_audio_source.h" +#include "rtc_audio_track.h" +#include "rtc_types.h" +#ifdef RTC_DESKTOP_DEVICE +#include "rtc_desktop_device.h" +#endif +#include "rtc_media_stream.h" +#include "rtc_mediaconstraints.h" +#include "rtc_video_device.h" +#include "rtc_video_source.h" + +namespace libwebrtc { + +class RTCPeerConnection; +class RTCAudioDevice; +class RTCAudioProcessing; +class RTCVideoDevice; +class RTCRtpCapabilities; + +class RTCPeerConnectionFactory : public RefCountInterface { + public: + virtual bool Initialize() = 0; + + virtual bool Terminate() = 0; + + virtual scoped_refptr Create( + const RTCConfiguration& configuration, + scoped_refptr constraints) = 0; + + virtual void Delete(scoped_refptr peerconnection) = 0; + + virtual scoped_refptr GetAudioDevice() = 0; + + virtual scoped_refptr GetAudioProcessing() = 0; + + virtual scoped_refptr GetVideoDevice() = 0; +#ifdef RTC_DESKTOP_DEVICE + virtual scoped_refptr GetDesktopDevice() = 0; +#endif + virtual scoped_refptr CreateAudioSource( + const string audio_source_label, + RTCAudioSource::SourceType source_type = + RTCAudioSource::SourceType::kMicrophone) = 0; + + virtual scoped_refptr CreateVideoSource( + scoped_refptr capturer, const string video_source_label, + scoped_refptr constraints) = 0; +#ifdef RTC_DESKTOP_DEVICE + virtual scoped_refptr CreateDesktopSource( + scoped_refptr capturer, + const string video_source_label, + scoped_refptr constraints) = 0; +#endif + virtual scoped_refptr CreateAudioTrack( + scoped_refptr source, const string track_id) = 0; + + virtual scoped_refptr 
CreateVideoTrack( + scoped_refptr source, const string track_id) = 0; + + virtual scoped_refptr CreateStream( + const string stream_id) = 0; + + virtual scoped_refptr GetRtpSenderCapabilities( + RTCMediaType media_type) = 0; + + virtual scoped_refptr GetRtpReceiverCapabilities( + RTCMediaType media_type) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_PEERCONNECTION_FACTORY_HXX diff --git a/third_party/libwebrtc/include/rtc_rtp_capabilities.h b/third_party/libwebrtc/include/rtc_rtp_capabilities.h new file mode 100644 index 0000000000..39b49765fc --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_capabilities.h @@ -0,0 +1,60 @@ +#ifndef LIB_WBBRTC_RTC_RTP_CAPABILITIES_HXX +#define LIB_WBBRTC_RTC_RTP_CAPABILITIES_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCRtpCodecCapability : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create(); + + virtual void set_mime_type(const string& mime_type) = 0; + virtual void set_clock_rate(int clock_rate) = 0; + virtual void set_channels(int channels) = 0; + virtual void set_sdp_fmtp_line(const string& sdp_fmtp_line) = 0; + + virtual string mime_type() const = 0; + virtual int clock_rate() const = 0; + virtual int channels() const = 0; + virtual string sdp_fmtp_line() const = 0; + + protected: + virtual ~RTCRtpCodecCapability() {} +}; + +class RTCRtpHeaderExtensionCapability : public RefCountInterface { + public: + virtual const string uri() = 0; + virtual void set_uri(const string uri) = 0; + + virtual int preferred_id() = 0; + virtual void set_preferred_id(int value) = 0; + + virtual bool preferred_encrypt() = 0; + virtual void set_preferred_encrypt(bool value) = 0; +}; + +class RTCRtpCapabilities : public RefCountInterface { + public: + virtual const vector> codecs() = 0; + virtual void set_codecs( + const vector> codecs) = 0; + + virtual const vector> + 
header_extensions() = 0; + + virtual void set_header_extensions( + const vector> + header_extensions) = 0; + + // virtual const vector> fec() = 0; + // virtual void set_fec(const vector> fec) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WBBRTC_RTC_RTP_CAPABILITIES_HXX diff --git a/third_party/libwebrtc/include/rtc_rtp_parameters.h b/third_party/libwebrtc/include/rtc_rtp_parameters.h new file mode 100644 index 0000000000..3ef87155d5 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_parameters.h @@ -0,0 +1,243 @@ +#ifndef LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX +#define LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_types.h" + +namespace libwebrtc { + +enum class RTCRtpTransceiverDirection { + kSendRecv, + kSendOnly, + kRecvOnly, + kInactive, + kStopped, +}; + +enum class RTCFecMechanism { + RED, + RED_AND_ULPFEC, + FLEXFEC, +}; + +enum class RTCRtcpFeedbackType { + CCM, + LNTF, + NACK, + REMB, + TRANSPORT_CC, +}; + +enum class RTCRtcpFeedbackMessageType { + GENERIC_NACK, + PLI, + FIR, +}; + +enum class RTCDtxStatus { + DISABLED, + ENABLED, +}; + +enum class RTCDegradationPreference { + DISABLED, + MAINTAIN_FRAMERATE, + MAINTAIN_RESOLUTION, + BALANCED, +}; + +class RTCRtcpFeedback : public RefCountInterface { + virtual RTCRtcpFeedbackType type() = 0; + virtual void set_type(RTCRtcpFeedbackType value) = 0; + + virtual RTCRtcpFeedbackMessageType message_type() = 0; + virtual void set_message_type(RTCRtcpFeedbackMessageType value) = 0; + + virtual bool operator==(scoped_refptr o) = 0; + virtual bool operator!=(scoped_refptr o) = 0; +}; + +class RTCRtpExtension : public RefCountInterface { + public: + enum RTCFilter { + kDiscardEncryptedExtension, + kPreferEncryptedExtension, + kRequireEncryptedExtension, + }; + + virtual const string ToString() const = 0; + virtual bool operator==(scoped_refptr o) const = 0; + + virtual const string uri() = 0; + virtual void set_uri(const string uri) = 0; + + 
virtual int id() = 0; + virtual void set_id(int value) = 0; + + virtual bool encrypt() = 0; + virtual void set_encrypt(bool value) = 0; +}; + +class RtpFecParameters : public RefCountInterface { + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual RTCFecMechanism mechanism() = 0; + virtual void set_mechanism(RTCFecMechanism value) = 0; + + virtual bool operator==(const RtpFecParameters& o) const = 0; + virtual bool operator!=(const RtpFecParameters& o) const = 0; +}; + +class RTCRtpRtxParameters : public RefCountInterface { + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual bool operator==(scoped_refptr o) const = 0; + + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +class RTCRtpCodecParameters : public RefCountInterface { + public: + virtual const string mime_type() const = 0; + + virtual const string name() = 0; + virtual void set_name(const string name) = 0; + + virtual RTCMediaType kind() = 0; + virtual void set_kind(RTCMediaType value) = 0; + + virtual int payload_type() = 0; + virtual void set_payload_type(int value) = 0; + + virtual int clock_rate() = 0; + virtual void set_clock_rate(int value) = 0; + + virtual int num_channels() = 0; + virtual void set_num_channels(int value) = 0; + + virtual int max_ptime() = 0; + virtual void set_max_ptime(int value) = 0; + + virtual int ptime() = 0; + virtual void set_ptime(int value) = 0; + + virtual const vector> rtcp_feedback() = 0; + virtual void set_rtcp_feedback( + const vector> feecbacks) = 0; + + virtual const vector> parameters() = 0; + virtual void set_parameters(const map parameters) = 0; + + virtual bool operator==(scoped_refptr o) = 0; + virtual bool operator!=(scoped_refptr o) = 0; + + protected: + virtual ~RTCRtpCodecParameters() {} +}; + +class RTCRtcpParameters : public RefCountInterface { + public: + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual const string cname() = 0; + virtual 
void set_cname(const string) = 0; + + virtual bool reduced_size() = 0; + virtual void set_reduced_size(bool value) = 0; + + virtual bool mux() = 0; + virtual void set_mux(bool value) = 0; + + virtual bool operator==(scoped_refptr o) const = 0; + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +enum class RTCPriority { + kVeryLow, + kLow, + kMedium, + kHigh, +}; + +class RTCRtpEncodingParameters : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create(); + + virtual uint32_t ssrc() = 0; + virtual void set_ssrc(uint32_t value) = 0; + + virtual double bitrate_priority() = 0; + virtual void set_bitrate_priority(double value) = 0; + + virtual RTCPriority network_priority() = 0; + virtual void set_network_priority(RTCPriority value) = 0; + + virtual int max_bitrate_bps() = 0; + virtual void set_max_bitrate_bps(int value) = 0; + + virtual int min_bitrate_bps() = 0; + virtual void set_min_bitrate_bps(int value) = 0; + + virtual double max_framerate() = 0; + virtual void set_max_framerate(double value) = 0; + + virtual int num_temporal_layers() = 0; + virtual void set_num_temporal_layers(int value) = 0; + + virtual double scale_resolution_down_by() = 0; + virtual void set_scale_resolution_down_by(double value) = 0; + + virtual const string scalability_mode() = 0; + virtual void set_scalability_mode(const string mode) = 0; + + virtual bool active() = 0; + virtual void set_active(bool value) = 0; + + virtual const string rid() = 0; + virtual void set_rid(const string rid) = 0; + + virtual bool adaptive_ptime() = 0; + virtual void set_adaptive_ptime(bool value) = 0; + + virtual bool operator==(scoped_refptr o) const = 0; + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +struct RTCRtpParameters : public RefCountInterface { + public: + // static scoped_refptr Create(); + virtual const string transaction_id() = 0; + virtual void set_transaction_id(const string id) = 0; + + virtual const string mid() = 0; + virtual void set_mid(const 
string mid) = 0; + + virtual const vector> codecs() = 0; + virtual void set_codecs( + const vector> codecs) = 0; + + virtual const vector> header_extensions() = 0; + virtual void set_header_extensions( + const vector> header_extensions) = 0; + + virtual const vector> encodings() = 0; + virtual void set_encodings( + const vector> encodings) = 0; + + virtual scoped_refptr rtcp_parameters() = 0; + virtual void set_rtcp_parameters( + scoped_refptr rtcp_parameters) = 0; + + virtual RTCDegradationPreference GetDegradationPreference() = 0; + virtual void SetDegradationPreference(RTCDegradationPreference value) = 0; + + virtual bool operator==(scoped_refptr o) const = 0; + virtual bool operator!=(scoped_refptr o) const = 0; +}; + +} // namespace libwebrtc +#endif // LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_rtp_receiver.h b/third_party/libwebrtc/include/rtc_rtp_receiver.h new file mode 100644 index 0000000000..a72f2b8a06 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_receiver.h @@ -0,0 +1,61 @@ +#ifndef LIB_WEBRTC_RTP_RECEIVER_HXX +#define LIB_WEBRTC_RTP_RECEIVER_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +// #include "rtc_frame_decryptor.h" +// #include "rtc_frame_encryptor.h" + +namespace libwebrtc { + +class RTCMediaTrack; +class RTCMediaStream; +class RTCDtlsTransport; + +class RTCRtpReceiverObserver { + public: + virtual void OnFirstPacketReceived(RTCMediaType media_type) = 0; + + protected: + virtual ~RTCRtpReceiverObserver() {} +}; + +class RTCRtpReceiver : public RefCountInterface { + public: + virtual scoped_refptr track() const = 0; + + virtual scoped_refptr dtls_transport() const = 0; + + virtual const vector stream_ids() const = 0; + + virtual vector> streams() const = 0; + + virtual RTCMediaType media_type() const = 0; + + virtual const string id() const = 0; + + virtual scoped_refptr parameters() 
const = 0; + + virtual bool set_parameters(scoped_refptr parameters) = 0; + + virtual void SetObserver(RTCRtpReceiverObserver* observer) = 0; + + virtual void SetJitterBufferMinimumDelay(double delay_seconds) = 0; + + // virtual Vector GetSources() const = 0; + + // virtual void SetFrameDecryptor( + // scoped_refptr frame_decryptor); + + // virtual scoped_refptr GetFrameDecryptor() const = 0; + + // virtual void SetDepacketizerToDecoderFrameTransformer( + // scoped_refptr frame_transformer) = 0; +}; + +} // namespace libwebrtc + +#endif // !LIB_WEBRTC_RTP_RECEIVER_H_ \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_rtp_sender.h b/third_party/libwebrtc/include/rtc_rtp_sender.h new file mode 100644 index 0000000000..9c2f73249b --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_sender.h @@ -0,0 +1,46 @@ +#ifndef LIB_WEBRTC_RTC_RTP_SENDER_HXX +#define LIB_WEBRTC_RTC_RTP_SENDER_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCMediaTrack; +class RTCDtlsTransport; +class RTCDtmfSender; + +class RTCRtpSender : public RefCountInterface { + public: + virtual bool set_track(scoped_refptr track) = 0; + + virtual scoped_refptr track() const = 0; + + virtual scoped_refptr dtls_transport() const = 0; + + virtual uint32_t ssrc() const = 0; + + virtual RTCMediaType media_type() const = 0; + + virtual const string id() const = 0; + + virtual const vector stream_ids() const = 0; + + virtual void set_stream_ids(const vector stream_ids) const = 0; + + virtual const vector> + init_send_encodings() const = 0; + + virtual scoped_refptr parameters() const = 0; + + virtual bool set_parameters( + const scoped_refptr parameters) = 0; + + virtual scoped_refptr dtmf_sender() const = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_TYPES_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_rtp_transceiver.h 
b/third_party/libwebrtc/include/rtc_rtp_transceiver.h new file mode 100644 index 0000000000..ecf24f45b6 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_rtp_transceiver.h @@ -0,0 +1,66 @@ +#ifndef LIB_WEBRTC_RTC_RTP_TRANSCEIVER_HXX +#define LIB_WEBRTC_RTC_RTP_TRANSCEIVER_HXX + +#include "base/refcount.h" +#include "rtc_rtp_capabilities.h" +#include "rtc_rtp_parameters.h" +#include "rtc_rtp_receiver.h" +#include "rtc_rtp_sender.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCRtpTransceiverInit : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create( + RTCRtpTransceiverDirection direction, const vector stream_ids, + const vector> encodings); + + virtual RTCRtpTransceiverDirection direction() = 0; + virtual void set_direction(RTCRtpTransceiverDirection value) = 0; + + virtual const vector stream_ids() = 0; + virtual void set_stream_ids(const vector ids) = 0; + + virtual const vector> + send_encodings() = 0; + virtual void set_send_encodings( + const vector> send_encodings) = 0; +}; + +class RTCRtpTransceiver : public RefCountInterface { + public: + virtual RTCMediaType media_type() const = 0; + + virtual const string mid() const = 0; + + virtual scoped_refptr sender() const = 0; + + virtual scoped_refptr receiver() const = 0; + + virtual bool Stopped() const = 0; + + virtual bool Stopping() const = 0; + + virtual RTCRtpTransceiverDirection direction() const = 0; + + virtual const string SetDirectionWithError( + RTCRtpTransceiverDirection new_direction) = 0; + + virtual RTCRtpTransceiverDirection current_direction() const = 0; + + virtual RTCRtpTransceiverDirection fired_direction() const = 0; + + virtual const string StopStandard() = 0; + + virtual void StopInternal() = 0; + + virtual void SetCodecPreferences( + vector> codecs) = 0; + + virtual const string transceiver_id() const = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_TYPES_HXX diff --git 
a/third_party/libwebrtc/include/rtc_session_description.h b/third_party/libwebrtc/include/rtc_session_description.h new file mode 100644 index 0000000000..84aa2abbc9 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_session_description.h @@ -0,0 +1,30 @@ +#ifndef LIB_WEBRTC_RTC_SESSION_DESCRIPTION_HXX +#define LIB_WEBRTC_RTC_SESSION_DESCRIPTION_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCSessionDescription : public RefCountInterface { + public: + enum SdpType { kOffer = 0, kPrAnswer, kAnswer }; + + static LIB_WEBRTC_API scoped_refptr Create( + const string type, const string sdp, SdpParseError* error); + + public: + virtual const string sdp() const = 0; + + virtual const string type() = 0; + + virtual SdpType GetType() = 0; + + virtual bool ToString(string& out) = 0; + + protected: + virtual ~RTCSessionDescription() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_SESSION_DESCRIPTION_HXX \ No newline at end of file diff --git a/third_party/libwebrtc/include/rtc_types.h b/third_party/libwebrtc/include/rtc_types.h new file mode 100644 index 0000000000..017386d071 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_types.h @@ -0,0 +1,113 @@ +#ifndef LIB_WEBRTC_RTC_TYPES_HXX +#define LIB_WEBRTC_RTC_TYPES_HXX + +#ifdef LIB_WEBRTC_API_EXPORTS +#define LIB_WEBRTC_API __declspec(dllexport) +#elif defined(LIB_WEBRTC_API_DLL) +#define LIB_WEBRTC_API __declspec(dllimport) +#elif !defined(WIN32) +#define LIB_WEBRTC_API __attribute__((visibility("default"))) +#else +#define LIB_WEBRTC_API +#endif + +#include "base/fixed_size_function.h" +#include "base/portable.h" +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" + +namespace libwebrtc { + +enum { kMaxIceServerSize = 8 }; + +// template +// using vector = bsp::inlined_vector; + +template +using map = std::map; + +enum class MediaSecurityType { kSRTP_None = 0, kSDES_SRTP, kDTLS_SRTP }; + +enum class RTCMediaType { AUDIO, VIDEO, DATA, UNSUPPORTED }; + +using string = 
portable::string; + +// template +// using map = portable::map; + +template +using vector = portable::vector; + +struct IceServer { + string uri; + string username; + string password; +}; + +enum class IceTransportsType { kNone, kRelay, kNoHost, kAll }; + +enum class TcpCandidatePolicy { + kTcpCandidatePolicyEnabled, + kTcpCandidatePolicyDisabled +}; + +enum class CandidateNetworkPolicy { + kCandidateNetworkPolicyAll, + kCandidateNetworkPolicyLowCost +}; + +enum class RtcpMuxPolicy { + kRtcpMuxPolicyNegotiate, + kRtcpMuxPolicyRequire, +}; + +enum BundlePolicy { + kBundlePolicyBalanced, + kBundlePolicyMaxBundle, + kBundlePolicyMaxCompat +}; + +enum class SdpSemantics { kPlanB, kUnifiedPlan }; + +struct RTCConfiguration { + IceServer ice_servers[kMaxIceServerSize]; + IceTransportsType type = IceTransportsType::kAll; + BundlePolicy bundle_policy = BundlePolicy::kBundlePolicyBalanced; + RtcpMuxPolicy rtcp_mux_policy = RtcpMuxPolicy::kRtcpMuxPolicyRequire; + CandidateNetworkPolicy candidate_network_policy = + CandidateNetworkPolicy::kCandidateNetworkPolicyAll; + TcpCandidatePolicy tcp_candidate_policy = + TcpCandidatePolicy::kTcpCandidatePolicyEnabled; + + int ice_candidate_pool_size = 0; + + MediaSecurityType srtp_type = MediaSecurityType::kDTLS_SRTP; + SdpSemantics sdp_semantics = SdpSemantics::kUnifiedPlan; + bool offer_to_receive_audio = true; + bool offer_to_receive_video = true; + + bool disable_ipv6 = false; + bool disable_ipv6_on_wifi = false; + int max_ipv6_networks = 5; + bool disable_link_local_networks = false; + int screencast_min_bitrate = -1; + + // private + bool use_rtp_mux = true; + uint32_t local_audio_bandwidth = 128; + uint32_t local_video_bandwidth = 512; +}; + +struct SdpParseError { + public: + // The sdp line that causes the error. + string line; + // Explains the error. 
+ string description; +}; + +enum DesktopType { kScreen, kWindow }; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_TYPES_HXX diff --git a/third_party/libwebrtc/include/rtc_video_device.h b/third_party/libwebrtc/include/rtc_video_device.h new file mode 100644 index 0000000000..130849767c --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_device.h @@ -0,0 +1,41 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX +#define LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCVideoCapturer : public RefCountInterface { + public: + virtual ~RTCVideoCapturer() {} + + virtual bool StartCapture() = 0; + + virtual bool CaptureStarted() = 0; + + virtual void StopCapture() = 0; +}; + +class RTCVideoDevice : public RefCountInterface { + public: + virtual uint32_t NumberOfDevices() = 0; + + virtual int32_t GetDeviceName(uint32_t deviceNumber, char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = 0, + uint32_t productUniqueIdUTF8Length = 0) = 0; + + virtual scoped_refptr Create(const char* name, + uint32_t index, size_t width, + size_t height, + size_t target_fps) = 0; + + protected: + virtual ~RTCVideoDevice() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX diff --git a/third_party/libwebrtc/include/rtc_video_frame.h b/third_party/libwebrtc/include/rtc_video_frame.h new file mode 100644 index 0000000000..a69023f052 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_frame.h @@ -0,0 +1,56 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_FRAME_HXX +#define LIB_WEBRTC_RTC_VIDEO_FRAME_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCVideoFrame : public RefCountInterface { + public: + enum class Type { kARGB, kBGRA, kABGR, kRGBA }; + + enum VideoRotation { + kVideoRotation_0 = 0, + kVideoRotation_90 = 90, + kVideoRotation_180 = 180, + kVideoRotation_270 = 270 + }; + + public: + LIB_WEBRTC_API static 
scoped_refptr Create( + int width, int height, const uint8_t* buffer, int length); + + LIB_WEBRTC_API static scoped_refptr Create( + int width, int height, const uint8_t* data_y, int stride_y, + const uint8_t* data_u, int stride_u, const uint8_t* data_v, int stride_v); + + virtual scoped_refptr Copy() = 0; + + // The resolution of the frame in pixels. For formats where some planes are + // subsampled, this is the highest-resolution plane. + virtual int width() const = 0; + virtual int height() const = 0; + + virtual VideoRotation rotation() = 0; + + // Returns pointer to the pixel data for a given plane. The memory is owned by + // the VideoFrameBuffer object and must not be freed by the caller. + virtual const uint8_t* DataY() const = 0; + virtual const uint8_t* DataU() const = 0; + virtual const uint8_t* DataV() const = 0; + + // Returns the number of bytes between successive rows for a given plane. + virtual int StrideY() const = 0; + virtual int StrideU() const = 0; + virtual int StrideV() const = 0; + + virtual int ConvertToARGB(Type type, uint8_t* dst_argb, int dst_stride_argb, + int dest_width, int dest_height) = 0; + + protected: + virtual ~RTCVideoFrame() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_FRAME_HXX diff --git a/third_party/libwebrtc/include/rtc_video_renderer.h b/third_party/libwebrtc/include/rtc_video_renderer.h new file mode 100644 index 0000000000..7e81d463f6 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_renderer.h @@ -0,0 +1,18 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX +#define LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +template +class RTCVideoRenderer { + public: + virtual ~RTCVideoRenderer() {} + + virtual void OnFrame(VideoFrameT frame) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX diff --git a/third_party/libwebrtc/include/rtc_video_source.h b/third_party/libwebrtc/include/rtc_video_source.h new file mode 100644 
index 0000000000..cb61abbb49 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_source.h @@ -0,0 +1,14 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX +#define LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCVideoSource : public RefCountInterface { + public: + ~RTCVideoSource() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX diff --git a/third_party/libwebrtc/include/rtc_video_track.h b/third_party/libwebrtc/include/rtc_video_track.h new file mode 100644 index 0000000000..46850c5861 --- /dev/null +++ b/third_party/libwebrtc/include/rtc_video_track.h @@ -0,0 +1,24 @@ +#ifndef LIB_WEBRTC_RTC_VIDEO_TRACK_HXX +#define LIB_WEBRTC_RTC_VIDEO_TRACK_HXX + +#include "rtc_media_track.h" +#include "rtc_types.h" +#include "rtc_video_frame.h" +#include "rtc_video_renderer.h" + +namespace libwebrtc { + +class RTCVideoTrack : public RTCMediaTrack { + public: + virtual void AddRenderer( + RTCVideoRenderer>* renderer) = 0; + + virtual void RemoveRenderer( + RTCVideoRenderer>* renderer) = 0; + + protected: + ~RTCVideoTrack() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_TRACK_HXX diff --git a/third_party/libwebrtc/lib/elinux-arm64/libwebrtc.so b/third_party/libwebrtc/lib/elinux-arm64/libwebrtc.so new file mode 120000 index 0000000000..6f27e3aef9 --- /dev/null +++ b/third_party/libwebrtc/lib/elinux-arm64/libwebrtc.so @@ -0,0 +1 @@ +../linux-arm64/libwebrtc.so \ No newline at end of file diff --git a/third_party/libwebrtc/lib/elinux-x64/libwebrtc.so b/third_party/libwebrtc/lib/elinux-x64/libwebrtc.so new file mode 120000 index 0000000000..8d3fde68ca --- /dev/null +++ b/third_party/libwebrtc/lib/elinux-x64/libwebrtc.so @@ -0,0 +1 @@ +../linux-x64/libwebrtc.so \ No newline at end of file diff --git a/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so b/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so new file mode 100755 index 0000000000..9ff7018adb Binary files /dev/null and 
b/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so differ diff --git a/ios/WebRTC.framework/WebRTC b/third_party/libwebrtc/lib/linux-x64/libwebrtc.so similarity index 50% rename from ios/WebRTC.framework/WebRTC rename to third_party/libwebrtc/lib/linux-x64/libwebrtc.so index 4deb937067..19e93a1662 100755 Binary files a/ios/WebRTC.framework/WebRTC and b/third_party/libwebrtc/lib/linux-x64/libwebrtc.so differ diff --git a/third_party/libwebrtc/lib/win64/libwebrtc.dll b/third_party/libwebrtc/lib/win64/libwebrtc.dll new file mode 100644 index 0000000000..665183e7b6 Binary files /dev/null and b/third_party/libwebrtc/lib/win64/libwebrtc.dll differ diff --git a/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib b/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib new file mode 100644 index 0000000000..c982e9577b Binary files /dev/null and b/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib differ diff --git a/third_party/svpng/LICENSE b/third_party/svpng/LICENSE new file mode 100644 index 0000000000..c4063034b2 --- /dev/null +++ b/third_party/svpng/LICENSE @@ -0,0 +1,26 @@ +Copyright (C) 2017 Milo Yip. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of pngout nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/third_party/svpng/svpng.hpp b/third_party/svpng/svpng.hpp new file mode 100644 index 0000000000..aa2332429d --- /dev/null +++ b/third_party/svpng/svpng.hpp @@ -0,0 +1,110 @@ +/* +Copyright (C) 2017 Milo Yip. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of pngout nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +/*! \file + \brief svpng() is a minimalistic C function for saving RGB/RGBA image into uncompressed PNG. + \author Milo Yip + \version 0.1.1 + \copyright MIT license + \sa http://github.com/miloyip/svpng +*/ + +#ifndef SVPNG_INC_ +#define SVPNG_INC_ + +/*! \def SVPNG_LINKAGE + \brief User customizable linkage for svpng() function. + By default this macro is empty. + User may define this macro as static for static linkage, + and/or inline in C99/C++, etc. +*/ +#ifndef SVPNG_LINKAGE +#define SVPNG_LINKAGE +#endif + +/*! \def SVPNG_OUTPUT + \brief User customizable output stream. + By default, it uses C file descriptor and fputc() to output bytes. + In C++, for example, user may use std::ostream or std::vector instead. +*/ +#ifndef SVPNG_OUTPUT +#include +#define SVPNG_OUTPUT FILE* fp +#endif + +/*! \def SVPNG_PUT + \brief Write a byte +*/ +#ifndef SVPNG_PUT +#define SVPNG_PUT(u) fputc(u, fp) +#endif + + +/*! + \brief Save a RGB/RGBA image in PNG format. + \param SVPNG_OUTPUT Output stream (by default using file descriptor). + \param w Width of the image. (<16383) + \param h Height of the image. + \param img Image pixel data in 24-bit RGB or 32-bit RGBA format. + \param alpha Whether the image contains alpha channel. 
+*/ +SVPNG_LINKAGE void svpng(SVPNG_OUTPUT, unsigned w, unsigned h, const unsigned char* img, int alpha) { + static const unsigned t[] = { 0, 0x1db71064, 0x3b6e20c8, 0x26d930ac, 0x76dc4190, 0x6b6b51f4, 0x4db26158, 0x5005713c, + /* CRC32 Table */ 0xedb88320, 0xf00f9344, 0xd6d6a3e8, 0xcb61b38c, 0x9b64c2b0, 0x86d3d2d4, 0xa00ae278, 0xbdbdf21c }; + unsigned a = 1, b = 0, c, p = w * (alpha ? 4 : 3) + 1, x, y, i; /* ADLER-a, ADLER-b, CRC, pitch */ +#define SVPNG_U8A(ua, l) for (i = 0; i < l; i++) SVPNG_PUT((ua)[i]); +#define SVPNG_U32(u) do { SVPNG_PUT((u) >> 24); SVPNG_PUT(((u) >> 16) & 255); SVPNG_PUT(((u) >> 8) & 255); SVPNG_PUT((u) & 255); } while(0) +#define SVPNG_U8C(u) do { SVPNG_PUT(u); c ^= (u); c = (c >> 4) ^ t[c & 15]; c = (c >> 4) ^ t[c & 15]; } while(0) +#define SVPNG_U8AC(ua, l) for (i = 0; i < l; i++) SVPNG_U8C((ua)[i]) +#define SVPNG_U16LC(u) do { SVPNG_U8C((u) & 255); SVPNG_U8C(((u) >> 8) & 255); } while(0) +#define SVPNG_U32C(u) do { SVPNG_U8C((u) >> 24); SVPNG_U8C(((u) >> 16) & 255); SVPNG_U8C(((u) >> 8) & 255); SVPNG_U8C((u) & 255); } while(0) +#define SVPNG_U8ADLER(u) do { SVPNG_U8C(u); a = (a + (u)) % 65521; b = (b + a) % 65521; } while(0) +#define SVPNG_BEGIN(s, l) do { SVPNG_U32(l); c = ~0U; SVPNG_U8AC(s, 4); } while(0) +#define SVPNG_END() SVPNG_U32(~c) + SVPNG_U8A("\x89PNG\r\n\32\n", 8); /* Magic */ + SVPNG_BEGIN("IHDR", 13); /* IHDR chunk { */ + SVPNG_U32C(w); SVPNG_U32C(h); /* Width & Height (8 bytes) */ + SVPNG_U8C(8); SVPNG_U8C(alpha ? 
6 : 2); /* Depth=8, Color=True color with/without alpha (2 bytes) */ + SVPNG_U8AC("\0\0\0", 3); /* Compression=Deflate, Filter=No, Interlace=No (3 bytes) */ + SVPNG_END(); /* } */ + SVPNG_BEGIN("IDAT", 2 + h * (5 + p) + 4); /* IDAT chunk { */ + SVPNG_U8AC("\x78\1", 2); /* Deflate block begin (2 bytes) */ + for (y = 0; y < h; y++) { /* Each horizontal line makes a block for simplicity */ + SVPNG_U8C(y == h - 1); /* 1 for the last block, 0 for others (1 byte) */ + SVPNG_U16LC(p); SVPNG_U16LC(~p); /* Size of block in little endian and its 1's complement (4 bytes) */ + SVPNG_U8ADLER(0); /* No filter prefix (1 byte) */ + for (x = 0; x < p - 1; x++, img++) + SVPNG_U8ADLER(*img); /* Image pixel data */ + } + SVPNG_U32C((b << 16) | a); /* Deflate block end with adler (4 bytes) */ + SVPNG_END(); /* } */ + SVPNG_BEGIN("IEND", 0); SVPNG_END(); /* IEND chunk {} */ +} + +#endif /* SVPNG_INC_ */ \ No newline at end of file diff --git a/windows/.gitignore b/windows/.gitignore new file mode 100644 index 0000000000..c765fa7864 --- /dev/null +++ b/windows/.gitignore @@ -0,0 +1,4 @@ +flutter/ + +# Visual Studio files +*.user diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt new file mode 100644 index 0000000000..bb6405767f --- /dev/null +++ b/windows/CMakeLists.txt @@ -0,0 +1,53 @@ +cmake_minimum_required(VERSION 3.15) +set(PROJECT_NAME "flutter_webrtc") +project(${PROJECT_NAME} LANGUAGES CXX) + +# This value is used when generating builds using this plugin, so it must +# not be changed +set(PLUGIN_NAME "flutter_webrtc_plugin") + +add_definitions(-DLIB_WEBRTC_API_DLL) +add_definitions(-DRTC_DESKTOP_DEVICE) + +add_library(${PLUGIN_NAME} SHARED + "../common/cpp/src/flutter_common.cc" + "../common/cpp/src/flutter_data_channel.cc" + "../common/cpp/src/flutter_frame_cryptor.cc" + "../common/cpp/src/flutter_media_stream.cc" + "../common/cpp/src/flutter_peerconnection.cc" + "../common/cpp/src/flutter_frame_capturer.cc" + "../common/cpp/src/flutter_video_renderer.cc" + 
"../common/cpp/src/flutter_screen_capture.cc" + "../common/cpp/src/flutter_webrtc.cc" + "../common/cpp/src/flutter_webrtc_base.cc" + "flutter_webrtc_plugin.cc" + "task_runner_windows.cc" +) + +include_directories( + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/svpng" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" +) + +apply_standard_settings(${PLUGIN_NAME}) +set_target_properties(${PLUGIN_NAME} PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) +target_include_directories(${PLUGIN_NAME} INTERFACE + "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include" + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/include" +) +target_link_libraries(${PLUGIN_NAME} PRIVATE + flutter + flutter_wrapper_plugin + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/win64/libwebrtc.dll.lib" +) + +# List of absolute paths to libraries that should be bundled with the plugin +set(flutter_webrtc_bundled_libraries + "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/win64/libwebrtc.dll" + PARENT_SCOPE +) diff --git a/windows/flutter_webrtc/flutter_web_r_t_c_plugin.h b/windows/flutter_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..5065110e4f --- /dev/null +++ b/windows/flutter_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,29 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __declspec(dllexport) +#else +#define FLUTTER_PLUGIN_EXPORT __declspec(dllimport) +#endif + +namespace flutter_webrtc_plugin { +class FlutterWebRTC; +} // namespace flutter_webrtc_plugin + +#if defined(__cplusplus) +extern "C" { +#endif + +FLUTTER_PLUGIN_EXPORT void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar); + 
+FLUTTER_PLUGIN_EXPORT flutter_webrtc_plugin::FlutterWebRTC* FlutterWebRTCPluginSharedInstance(); + +#if defined(__cplusplus) +} // extern "C" +#endif + +#endif // PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ diff --git a/windows/flutter_webrtc_plugin.cc b/windows/flutter_webrtc_plugin.cc new file mode 100644 index 0000000000..f766d98239 --- /dev/null +++ b/windows/flutter_webrtc_plugin.cc @@ -0,0 +1,84 @@ +#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" + +#include "flutter_common.h" +#include "flutter_webrtc.h" +#include "task_runner_windows.h" + +#include + +const char* kChannelName = "FlutterWebRTC.Method"; +static flutter_webrtc_plugin::FlutterWebRTC* g_shared_instance = nullptr; + +namespace flutter_webrtc_plugin { + +// A webrtc plugin for windows/linux. +class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. + std::unique_ptr plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + TaskRunner* task_runner() { return task_runner_.get(); } + + private: + // Creates a plugin that communicates on the given channel. 
+ FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()), + task_runner_(std::make_unique()) { + webrtc_ = std::make_unique(this); + g_shared_instance = webrtc_.get(); + } + + // Called when a method is called on |channel_|; + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr result) { + // handle method call and forward to webrtc native sdk. + auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr channel_; + std::unique_ptr webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; + std::unique_ptr task_runner_; +}; + +} // namespace flutter_webrtc_plugin + + +void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar) { + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + flutter::PluginRegistrarManager::GetInstance() + ->GetRegistrar(registrar)); +} + +flutter_webrtc_plugin::FlutterWebRTC* FlutterWebRTCPluginSharedInstance() { + return g_shared_instance; +} \ No newline at end of file diff --git a/windows/task_runner_windows.cc b/windows/task_runner_windows.cc new file mode 100644 index 0000000000..0c47ab519e --- /dev/null +++ b/windows/task_runner_windows.cc @@ -0,0 +1,106 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. + // Use of this source code is governed by a BSD-style license that can be + // found in the LICENSE file. 
+ + #include "task_runner_windows.h" + + #include + #include + + namespace flutter_webrtc_plugin { + + TaskRunnerWindows::TaskRunnerWindows() { + WNDCLASS window_class = RegisterWindowClass(); + window_handle_ = + CreateWindowEx(0, window_class.lpszClassName, L"", 0, 0, 0, 0, 0, + HWND_MESSAGE, nullptr, window_class.hInstance, nullptr); + + if (window_handle_) { + SetWindowLongPtr(window_handle_, GWLP_USERDATA, + reinterpret_cast(this)); + } else { + auto error = GetLastError(); + LPWSTR message = nullptr; + FormatMessageW(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | + FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, error, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + reinterpret_cast(&message), 0, NULL); + OutputDebugString(message); + LocalFree(message); + } + } + + TaskRunnerWindows::~TaskRunnerWindows() { + if (window_handle_) { + DestroyWindow(window_handle_); + window_handle_ = nullptr; + } + UnregisterClass(window_class_name_.c_str(), nullptr); + } + + void TaskRunnerWindows::EnqueueTask(TaskClosure task) { + { + std::lock_guard lock(tasks_mutex_); + tasks_.push(task); + } + if (!PostMessage(window_handle_, WM_NULL, 0, 0)) { + DWORD error_code = GetLastError(); + std::cerr << "Failed to post message to main thread; error_code: " + << error_code << std::endl; + } + } + + void TaskRunnerWindows::ProcessTasks() { + // Even though it would usually be sufficient to process only a single task + // whenever we receive the message, if the message queue happens to be full, + // we might not receive a message for each individual task. 
+ for (;;) { + std::lock_guard lock(tasks_mutex_); + if (tasks_.empty()) break; + TaskClosure task = tasks_.front(); + tasks_.pop(); + task(); + } + } + + WNDCLASS TaskRunnerWindows::RegisterWindowClass() { + window_class_name_ = L"FlutterWebRTCWindowsTaskRunnerWindow"; + + WNDCLASS window_class{}; + window_class.hCursor = nullptr; + window_class.lpszClassName = window_class_name_.c_str(); + window_class.style = 0; + window_class.cbClsExtra = 0; + window_class.cbWndExtra = 0; + window_class.hInstance = GetModuleHandle(nullptr); + window_class.hIcon = nullptr; + window_class.hbrBackground = 0; + window_class.lpszMenuName = nullptr; + window_class.lpfnWndProc = WndProc; + RegisterClass(&window_class); + return window_class; + } + + LRESULT + TaskRunnerWindows::HandleMessage(UINT const message, WPARAM const wparam, + LPARAM const lparam) noexcept { + switch (message) { + case WM_NULL: + ProcessTasks(); + return 0; + } + return DefWindowProcW(window_handle_, message, wparam, lparam); + } + + LRESULT TaskRunnerWindows::WndProc(HWND const window, UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept { + if (auto* that = reinterpret_cast( + GetWindowLongPtr(window, GWLP_USERDATA))) { + return that->HandleMessage(message, wparam, lparam); + } else { + return DefWindowProc(window, message, wparam, lparam); + } + } + + } // namespace flutter_webrtc_plugin \ No newline at end of file diff --git a/windows/task_runner_windows.h b/windows/task_runner_windows.h new file mode 100644 index 0000000000..f86c99d3f8 --- /dev/null +++ b/windows/task_runner_windows.h @@ -0,0 +1,55 @@ +// Copyright 2024 The Flutter Authors. All rights reserved. + // Use of this source code is governed by a BSD-style license that can be + // found in the LICENSE file. 
+ #ifndef PACKAGES_FLUTTER_WEBRTC_WINDOWS_TASK_RUNNER_WINDOW_H_ + #define PACKAGES_FLUTTER_WEBRTC_WINDOWS_TASK_RUNNER_WINDOW_H_ + + #include + + #include + #include + #include + #include + #include + + #include "task_runner.h" + + namespace flutter_webrtc_plugin { + + // Hidden HWND responsible for processing camera tasks on main thread + // Adapted from Flutter Engine, see: + // https://github.com/flutter/flutter/issues/134346#issuecomment-2141023146 + // and: + // https://github.com/flutter/engine/blob/d7c0bcfe7a30408b0722c9d47d8b0b1e4cdb9c81/shell/platform/windows/task_runner_window.h + class TaskRunnerWindows : public TaskRunner { + public: + virtual void EnqueueTask(TaskClosure task); + + TaskRunnerWindows(); + ~TaskRunnerWindows(); + + private: + void ProcessTasks(); + + WNDCLASS RegisterWindowClass(); + + LRESULT + HandleMessage(UINT const message, WPARAM const wparam, + LPARAM const lparam) noexcept; + + static LRESULT CALLBACK WndProc(HWND const window, UINT const message, + WPARAM const wparam, + LPARAM const lparam) noexcept; + + HWND window_handle_; + std::wstring window_class_name_; + std::mutex tasks_mutex_; + std::queue tasks_; + + // Prevent copying. + TaskRunnerWindows(TaskRunnerWindows const&) = delete; + TaskRunnerWindows& operator=(TaskRunnerWindows const&) = delete; + }; + } // namespace flutter_webrtc_plugin + + #endif // PACKAGES_FLUTTER_WEBRTC_WINDOWS_TASK_RUNNER_WINDOW_H_ \ No newline at end of file