Flutter

This document describes how to acquire raw audio data with the TRTC Flutter SDK.

Acquiring raw audio data

The TRTC Flutter SDK provides two ways to acquire raw audio data:
Native access.
Direct use of the Flutter Dart interface.
Because passing high-frequency, high-volume raw audio data from the native layer to the Dart layer incurs a significant performance cost, we recommend using native access to acquire raw audio data.

1. Native access

You can walk through the integration process and see its effect in the demo.
1.1 Register an audio frame listener at the native layer to receive raw audio data.
Java
void enableTRTCAudioFrameDelegate(MethodChannel.Result result) {
    // Register the listener to start receiving raw audio data callbacks
    TRTCCloud.sharedInstance(getApplicationContext()).setAudioFrameListener(new AudioFrameListener());
    result.success("");
}

void disableTRTCAudioFrameDelegate(MethodChannel.Result result) {
    // Remove the listener to stop receiving raw audio data callbacks
    TRTCCloud.sharedInstance(getApplicationContext()).setAudioFrameListener(null);
    result.success("");
}

class AudioFrameListener implements TRTCCloudListener.TRTCAudioFrameListener {
    @Override
    public void onCapturedAudioFrame(TRTCCloudDef.TRTCAudioFrame trtcAudioFrame) {
        // TODO: audio data captured by the local microphone
    }

    @Override
    public void onLocalProcessedAudioFrame(TRTCCloudDef.TRTCAudioFrame trtcAudioFrame) {
        // TODO: local audio data after pre-processing
    }

    @Override
    public void onRemoteUserAudioFrame(TRTCCloudDef.TRTCAudioFrame trtcAudioFrame, String userId) {
        // TODO: audio data of a single remote user before mixing
    }

    @Override
    public void onMixedPlayAudioFrame(TRTCCloudDef.TRTCAudioFrame trtcAudioFrame) {
        // TODO: mixed audio data about to be played back
    }

    @Override
    public void onMixedAllAudioFrame(TRTCCloudDef.TRTCAudioFrame trtcAudioFrame) {
        // TODO: mixed data of all captured and playback audio
    }

    @Override
    public void onVoiceEarMonitorAudioFrame(TRTCCloudDef.TRTCAudioFrame trtcAudioFrame) {
        // TODO: in-ear monitoring audio data
    }
}
Swift
// Keep a reference so the listener is not deallocated while registered
let listener = AudioFrameProcessListener()

func enableTRTCAudioFrameDelegate(_ result: FlutterResult) {
    // Register the delegate to start receiving raw audio data callbacks
    TRTCCloud.sharedInstance().setAudioFrameDelegate(listener)
    result(nil)
}

func disableTRTCAudioFrameDelegate(_ result: FlutterResult) {
    // Remove the delegate to stop receiving raw audio data callbacks
    TRTCCloud.sharedInstance().setAudioFrameDelegate(nil)
    result(nil)
}

class AudioFrameProcessListener: NSObject, TRTCAudioFrameDelegate {
    func onCapturedAudioFrame(_ frame: TRTCAudioFrame) {
        // MARK: TODO - audio data captured by the local microphone
    }

    func onLocalProcessedAudioFrame(_ frame: TRTCAudioFrame) {
        // MARK: TODO - local audio data after pre-processing
    }

    func onRemoteUserAudioFrame(_ frame: TRTCAudioFrame, userId: String) {
        // MARK: TODO - audio data of a single remote user before mixing
    }

    func onMixedAllAudioFrame(_ frame: TRTCAudioFrame) {
        // MARK: TODO - mixed data of all captured and playback audio
    }

    func onMixedPlayAudioFrame(_ frame: TRTCAudioFrame) {
        // MARK: TODO - mixed audio data about to be played back
    }

    func onVoiceEarMonitorAudioFrame(_ frame: TRTCAudioFrame) {
        // MARK: TODO - in-ear monitoring audio data
    }
}
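Each callback delivers a TRTCAudioFrame containing the raw PCM data for that stage of the audio pipeline. As a minimal sketch of what the TODO bodies could do, the Java helper below appends a frame's PCM bytes to a file; PcmFileWriter and pcmFilePath are illustrative names, and the sketch assumes you pass it the frame's byte buffer (for example, the data field of TRTCCloudDef.TRTCAudioFrame) from onCapturedAudioFrame.
import java.io.FileOutputStream;
import java.io.IOException;

// Hypothetical helper: appends PCM byte buffers to a local file for inspection.
// Intended to be called from one of the audio frame callbacks above.
class PcmFileWriter {
    private final FileOutputStream output;

    PcmFileWriter(String pcmFilePath) throws IOException {
        // Open in append mode so consecutive frames are written back to back
        output = new FileOutputStream(pcmFilePath, true);
    }

    // Write one frame's PCM bytes; keep this lightweight, the SDK calls back at high frequency
    void write(byte[] pcm) {
        if (pcm == null) {
            return;
        }
        try {
            output.write(pcm);
        } catch (IOException e) {
            // Drop the frame on failure rather than blocking the callback
            e.printStackTrace();
        }
    }

    void close() throws IOException {
        output.close();
    }
}
Because the callbacks fire every few milliseconds on an SDK thread, keep the per-frame work small or hand the buffer off to a background queue.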
1.2 Use a MethodChannel to start and stop raw audio data acquisition.
Step 1: Implement the start/stop interface at the Dart layer.
import 'package:flutter/services.dart';

final channel = MethodChannel('TRCT_FLUTTER_EXAMPLE');

// Start acquiring raw audio data
Future<void> enableAudioFrame() async {
  await channel.invokeMethod('enableTRTCAudioFrameDelegate');
}

// Stop acquiring raw audio data
Future<void> disableAudioFrame() async {
  await channel.invokeMethod('disableTRTCAudioFrameDelegate');
}
Step 2: Implement the start/stop interface at the native layer.
Java
import androidx.annotation.NonNull;

import io.flutter.embedding.android.FlutterActivity;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.plugin.common.MethodChannel;

public class MainActivity extends FlutterActivity {
    private static final String channelName = "TRCT_FLUTTER_EXAMPLE";
    private MethodChannel channel;

    @Override
    public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
        super.configureFlutterEngine(flutterEngine);
        channel = new MethodChannel(flutterEngine.getDartExecutor().getBinaryMessenger(), channelName);
        channel.setMethodCallHandler((call, result) -> {
            switch (call.method) {
                case "enableTRTCAudioFrameDelegate":
                    enableTRTCAudioFrameDelegate(result);
                    break;
                case "disableTRTCAudioFrameDelegate":
                    disableTRTCAudioFrameDelegate(result);
                    break;
                default:
                    result.notImplemented();
                    break;
            }
        });
    }
}
Swift
import UIKit
import Flutter

@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
    var channel: FlutterMethodChannel?

    override func application(_ application: UIApplication,
                              didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        GeneratedPluginRegistrant.register(with: self)
        guard let controller = window?.rootViewController as? FlutterViewController else {
            fatalError("Invalid root view controller")
        }
        channel = FlutterMethodChannel(name: "TRCT_FLUTTER_EXAMPLE", binaryMessenger: controller.binaryMessenger)
        channel?.setMethodCallHandler({ [weak self] call, result in
            guard let self = self else { return }
            switch call.method {
            case "enableTRTCAudioFrameDelegate":
                self.enableTRTCAudioFrameDelegate(result)
            case "disableTRTCAudioFrameDelegate":
                self.disableTRTCAudioFrameDelegate(result)
            default:
                result(FlutterMethodNotImplemented)
            }
        })
        return super.application(application, didFinishLaunchingWithOptions: launchOptions)
    }
}
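If the Dart side needs results derived from the native callbacks (for example, a computed volume level or frame statistics), the same MethodChannel can carry data in the other direction. The Java fragment below is a hypothetical sketch, not part of the steps above: it assumes a channel field like the one created in MainActivity and a Dart-side handler for a method named onAudioFrameCaptured, and it posts to the main thread because platform channels must be used there.
import android.os.Handler;
import android.os.Looper;

import java.util.HashMap;
import java.util.Map;

import io.flutter.plugin.common.MethodChannel;

// Hypothetical bridge: forwards a small summary of each captured frame to Dart.
// `channel` would be the MethodChannel created in configureFlutterEngine().
class AudioFrameBridge {
    private final MethodChannel channel;
    private final Handler mainHandler = new Handler(Looper.getMainLooper());

    AudioFrameBridge(MethodChannel channel) {
        this.channel = channel;
    }

    // Call from onCapturedAudioFrame(); the SDK callback runs on its own thread,
    // so the channel invocation is posted to the main thread
    void notifyFrameCaptured(int sampleRate, int channels, int byteLength) {
        Map<String, Object> payload = new HashMap<>();
        payload.put("sampleRate", sampleRate);
        payload.put("channels", channels);
        payload.put("byteLength", byteLength);
        mainHandler.post(() -> channel.invokeMethod("onAudioFrameCaptured", payload));
    }
}
Sending only a small summary rather than the PCM buffer itself keeps the cross-layer traffic cheap, which is the same reason this document recommends processing the raw data at the native layer.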

2. Access via the Flutter Dart interface

Currently, the Flutter Dart interface only supports the onCapturedAudioFrame callback. Use it as follows:
TRTCCloud trtcCloud = (await TRTCCloud.sharedInstance())!;

// Start acquiring raw audio data
final audioFrameListener = TRTCAudioFrameListener(
  onCapturedAudioFrame: (audioFrame) {
    // TODO
  },
);
trtcCloud.setAudioFrameListener(audioFrameListener);

// Stop acquiring raw audio data
trtcCloud.setAudioFrameListener(null);