flutter webRTC how to send audio file Uint8List

I connected two peers with WebRTC. When I use TTS, I receive the synthesized audio as Uint8List data, and I want to send (or write) that voice data to the other peer.
First, I tried to write into `_localStream!.getAudioTracks()[0]`, but I couldn't find a way to do it.
Second, I tried to create an audio track and add it to the RTCPeerConnection, but I couldn't find a way to create a MediaStreamTrack or MediaStream from a Uint8List.

Is there any way to do this?

  // Signaling WebSocket; assigned once connectSocket() runs — TODO confirm.
  late final WebSocket socket;
  // Renderer bound to the local camera/microphone stream.
  final _localRenderer = RTCVideoRenderer();
  RTCVideoRenderer get localRenderer => _localRenderer;
  // Renderer bound to the stream received from the remote peer.
  final _remoteRenderer = RTCVideoRenderer();
  RTCVideoRenderer get remoteRenderer => _remoteRenderer;

  // Local media stream; null until capture has been started.
  MediaStream? _localStream;
  MediaStream? get localStream => _localStream;

  // The WebRTC peer connection; null until the room has been joined.
  RTCPeerConnection? _pc;

  /// Initializes the WebRTC session against AWS Kinesis Video Streams.
  ///
  /// Fetches Cognito credentials, signs the signaling WebSocket URI,
  /// retrieves the ICE server configuration, and — if ICE servers are
  /// available — initializes both renderers, joins the room, and opens
  /// the signaling socket. Notifies listeners when finished.
  ///
  /// [clientIdUri] is overwritten with the last segment of the Cognito
  /// identity id; [channelARN] identifies the signaling channel.
  Future init({required String clientIdUri, required String channelARN}) async {
    // Flattened from nested .then() chains: the method is already async,
    // and the original returned a Future that completed (and called
    // notifyListeners()) before any of this work had actually run.
    final credentials = await getCredentials();
    clientIdUri = credentials.userIdentityId!.split(':').last;

    final signedUri = await AwsCognito().sign(
      uri: Uri.parse("wss://<uri>"),
      accessKey: credentials.accessKeyId!,
      secretKey: credentials.secretAccessKey!,
      sessionToken: credentials.sessionToken!,
      // NOTE: the pasted source was missing the closing quote here.
      wssUri: Uri.parse("wss://<uri>"),
      region: "ap-northeast-2",
    );

    final iceConfig = await service!.getIceServerConfig(
      // NOTE: the pasted source was missing the closing quote here.
      channelARN: "arn:aws:kinesisvideo:ap-northeast-2:<uri>",
    );

    if (iceConfig.iceServerList != null) {
      await _localRenderer.initialize();
      await _remoteRenderer.initialize();

      await joinRoom(iceConfig.iceServerList!);
      await connectSocket(wssUri: signedUri, clientId: clientIdUri);
    }

    notifyListeners();
  }

// this is 
  Future<void> sendTtsData(Uint8List data) async {
    
    // MediaStream inboundStream = MediaStream();
    // videoElem.srcObject = inboundStream;

    // _pc!._localStream!.addTrack(MediaStreamTrack());
    // // _localStream!.removeTrack(MediaStreamTrack());
    //
    // // for (MediaStreamTrack track in _localStream!.getTracks()) {
    // //   RTCRtpSender sender = await _pc!.addTrack(track, _localStream!);

    if (_localStream!.getAudioTracks().isNotEmpty) {
      // MediaStreamTrack audioTtsTrack = _localStream!.addTrack(audioTtsTrack);
      MediaStreamTrack ttsStream = Buffer.fromBuffer(data.buffer);
      _localStream!.getAudioTracks().add(DataT);


      // void createMediaStream() {
      //   if (_audioData != null) {
      //     _mediaStream = MediaStream.fromBytes('audio/mpeg', _audioData);
      //   }
      // }

      _pc!.addStream(stream)

      // _pc!.createDtmfSender(ttsStream);
      // _pc!.addStream().
    }

    // sendData(socket!, data);
    notifyListeners();
  }

Read more here: Source link