@@ -164,14 +164,6 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
   }
 
   void _onButtonTap() async {
-    // #Pangea
-    // https://github.com/pangeachat/client/issues/3458
-    if (widget.event != null && !widget.event!.status.isSent) {
-      widget.chatController.showToolbar(widget.event!);
-      return;
-    }
-    // Pangea#
-
     WidgetsBinding.instance.addPostFrameCallback((_) {
       ScaffoldMessenger.of(matrix.context).clearMaterialBanners();
     });
@@ -441,13 +433,6 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
       final duration = Duration(milliseconds: durationInt);
       _durationString = duration.minuteSecondString;
     }
-
-    // #Pangea
-    if (widget.autoplay &&
-        (widget.event == null || widget.event!.status.isSent)) {
-      _onButtonTap();
-    }
-    // Pangea#
   }
 
   @override
@@ -470,7 +455,12 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
         final fileDescription = widget.event?.fileDescription;
         // Pangea#
 
-        return StreamBuilder<Object>(
+        // #Pangea
+        // return StreamBuilder<Object>(
+        return AbsorbPointer(
+          absorbing: widget.event != null && !widget.event!.status.isSent,
+          child: StreamBuilder<Object>(
+            // Pangea#
           stream: audioPlayer == null
               ? null
               : StreamGroup.merge([
@@ -484,8 +474,8 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
               audioPlayer?.position.inMilliseconds.toDouble() ?? 0.0;
           if (currentPosition > maxPosition) currentPosition = maxPosition;
 
-          final wavePosition =
-              (currentPosition / maxPosition) * AudioPlayerWidget.wavesCount;
+          final wavePosition = (currentPosition / maxPosition) *
+              AudioPlayerWidget.wavesCount;
 
           final statusText = audioPlayer == null
               ? _durationString ?? '00:00'
@@ -565,7 +555,8 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
                             borderRadius:
                                 BorderRadius.circular(64),
                           ),
-                          height: 32 * (waveform[i] / 1024),
+                          height:
+                              32 * (waveform[i] / 1024),
                         ),
                       ),
                     ),
@@ -722,6 +713,7 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
               ),
             );
           },
+          ),
         );
       },
     );