Audio section widget (#744)

First draft of word-focus listening activities using a text-to-speech library
pull/1428/head
ggurdin 1 year ago committed by GitHub
parent 925d7506ef
commit ac80e6217c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -158,4 +158,10 @@
android:name="flutterEmbedding"
android:value="2" />
</application>
<queries>
<intent>
<action android:name="android.intent.action.TTS_SERVICE" />
</intent>
</queries>
</manifest>

@ -4239,6 +4239,10 @@
"l2SupportAlpha": "Alpha",
"l2SupportBeta": "Beta",
"l2SupportFull": "Full",
"voiceNotAvailable": "It looks like you don't have a voice installed for this language.",
"openVoiceSettings": "Click here to open voice settings",
"playAudio": "Play",
"stop": "Stop",
"grammarCopySCONJ": "Subordinating Conjunction",
"grammarCopyNUM": "Number",
"grammarCopyVERB": "Verb",

@ -25,7 +25,13 @@ class AudioPlayerWidget extends StatefulWidget {
static String? currentId;
static const int wavesCount = 40;
// #Pangea
// static const int wavesCount = 40;
static const int wavesCount = kIsWeb ? 100 : 40;
final int? sectionStartMS;
final int? sectionEndMS;
// Pangea#
const AudioPlayerWidget(
this.event, {
@ -33,6 +39,8 @@ class AudioPlayerWidget extends StatefulWidget {
// #Pangea
this.matrixFile,
this.autoplay = false,
this.sectionStartMS,
this.sectionEndMS,
// Pangea#
super.key,
});
@ -72,6 +80,24 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
super.dispose();
}
// #Pangea
// @override
// void didUpdateWidget(covariant oldWidget) {
// if ((oldWidget.sectionEndMS != widget.sectionEndMS) ||
// (oldWidget.sectionStartMS != widget.sectionStartMS)) {
// debugPrint('selection changed');
// if (widget.sectionStartMS != null) {
// audioPlayer?.seek(Duration(milliseconds: widget.sectionStartMS!));
// audioPlayer?.play();
// } else {
// audioPlayer?.stop();
// audioPlayer?.seek(null);
// }
// }
// super.didUpdateWidget(oldWidget);
// }
// Pangea#
Future<void> _downloadAction() async {
// #Pangea
// if (status != AudioPlayerStatus.notDownloaded) return;
@ -160,7 +186,16 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
AudioPlayerWidget.wavesCount)
.round();
});
// #Pangea
// if (widget.sectionStartMS != null &&
// widget.sectionEndMS != null &&
// state.inMilliseconds.toDouble() >= widget.sectionEndMS!) {
// audioPlayer.stop();
// audioPlayer.seek(Duration(milliseconds: widget.sectionStartMS!));
// } else
if (state.inMilliseconds.toDouble() == maxPosition) {
// if (state.inMilliseconds.toDouble() == maxPosition) {
// Pangea#
audioPlayer.stop();
audioPlayer.seek(null);
}
@ -194,6 +229,11 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
}
// Pangea#
}
// #Pangea
// if (widget.sectionStartMS != null) {
// audioPlayer.seek(Duration(milliseconds: widget.sectionStartMS!));
// }
// Pangea#
audioPlayer.play().onError(
ErrorReporter(context, 'Unable to play audio message')
.onErrorCallback,
@ -311,6 +351,17 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
final statusText = this.statusText ??= _durationString ?? '00:00';
final audioPlayer = this.audioPlayer;
// #Pangea
final msPerWave = (maxPosition / AudioPlayerWidget.wavesCount);
final int? startWave = widget.sectionStartMS != null && msPerWave > 0
? (widget.sectionStartMS! / msPerWave).floor()
: null;
final int? endWave = widget.sectionEndMS != null && msPerWave > 0
? (widget.sectionEndMS! / msPerWave).ceil()
: null;
// Pangea#
return Padding(
// #Pangea
// padding: const EdgeInsets.all(12.0),
@ -352,44 +403,101 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
// #Pangea
// const SizedBox(width: 8),
const SizedBox(width: 5),
// Pangea#
Row(
mainAxisSize: MainAxisSize.min,
children: [
for (var i = 0; i < AudioPlayerWidget.wavesCount; i++)
GestureDetector(
onTapDown: (_) => audioPlayer?.seek(
Duration(
milliseconds:
(maxPosition / AudioPlayerWidget.wavesCount).round() *
i,
),
),
child: Container(
height: 32,
color: widget.color.withAlpha(0),
alignment: Alignment.center,
child: Opacity(
opacity: currentPosition > i ? 1 : 0.5,
child: Container(
margin: const EdgeInsets.symmetric(horizontal: 1),
decoration: BoxDecoration(
color: widget.color,
borderRadius: BorderRadius.circular(2),
// Row(
// mainAxisSize: MainAxisSize.min,
// children: [
// for (var i = 0; i < AudioPlayerWidget.wavesCount; i++)
// GestureDetector(
// onTapDown: (_) => audioPlayer?.seek(
// Duration(
// milliseconds:
// (maxPosition / AudioPlayerWidget.wavesCount).round() *
// i,
// ),
// ),
// child: Container(
// height: 32,
// color: widget.color.withAlpha(0),
// alignment: Alignment.center,
// child: Opacity(
// opacity: currentPosition > i ? 1 : 0.5,
// child: Container(
// margin: const EdgeInsets.symmetric(horizontal: 1),
// decoration: BoxDecoration(
// color: widget.color,
// borderRadius: BorderRadius.circular(2),
// ),
// // #Pangea
// // width: 2,
// width: 1,
// // Pangea#
// height: 32 * (waveform[i] / 1024),
// ),
// ),
// ),
// ),
// ],
// ),
// const SizedBox(width: 8),
Expanded(
child: Row(
children: [
for (var i = 0; i < AudioPlayerWidget.wavesCount; i++)
Builder(
builder: (context) {
final double barOpacity = currentPosition > i ? 1 : 0.5;
return Expanded(
child: GestureDetector(
onTapDown: (_) {
audioPlayer?.seek(
Duration(
milliseconds:
(maxPosition / AudioPlayerWidget.wavesCount)
.round() *
i,
),
);
},
child: Stack(
children: [
Container(
margin: const EdgeInsets.symmetric(
horizontal: 0.5,
),
decoration: BoxDecoration(
color: widget.color.withOpacity(barOpacity),
borderRadius: BorderRadius.circular(2),
),
height: 32 * (waveform[i] / 1024),
),
],
),
),
// #Pangea
// width: 2,
width: 1,
// Pangea#
height: 32 * (waveform[i] / 1024),
),
),
);
// return Container(
// height: 32,
// width: 2,
// alignment: Alignment.center,
// child: Opacity(
// opacity: barOpacity,
// child: Container(
// margin: const EdgeInsets.symmetric(
// horizontal: 1,
// ),
// decoration: BoxDecoration(
// color: widget.color,
// borderRadius: BorderRadius.circular(2),
// ),
// height: 32 * (waveform[i] / 1024),
// width: 2,
// ),
// ),
// );
},
),
),
],
],
),
),
// #Pangea
// const SizedBox(width: 8),
const SizedBox(width: 5),
// SizedBox(
// width: 36,

@ -162,7 +162,7 @@ class PracticeGenerationController {
activityType: ActivityTypeEnum.multipleChoice,
langCode: event.messageDisplayLangCode,
msgId: event.eventId,
multipleChoice: MultipleChoice(
content: ActivityContent(
question: "What is a synonym for 'happy'?",
choices: ["sad", "angry", "joyful", "tired"],
answer: "joyful",

@ -5,20 +5,93 @@ import 'dart:typed_data';
import 'package:fluffychat/pangea/config/environment.dart';
import 'package:fluffychat/pangea/constants/model_keys.dart';
import 'package:fluffychat/pangea/controllers/pangea_controller.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
import 'package:fluffychat/pangea/network/urls.dart';
import 'package:http/http.dart';
import '../network/requests.dart';
/// Transcription payload stored in a TTS audio message event.
///
/// Bundles the spoken [text], its [langCode], and the per-token audio
/// timings ([tokens]) so listening activities can seek to word boundaries.
class PangeaAudioEventData {
  final String text;
  final String langCode;
  final List<TTSToken> tokens;

  PangeaAudioEventData({
    required this.text,
    required this.langCode,
    required this.tokens,
  });

  /// Parses event content. The `as` casts throw a [TypeError] when required
  /// keys are missing or mistyped, so callers should guard with try/catch.
  factory PangeaAudioEventData.fromJson(dynamic json) => PangeaAudioEventData(
        text: json[ModelKey.text] as String,
        langCode: json[ModelKey.langCode] as String,
        // Collapsed the redundant List.from(... .toList()) double
        // conversion into a single map/toList pass.
        tokens: (json[ModelKey.tokens] as Iterable)
            .map((x) => TTSToken.fromJson(x))
            .toList(),
      );

  Map<String, dynamic> toJson() => {
        ModelKey.text: text,
        ModelKey.langCode: langCode,
        ModelKey.tokens: tokens.map((x) => x.toJson()).toList(),
      };
}
/// Millisecond-aligned timing for one token of synthesized speech.
class TTSToken {
  /// Start offset of the token within the audio, in milliseconds.
  final int startMS;

  /// End offset of the token within the audio, in milliseconds.
  final int endMS;

  /// The token's text span within the source message.
  final PangeaTokenText text;

  TTSToken({
    required this.startMS,
    required this.endMS,
    required this.text,
  });

  factory TTSToken.fromJson(Map<String, dynamic> json) => TTSToken(
        startMS: json["start_ms"],
        endMS: json["end_ms"],
        text: PangeaTokenText.fromJson(json["text"]),
      );

  Map<String, dynamic> toJson() => {
        "start_ms": startMS,
        "end_ms": endMS,
        "text": text.toJson(),
      };

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) return true;
    return other is TTSToken &&
        other.startMS == startMS &&
        other.endMS == endMS &&
        other.text == text;
  }

  // Object.hash avoids the collisions of XOR-ing fields: with XOR, tokens
  // with swapped start/end values hashed identically.
  @override
  int get hashCode => Object.hash(startMS, endMS, text);
}
class TextToSpeechRequest {
String text;
String langCode;
List<PangeaTokenText> tokens;
TextToSpeechRequest({required this.text, required this.langCode});
TextToSpeechRequest({
required this.text,
required this.langCode,
required this.tokens,
});
Map<String, dynamic> toJson() => {
ModelKey.text: text,
ModelKey.langCode: langCode,
ModelKey.tokens: tokens.map((token) => token.toJson()).toList(),
};
@override
@ -40,6 +113,7 @@ class TextToSpeechResponse {
int durationMillis;
List<int> waveform;
String fileExtension;
List<TTSToken> ttsTokens;
TextToSpeechResponse({
required this.audioContent,
@ -47,6 +121,7 @@ class TextToSpeechResponse {
required this.durationMillis,
required this.waveform,
required this.fileExtension,
required this.ttsTokens,
});
factory TextToSpeechResponse.fromJson(
@ -58,7 +133,27 @@ class TextToSpeechResponse {
durationMillis: json["duration_millis"],
waveform: List<int>.from(json["wave_form"]),
fileExtension: json["file_extension"],
ttsTokens: List<TTSToken>.from(
json["tts_tokens"].map((x) => TTSToken.fromJson(x)),
),
);
Map<String, dynamic> toJson() => {
"audio_content": audioContent,
"mime_type": mimeType,
"duration_millis": durationMillis,
"wave_form": List<dynamic>.from(waveform.map((x) => x)),
"file_extension": fileExtension,
"tts_tokens": List<dynamic>.from(ttsTokens.map((x) => x.toJson())),
};
PangeaAudioEventData toPangeaAudioEventData(String text, String langCode) {
return PangeaAudioEventData(
text: text,
langCode: langCode,
tokens: ttsTokens,
);
}
}
class _TextToSpeechCacheItem {

@ -1,13 +1,6 @@
/// How the target span should be displayed while an activity is active.
/// `nothing` leaves the message rendering unchanged.
// Resolved the flattened-diff residue: the file contained both the old
// two-value declaration and the new three-value one; only the new one stays.
enum ActivityDisplayInstructionsEnum { highlight, hide, nothing }

extension ActivityDisplayInstructionsEnumExt
    on ActivityDisplayInstructionsEnum {
  /// Wire-format name: the bare enum identifier (e.g. 'highlight').
  /// `name` is equivalent to `toString().split('.').last` without the
  /// string round-trip.
  String get string => name;
}

@ -1,4 +1,10 @@
/// The kinds of practice activities the client can render.
// Resolved the flattened-diff residue: the old four-value declaration and
// the new five-value one both appeared; only the new one stays. Trailing
// comma added per dart format convention.
enum ActivityTypeEnum {
  multipleChoice,
  freeResponse,
  listening,
  speaking,
  wordFocusListening,
}
extension ActivityTypeExtension on ActivityTypeEnum {
String get string {
@ -11,6 +17,8 @@ extension ActivityTypeExtension on ActivityTypeEnum {
return 'listening';
case ActivityTypeEnum.speaking:
return 'speaking';
case ActivityTypeEnum.wordFocusListening:
return 'word_focus_listening';
}
}
}

@ -38,63 +38,49 @@ enum ConstructUseTypeEnum {
/// was target construct in word meaning in context practice activity and incorrectly selected
incPA,
/// was target lemma in word-focus listening activity and correctly selected
corWL,
/// form of lemma was read-aloud in word-focus listening activity and incorrectly selected
incWL,
/// form of lemma was read-aloud in word-focus listening activity and correctly ignored
ignWL,
/// not defined, likely a new construct introduced by choreo and not yet classified by an old version of the client
nan
}
extension ConstructUseTypeExtension on ConstructUseTypeEnum {
String get string {
switch (this) {
case ConstructUseTypeEnum.ga:
return 'ga';
case ConstructUseTypeEnum.wa:
return 'wa';
case ConstructUseTypeEnum.corIt:
return 'corIt';
case ConstructUseTypeEnum.incIt:
return 'incIt';
case ConstructUseTypeEnum.ignIt:
return 'ignIt';
case ConstructUseTypeEnum.ignIGC:
return 'ignIGC';
case ConstructUseTypeEnum.corIGC:
return 'corIGC';
case ConstructUseTypeEnum.incIGC:
return 'incIGC';
case ConstructUseTypeEnum.unk:
return 'unk';
case ConstructUseTypeEnum.corPA:
return 'corPA';
case ConstructUseTypeEnum.incPA:
return 'incPA';
case ConstructUseTypeEnum.ignPA:
return 'ignPA';
}
}
String get string => toString().split('.').last;
IconData get icon {
switch (this) {
case ConstructUseTypeEnum.ga:
return Icons.check;
case ConstructUseTypeEnum.wa:
return Icons.thumb_up_sharp;
case ConstructUseTypeEnum.corIt:
return Icons.translate;
case ConstructUseTypeEnum.incIt:
return Icons.translate;
case ConstructUseTypeEnum.ignIt:
return Icons.translate;
case ConstructUseTypeEnum.ignIGC:
return Icons.close;
case ConstructUseTypeEnum.corIGC:
return Icons.check;
case ConstructUseTypeEnum.incIGC:
return Icons.close;
case ConstructUseTypeEnum.corPA:
return Icons.check;
case ConstructUseTypeEnum.incPA:
return Icons.close;
case ConstructUseTypeEnum.ignPA:
case ConstructUseTypeEnum.ignWL:
case ConstructUseTypeEnum.incWL:
return Icons.close;
case ConstructUseTypeEnum.ga:
case ConstructUseTypeEnum.corIGC:
case ConstructUseTypeEnum.corPA:
case ConstructUseTypeEnum.corWL:
return Icons.check;
case ConstructUseTypeEnum.unk:
case ConstructUseTypeEnum.nan:
return Icons.help;
}
}
@ -107,30 +93,35 @@ extension ConstructUseTypeExtension on ConstructUseTypeEnum {
/// Practice activities get a moderate amount of points.
int get pointValue {
switch (this) {
case ConstructUseTypeEnum.ga:
return 2;
case ConstructUseTypeEnum.corPA:
return 5;
case ConstructUseTypeEnum.wa:
case ConstructUseTypeEnum.corWL:
return 3;
case ConstructUseTypeEnum.ga:
case ConstructUseTypeEnum.corIGC:
return 2;
case ConstructUseTypeEnum.corIt:
return 1;
case ConstructUseTypeEnum.incIt:
return -1;
case ConstructUseTypeEnum.ignIt:
return 1;
case ConstructUseTypeEnum.ignIGC:
case ConstructUseTypeEnum.ignPA:
case ConstructUseTypeEnum.ignWL:
return 1;
case ConstructUseTypeEnum.corIGC:
return 2;
case ConstructUseTypeEnum.incIGC:
return -1;
case ConstructUseTypeEnum.unk:
case ConstructUseTypeEnum.nan:
return 0;
case ConstructUseTypeEnum.corPA:
return 5;
case ConstructUseTypeEnum.incIt:
case ConstructUseTypeEnum.incIGC:
return -1;
case ConstructUseTypeEnum.incPA:
case ConstructUseTypeEnum.incWL:
return -2;
case ConstructUseTypeEnum.ignPA:
return 1;
}
}
}

@ -1,11 +1,15 @@
import 'dart:developer';
import 'package:fluffychat/pangea/constants/model_keys.dart';
import 'package:fluffychat/pangea/constants/pangea_event_types.dart';
import 'package:fluffychat/pangea/controllers/text_to_speech_controller.dart';
import 'package:fluffychat/pangea/models/choreo_record.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_model.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_record_model.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/widgets/chat/message_audio_card.dart';
import 'package:flutter/foundation.dart';
import 'package:matrix/matrix.dart';
@ -37,4 +41,42 @@ extension PangeaEvent on Event {
throw Exception("$type events do not have pangea content");
}
}
/// Downloads this audio-message event's attachment and pairs it with the
/// TTS token timings stored in its transcription content.
///
/// Returns null (rather than throwing) when the event is not an audio
/// message, lacks transcription/audio content, or predates token storage.
Future<PangeaAudioFile?> getPangeaAudioFile() async {
// Only audio messages carry the transcription payload; anything else is a
// caller error worth logging.
if (type != EventTypes.Message || messageType != MessageTypes.Audio) {
ErrorHandler.logError(
e: "Event $eventId is not an audio message",
);
return null;
}
// @ggurdin what are cases where these would be null?
// if it would be unexpected, we should log an error with details to investigate
final transcription =
content.tryGetMap<String, dynamic>(ModelKey.transcription);
final audioContent =
content.tryGetMap<String, dynamic>('org.matrix.msc1767.audio');
if (transcription == null || audioContent == null) return null;
// The expensive step: fetches (and possibly decrypts) the attachment.
final matrixFile = await downloadAndDecryptAttachment();
// NOTE(review): duration/waveform may be null here — confirm that
// PangeaAudioFile accepts nullable values for both parameters.
final duration = audioContent.tryGet<int>('duration');
final waveform = audioContent.tryGetList<int>('waveform');
// old audio messages will not have tokens
final tokensContent = transcription.tryGetList(ModelKey.tokens);
if (tokensContent == null) return null;
// The cast throws if an entry is not a map; presumably upstream always
// writes maps here — TODO confirm and consider a guarded parse.
final tokens = tokensContent
.map((e) => TTSToken.fromJson(e as Map<String, dynamic>))
.toList();
return PangeaAudioFile(
bytes: matrixFile.bytes,
name: matrixFile.name,
tokens: tokens,
mimeType: matrixFile.mimeType,
duration: duration,
waveform: waveform,
);
}
}

@ -81,17 +81,17 @@ class PangeaMessageEvent {
_representations = null;
}
Future<PangeaAudioFile> getMatrixAudioFile(
Future<PangeaAudioFile?> getMatrixAudioFile(
String langCode,
BuildContext context,
) async {
final String text = (await representationByLanguageGlobal(
langCode: langCode,
))
?.text ??
body;
final RepresentationEvent? rep = representationByLanguage(langCode);
if (rep == null) return null;
final TextToSpeechRequest params = TextToSpeechRequest(
text: text,
text: rep.content.text,
tokens: (await rep.tokensGlobal(context)).map((t) => t.text).toList(),
langCode: langCode,
);
@ -111,9 +111,10 @@ class PangeaMessageEvent {
mimeType: response.mimeType,
duration: response.durationMillis,
waveform: response.waveform,
tokens: response.ttsTokens,
);
sendAudioEvent(file, response, text, langCode);
sendAudioEvent(file, response, rep.text, langCode);
return file;
}
@ -137,10 +138,8 @@ class PangeaMessageEvent {
'duration': response.durationMillis,
'waveform': response.waveform,
},
ModelKey.transcription: {
ModelKey.text: text,
ModelKey.langCode: langCode,
},
ModelKey.transcription:
response.toPangeaAudioEventData(text, langCode).toJson(),
},
);
@ -155,97 +154,46 @@ class PangeaMessageEvent {
return audioEvent;
}
//get audio for text and language
//if no audio exists, create it
//if audio exists, return it
Future<Event?> getTextToSpeechGlobal(String langCode) async {
final String text = representationByLanguage(langCode)?.text ?? body;
final local = getTextToSpeechLocal(langCode, text);
if (local != null) return Future.value(local);
final TextToSpeechRequest params = TextToSpeechRequest(
text: text,
langCode: langCode,
);
final TextToSpeechResponse response =
await MatrixState.pangeaController.textToSpeech.get(
params,
);
final audioBytes = base64.decode(response.audioContent);
// if (!TextToSpeechController.isOggFile(audioBytes)) {
// throw Exception("File is not a valid OGG format");
// } else {
// debugPrint("File is a valid OGG format");
// }
// from text, trim whitespace, remove special characters, and limit to 20 characters
// final fileName =
// text.trim().replaceAll(RegExp('[^A-Za-z0-9]'), '').substring(0, 20);
final eventIdParam = _event.eventId;
final fileName =
"audio_for_${eventIdParam}_$langCode.${response.fileExtension}";
final file = MatrixAudioFile(
bytes: audioBytes,
name: fileName,
mimeType: response.mimeType,
);
// try {
final String? eventId = await room.sendFileEvent(
file,
inReplyTo: _event,
extraContent: {
'info': {
...file.info,
'duration': response.durationMillis,
},
'org.matrix.msc3245.voice': {},
'org.matrix.msc1767.audio': {
'duration': response.durationMillis,
'waveform': response.waveform,
},
ModelKey.transcription: {
ModelKey.text: text,
ModelKey.langCode: langCode,
},
},
);
// .timeout(
// Durations.long4,
// onTimeout: () {
// debugPrint("timeout in getTextToSpeechGlobal");
// return null;
// },
// );
debugPrint("eventId in getTextToSpeechGlobal $eventId");
return eventId != null ? room.getEventById(eventId) : null;
}
Event? getTextToSpeechLocal(String langCode, String text) {
return allAudio.firstWhereOrNull(
(element) {
// Safely access the transcription map
final transcription = element.content.tryGetMap(ModelKey.transcription);
// return transcription != null;
if (transcription == null) {
// If transcription is null, this element does not match.
(event) {
try {
// Safely access
final dataMap = event.content.tryGetMap(ModelKey.transcription);
if (dataMap == null) {
return false;
}
// old text to speech content will not have TTSToken data
// we want to disregard them and just generate new ones
// for that, we'll return false if 'tokens' are null
// while in-development, we'll pause here to inspect
// debugger can be removed after we're sure it's working
if (dataMap['tokens'] == null) {
// events before today will definitely not have the tokens
debugger(
when: kDebugMode &&
event.originServerTs.isAfter(DateTime(2024, 10, 16)),
);
return false;
}
final PangeaAudioEventData audioData =
PangeaAudioEventData.fromJson(dataMap as dynamic);
// Check if both language code and text match
return audioData.langCode == langCode && audioData.text == text;
} catch (e, s) {
debugger(when: kDebugMode);
ErrorHandler.logError(
e: e,
s: s,
data: event.content.tryGetMap(ModelKey.transcription),
m: "error parsing data in getTextToSpeechLocal",
);
return false;
}
// Safely get language code and text from the transcription
final elementLangCode = transcription[ModelKey.langCode];
final elementText = transcription[ModelKey.text];
// Check if both language code and text match
return elementLangCode == langCode && elementText == text;
},
);
}

@ -1,195 +1,195 @@
import 'dart:convert';
import 'dart:developer';
import 'package:fluffychat/pangea/enum/construct_use_type_enum.dart';
import 'package:fluffychat/pangea/models/analytics/constructs_model.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import '../enum/vocab_proficiency_enum.dart';
/// Headword lists for a language, one [VocabList] per named list.
class VocabHeadwords {
  List<VocabList> lists;

  VocabHeadwords({
    required this.lists,
  });

  /// in json parameter, keys are the names of the VocabList
  /// values are the words in the VocabList
  factory VocabHeadwords.fromJson(Map<String, dynamic> json) =>
      VocabHeadwords(
        lists: [
          for (final entry in json.entries)
            VocabList(
              name: entry.key,
              lemmas: (entry.value as Iterable).cast<String>().toList(),
            ),
        ],
      );

  /// Loads and parses the bundled `<langCode>_headwords.json` asset.
  static Future<VocabHeadwords> getHeadwords(String langCode) async {
    final raw = await rootBundle.loadString('${langCode}_headwords.json');
    return VocabHeadwords.fromJson(jsonDecode(raw));
  }
}
/// A named list of headword lemmas with per-lemma usage totals.
class VocabList {
  String name;

  /// key is lemma
  Map<String, VocabTotals> words = {};

  VocabList({
    required this.name,
    required List<String> lemmas,
  }) {
    for (final lemma in lemmas) {
      words[lemma] = VocabTotals.newTotals;
    }
  }

  /// Records [use]s against [lemma], ignoring lemmas not in this list.
  ///
  /// NOTE(review): keys are stored exactly as given to the constructor but
  /// looked up upper-cased here — unless the source data is already
  /// upper-case, this lookup never matches. Confirm intended key casing.
  void addVocabUse(String lemma, List<OneConstructUse> use) {
    words[lemma.toUpperCase()]?.addVocabUseBasedOnUseType(use);
  }

  /// Sums each word's proficiency level into list-wide totals.
  /// (Method name keeps its original spelling for caller compatibility.)
  ListTotals calculuateTotals() {
    final ListTotals listTotals = ListTotals.empty;
    for (final word in words.entries) {
      // Removed a leftover hard-coded debugger() breakpoint that paused
      // debug builds on the word "baloncesto".
      listTotals.addByType(word.value.proficiencyLevel);
    }
    return listTotals;
  }
}
/// Counts of words in each proficiency bucket within one list.
class ListTotals {
  int low;
  int medium;
  int high;
  int unknown;

  ListTotals({
    required this.low,
    required this.medium,
    required this.high,
    required this.unknown,
  });

  /// A fresh all-zero tally.
  /// Return type is now explicit (the getter was implicitly `dynamic`).
  static ListTotals get empty =>
      ListTotals(low: 0, medium: 0, high: 0, unknown: 0);

  /// Increments the bucket matching [prof].
  void addByType(VocabProficiencyEnum prof) {
    switch (prof) {
      case VocabProficiencyEnum.low:
        low++;
        break;
      case VocabProficiencyEnum.medium:
        medium++;
        break;
      case VocabProficiencyEnum.high:
        high++;
        break;
      case VocabProficiencyEnum.unk:
        unknown++;
        break;
    }
  }
}
/// Per-lemma tallies of construct uses, used to estimate vocab proficiency.
class VocabTotals {
  num ga;
  num wa;
  num corIt;
  num incIt;
  num ignIt;

  VocabTotals({
    required this.ga,
    required this.wa,
    required this.corIt,
    required this.incIt,
    required this.ignIt,
  });

  /// Weighted sum of use counts; higher suggests stronger mastery
  /// (grammar assists count against, writing assists count for).
  num get calculateEstimatedVocabProficiency {
    const num gaWeight = -1;
    const num waWeight = 1;
    const num corItWeight = 0.5;
    const num incItWeight = -0.5;
    const num ignItWeight = 0.1;
    return ga * gaWeight +
        wa * waWeight +
        corIt * corItWeight +
        incIt * incItWeight +
        ignIt * ignItWeight;
  }

  VocabProficiencyEnum get proficiencyLevel =>
      VocabProficiencyUtil.proficiency(calculateEstimatedVocabProficiency);

  /// A fresh all-zero tally.
  static VocabTotals get newTotals => VocabTotals(
        ga: 0,
        wa: 0,
        corIt: 0,
        incIt: 0,
        ignIt: 0,
      );

  /// Folds each use into the matching counter.
  void addVocabUseBasedOnUseType(List<OneConstructUse> uses) {
    for (final use in uses) {
      switch (use.useType) {
        case ConstructUseTypeEnum.ga:
          ga++;
          break;
        case ConstructUseTypeEnum.wa:
          wa++;
          break;
        case ConstructUseTypeEnum.corIt:
          corIt++;
          break;
        case ConstructUseTypeEnum.incIt:
          incIt++;
          break;
        case ConstructUseTypeEnum.ignIt:
          ignIt++;
          break;
        //TODO - these shouldn't be counted as such
        case ConstructUseTypeEnum.ignIGC:
          ignIt++;
          break;
        case ConstructUseTypeEnum.corIGC:
          corIt++;
          break;
        case ConstructUseTypeEnum.incIGC:
          incIt++;
          break;
        //TODO if we bring back Headwords then we need to add these.
        // corWL/incWL/ignWL/nan were added to keep this switch exhaustive
        // after the enum gained the word-focus-listening types (#744).
        case ConstructUseTypeEnum.corPA:
        case ConstructUseTypeEnum.incPA:
        case ConstructUseTypeEnum.ignPA:
        case ConstructUseTypeEnum.corWL:
        case ConstructUseTypeEnum.incWL:
        case ConstructUseTypeEnum.ignWL:
        case ConstructUseTypeEnum.unk:
        case ConstructUseTypeEnum.nan:
          break;
      }
    }
  }
}
// import 'dart:convert';
// import 'dart:developer';
// import 'package:fluffychat/pangea/enum/construct_use_type_enum.dart';
// import 'package:fluffychat/pangea/models/analytics/constructs_model.dart';
// import 'package:flutter/foundation.dart';
// import 'package:flutter/services.dart';
// import '../enum/vocab_proficiency_enum.dart';
// class VocabHeadwords {
// List<VocabList> lists;
// VocabHeadwords({
// required this.lists,
// });
// /// in json parameter, keys are the names of the VocabList
// /// values are the words in the VocabList
// factory VocabHeadwords.fromJson(Map<String, dynamic> json) {
// final List<VocabList> lists = [];
// for (final entry in json.entries) {
// lists.add(
// VocabList(
// name: entry.key,
// lemmas: (entry.value as Iterable).cast<String>().toList(),
// ),
// );
// }
// return VocabHeadwords(lists: lists);
// }
// static Future<VocabHeadwords> getHeadwords(String langCode) async {
// final String data =
// await rootBundle.loadString('${langCode}_headwords.json');
// final decoded = jsonDecode(data);
// final VocabHeadwords headwords = VocabHeadwords.fromJson(decoded);
// return headwords;
// }
// }
// class VocabList {
// String name;
// /// key is lemma
// Map<String, VocabTotals> words = {};
// VocabList({
// required this.name,
// required List<String> lemmas,
// }) {
// for (final lemma in lemmas) {
// words[lemma] = VocabTotals.newTotals;
// }
// }
// void addVocabUse(String lemma, List<OneConstructUse> use) {
// words[lemma.toUpperCase()]?.addVocabUseBasedOnUseType(use);
// }
// ListTotals calculuateTotals() {
// final ListTotals listTotals = ListTotals.empty;
// for (final word in words.entries) {
// debugger(when: kDebugMode && word.key == "baloncesto".toLowerCase());
// listTotals.addByType(word.value.proficiencyLevel);
// }
// return listTotals;
// }
// }
// class ListTotals {
// int low;
// int medium;
// int high;
// int unknown;
// ListTotals({
// required this.low,
// required this.medium,
// required this.high,
// required this.unknown,
// });
// static get empty => ListTotals(low: 0, medium: 0, high: 0, unknown: 0);
// void addByType(VocabProficiencyEnum prof) {
// switch (prof) {
// case VocabProficiencyEnum.low:
// low++;
// break;
// case VocabProficiencyEnum.medium:
// medium++;
// break;
// case VocabProficiencyEnum.high:
// high++;
// break;
// case VocabProficiencyEnum.unk:
// unknown++;
// break;
// }
// }
// }
// class VocabTotals {
// num ga;
// num wa;
// num corIt;
// num incIt;
// num ignIt;
// VocabTotals({
// required this.ga,
// required this.wa,
// required this.corIt,
// required this.incIt,
// required this.ignIt,
// });
// num get calculateEstimatedVocabProficiency {
// const num gaWeight = -1;
// const num waWeight = 1;
// const num corItWeight = 0.5;
// const num incItWeight = -0.5;
// const num ignItWeight = 0.1;
// final num gaScore = ga * gaWeight;
// final num waScore = wa * waWeight;
// final num corItScore = corIt * corItWeight;
// final num incItScore = incIt * incItWeight;
// final num ignItScore = ignIt * ignItWeight;
// final num totalScore =
// gaScore + waScore + corItScore + incItScore + ignItScore;
// return totalScore;
// }
// VocabProficiencyEnum get proficiencyLevel =>
// VocabProficiencyUtil.proficiency(calculateEstimatedVocabProficiency);
// static VocabTotals get newTotals {
// return VocabTotals(
// ga: 0,
// wa: 0,
// corIt: 0,
// incIt: 0,
// ignIt: 0,
// );
// }
// void addVocabUseBasedOnUseType(List<OneConstructUse> uses) {
// for (final use in uses) {
// switch (use.useType) {
// case ConstructUseTypeEnum.ga:
// ga++;
// break;
// case ConstructUseTypeEnum.wa:
// wa++;
// break;
// case ConstructUseTypeEnum.corIt:
// corIt++;
// break;
// case ConstructUseTypeEnum.incIt:
// incIt++;
// break;
// case ConstructUseTypeEnum.ignIt:
// ignIt++;
// break;
// //TODO - these shouldn't be counted as such
// case ConstructUseTypeEnum.ignIGC:
// ignIt++;
// break;
// case ConstructUseTypeEnum.corIGC:
// corIt++;
// break;
// case ConstructUseTypeEnum.incIGC:
// incIt++;
// break;
// //TODO if we bring back Headwords then we need to add these
// case ConstructUseTypeEnum.corPA:
// break;
// case ConstructUseTypeEnum.incPA:
// break;
// case ConstructUseTypeEnum.unk:
// break;
// case ConstructUseTypeEnum.ignPA:
// break;
// }
// }
// }
// }

@ -128,8 +128,6 @@ class PangeaToken {
lemma: lemma.text,
type: ConstructTypeEnum.vocab,
),
xp: 0,
lastUsed: null,
),
);
@ -140,8 +138,6 @@ class PangeaToken {
lemma: morph.key,
type: ConstructTypeEnum.morph,
),
xp: 0,
lastUsed: null,
),
);
}

@ -1,5 +1,6 @@
import 'package:collection/collection.dart';
import 'package:fluffychat/pangea/enum/activity_type_enum.dart';
import 'package:fluffychat/pangea/enum/construct_use_type_enum.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_model.dart';
@ -7,11 +8,13 @@ class ConstructWithXP {
final ConstructIdentifier id;
int xp;
DateTime? lastUsed;
List<ConstructUseTypeEnum> condensedConstructUses;
ConstructWithXP({
required this.id,
required this.xp,
required this.lastUsed,
this.xp = 0,
this.lastUsed,
this.condensedConstructUses = const [],
});
factory ConstructWithXP.fromJson(Map<String, dynamic> json) {
@ -23,6 +26,14 @@ class ConstructWithXP {
lastUsed: json['last_used'] != null
? DateTime.parse(json['last_used'] as String)
: null,
condensedConstructUses: (json['uses'] as List<String>).map((e) {
return ConstructUseTypeEnum.values.firstWhereOrNull(
(element) =>
element.string == e ||
element.toString().split('.').last == e,
) ??
ConstructUseTypeEnum.nan;
}).toList(),
);
}
@ -31,6 +42,7 @@ class ConstructWithXP {
'construct_id': id.toJson(),
'xp': xp,
'last_used': lastUsed?.toIso8601String(),
'uses': condensedConstructUses.map((e) => e.string).toList(),
};
return json;
}

@ -5,13 +5,13 @@ import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activ
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
class MultipleChoice {
class ActivityContent {
final String question;
final List<String> choices;
final String answer;
final RelevantSpanDisplayDetails? spanDisplayDetails;
MultipleChoice({
ActivityContent({
required this.question,
required this.choices,
required this.answer,
@ -37,12 +37,12 @@ class MultipleChoice {
Color choiceColor(int index) =>
index == correctAnswerIndex ? AppConfig.success : AppConfig.warning;
factory MultipleChoice.fromJson(Map<String, dynamic> json) {
factory ActivityContent.fromJson(Map<String, dynamic> json) {
final spanDisplay = json['span_display_details'] != null &&
json['span_display_details'] is Map
? RelevantSpanDisplayDetails.fromJson(json['span_display_details'])
: null;
return MultipleChoice(
return ActivityContent(
question: json['question'] as String,
choices: (json['choices'] as List).map((e) => e as String).toList(),
answer: json['answer'] ?? json['correct_answer'] as String,

@ -165,110 +165,30 @@ class PracticeActivityRequest {
}
}
/// A free-response practice activity: an open question graded against
/// [correctAnswer] with the help of [gradingGuide].
class FreeResponse {
  final String question;
  final String correctAnswer;
  final String gradingGuide;

  FreeResponse({
    required this.question,
    required this.correctAnswer,
    required this.gradingGuide,
  });

  /// Builds an instance from server JSON; throws on missing/mistyped keys.
  factory FreeResponse.fromJson(Map<String, dynamic> json) => FreeResponse(
        question: json['question'] as String,
        correctAnswer: json['correct_answer'] as String,
        gradingGuide: json['grading_guide'] as String,
      );

  /// Serializes back to the wire format consumed by [FreeResponse.fromJson].
  Map<String, dynamic> toJson() => {
        'question': question,
        'correct_answer': correctAnswer,
        'grading_guide': gradingGuide,
      };
}
/// Listening activity content: an audio clip URL paired with its
/// transcript text.
class Listening {
  /// URL of the audio clip to play.
  final String audioUrl;

  /// Transcript of the audio.
  final String text;

  Listening({required this.audioUrl, required this.text});

  /// Deserializes a [Listening] from JSON.
  factory Listening.fromJson(Map<String, dynamic> json) => Listening(
        audioUrl: json['audio_url'] as String,
        text: json['text'] as String,
      );

  /// Serializes this object back to JSON.
  Map<String, dynamic> toJson() => {
        'audio_url': audioUrl,
        'text': text,
      };
}
/// Speaking activity content: the text the learner is asked to say aloud.
class Speaking {
  /// The text to be spoken.
  final String text;

  Speaking({required this.text});

  /// Deserializes a [Speaking] from JSON.
  factory Speaking.fromJson(Map<String, dynamic> json) => Speaking(
        text: json['text'] as String,
      );

  /// Serializes this object back to JSON.
  Map<String, dynamic> toJson() => {
        'text': text,
      };
}
class PracticeActivityModel {
final List<ConstructIdentifier> tgtConstructs;
final String langCode;
final String msgId;
final ActivityTypeEnum activityType;
final MultipleChoice? multipleChoice;
final Listening? listening;
final Speaking? speaking;
final FreeResponse? freeResponse;
final ActivityContent content;
PracticeActivityModel({
required this.tgtConstructs,
required this.langCode,
required this.msgId,
required this.activityType,
this.multipleChoice,
this.listening,
this.speaking,
this.freeResponse,
required this.content,
});
String get question {
switch (activityType) {
case ActivityTypeEnum.multipleChoice:
return multipleChoice!.question;
case ActivityTypeEnum.listening:
return listening!.text;
case ActivityTypeEnum.speaking:
return speaking!.text;
case ActivityTypeEnum.freeResponse:
return freeResponse!.question;
default:
return '';
}
}
String get question => content.question;
factory PracticeActivityModel.fromJson(Map<String, dynamic> json) {
// moving from multiple_choice to content as the key
// this is to make the model more generic
// here for backward compatibility
final Map<String, dynamic> content =
(json['content'] ?? json["multiple_choice"]) as Map<String, dynamic>;
return PracticeActivityModel(
tgtConstructs: ((json['tgt_constructs'] ?? json['target_constructs'])
as List)
@ -283,27 +203,14 @@ class PracticeActivityModel {
e.string == json['activity_type'] as String ||
e.string.split('.').last == json['activity_type'] as String,
),
multipleChoice: json['multiple_choice'] != null
? MultipleChoice.fromJson(
json['multiple_choice'] as Map<String, dynamic>,
)
: null,
listening: json['listening'] != null
? Listening.fromJson(json['listening'] as Map<String, dynamic>)
: null,
speaking: json['speaking'] != null
? Speaking.fromJson(json['speaking'] as Map<String, dynamic>)
: null,
freeResponse: json['free_response'] != null
? FreeResponse.fromJson(
json['free_response'] as Map<String, dynamic>,
)
: null,
content: ActivityContent.fromJson(
content,
),
);
}
RelevantSpanDisplayDetails? get relevantSpanDisplayDetails =>
multipleChoice?.spanDisplayDetails;
content.spanDisplayDetails;
Map<String, dynamic> toJson() {
return {
@ -311,10 +218,7 @@ class PracticeActivityModel {
'lang_code': langCode,
'msg_id': msgId,
'activity_type': activityType.string,
'multiple_choice': multipleChoice?.toJson(),
'listening': listening?.toJson(),
'speaking': speaking?.toJson(),
'free_response': freeResponse?.toJson(),
'content': content.toJson(),
};
}
@ -328,10 +232,7 @@ class PracticeActivityModel {
other.langCode == langCode &&
other.msgId == msgId &&
other.activityType == activityType &&
other.multipleChoice == multipleChoice &&
other.listening == listening &&
other.speaking == speaking &&
other.freeResponse == freeResponse;
other.content == content;
}
@override
@ -340,10 +241,7 @@ class PracticeActivityModel {
langCode.hashCode ^
msgId.hashCode ^
activityType.hashCode ^
multipleChoice.hashCode ^
listening.hashCode ^
speaking.hashCode ^
freeResponse.hashCode;
content.hashCode;
}
}
@ -372,7 +270,7 @@ class RelevantSpanDisplayDetails {
return RelevantSpanDisplayDetails(
offset: json['offset'] as int,
length: json['length'] as int,
displayInstructions: display ?? ActivityDisplayInstructionsEnum.hide,
displayInstructions: display ?? ActivityDisplayInstructionsEnum.nothing,
);
}
@ -384,7 +282,6 @@ class RelevantSpanDisplayDetails {
};
}
// override operator == and hashCode
@override
bool operator ==(Object other) {
if (identical(this, other)) return true;

@ -1,10 +1,18 @@
import 'dart:developer';
import 'dart:math';
import 'package:fluffychat/pages/chat/events/audio_player.dart';
import 'package:fluffychat/pangea/controllers/text_to_speech_controller.dart';
import 'package:fluffychat/pangea/extensions/pangea_event_extension.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/widgets/chat/message_selection_overlay.dart';
import 'package:fluffychat/pangea/widgets/chat/message_toolbar.dart';
import 'package:fluffychat/pangea/widgets/chat/toolbar_content_loading_indicator.dart';
import 'package:fluffychat/pangea/widgets/chat/tts_controller.dart';
import 'package:fluffychat/pangea/widgets/igc/card_error_widget.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:matrix/matrix.dart';
@ -12,11 +20,13 @@ import 'package:matrix/matrix.dart';
class MessageAudioCard extends StatefulWidget {
final PangeaMessageEvent messageEvent;
final MessageOverlayController overlayController;
final PangeaTokenText? selection;
const MessageAudioCard({
super.key,
required this.messageEvent,
required this.overlayController,
this.selection,
});
@override
@ -25,9 +35,113 @@ class MessageAudioCard extends StatefulWidget {
class MessageAudioCardState extends State<MessageAudioCard> {
bool _isLoading = false;
Event? localAudioEvent;
PangeaAudioFile? audioFile;
int? sectionStartMS;
int? sectionEndMS;
TtsController tts = TtsController();
@override
void initState() {
super.initState();
fetchAudio();
// initializeTTS();
}
// initializeTTS() async {
// tts.setupTTS().then((value) => setState(() {}));
// }
/// Reacts to a change in the selected token: recomputes the audio
/// playback window and immediately speaks the new selection via TTS.
@override
void didUpdateWidget(covariant oldWidget) {
  // @ggurdin did you find a case of needing to reinitialize TTS because of a language change?
  // if (widget.messageEvent.messageDisplayLangCode !=
  //     oldWidget.messageEvent.messageDisplayLangCode) {
  //   initializeTTS();
  // }
  if (oldWidget.selection != widget.selection) {
    debugPrint('selection changed');
    // Update sectionStartMS/sectionEndMS for the new selection, then
    // speak the selected token's text.
    setSectionStartAndEndFromSelection();
    playSelectionAudio();
  }
  super.didUpdateWidget(oldWidget);
}
/// Speaks the currently selected token's text aloud via TTS.
///
/// Callers must ensure [widget.selection] is non-null before invoking.
Future<void> playSelectionAudio() async {
  final selectedText = widget.selection!.content;
  await tts.speak(selectedText);
}
/// Updates the playback window bounds (milliseconds), rebuilding the
/// widget; does nothing when the state is no longer mounted.
void setSectionStartAndEnd(int? start, int? end) {
  if (!mounted) return;
  setState(() {
    sectionStartMS = start;
    sectionEndMS = end;
  });
}
/// Computes the millisecond playback window ([sectionStartMS] /
/// [sectionEndMS]) for the audio span matching [widget.selection].
///
/// Clears the window when there is no selection or when the audio
/// duration is unknown; logs an error when no TTS token matches the
/// selection's offset.
void setSectionStartAndEndFromSelection() async {
  if (audioFile == null) {
    // should never happen but just in case
    debugger(when: kDebugMode);
    return;
  }
  if (audioFile!.duration == null) {
    // should never happen but just in case
    debugger(when: kDebugMode);
    ErrorHandler.logError(
      e: Exception(),
      m: 'audioFile duration is null in MessageAudioCardState',
      data: {
        'audioFile': audioFile,
      },
    );
    return setSectionStartAndEnd(null, null);
  }
  // if there is no selection, we don't need to do anything
  // but clear the section start and end
  if (widget.selection == null) {
    return setSectionStartAndEnd(null, null);
  }
  final PangeaTokenText selection = widget.selection!;
  final List<TTSToken> tokens = audioFile!.tokens;
  // find the token that corresponds to the selection
  // set the start to the start of the token
  // set the end to the start of the next token or to the duration of the audio if
  // if there is no next token
  for (int i = 0; i < tokens.length; i++) {
    final TTSToken ttsToken = tokens[i];
    // Tokens are matched by character offset into the message text.
    if (ttsToken.text.offset == selection.offset) {
      // Pad the window by 150ms on each side, clamped to the clip bounds.
      return setSectionStartAndEnd(
        max(ttsToken.startMS - 150, 0),
        min(ttsToken.endMS + 150, audioFile!.duration!),
      );
    }
  }
  // if we didn't find the token, we should pause if debug and log an error
  debugger(when: kDebugMode);
  ErrorHandler.logError(
    e: Exception(),
    m: 'could not find token for selection in MessageAudioCardState',
    data: {
      'selection': selection,
      'tokens': tokens,
      'sttTokens': audioFile!.tokens,
    },
  );
  setSectionStartAndEnd(null, null);
}
Future<void> fetchAudio() async {
if (!mounted) return;
setState(() => _isLoading = true);
@ -36,20 +150,27 @@ class MessageAudioCardState extends State<MessageAudioCard> {
final String langCode = widget.messageEvent.messageDisplayLangCode;
final String? text =
widget.messageEvent.representationByLanguage(langCode)?.text;
if (text != null) {
final Event? localEvent =
widget.messageEvent.getTextToSpeechLocal(langCode, text);
if (localEvent != null) {
localAudioEvent = localEvent;
if (mounted) setState(() => _isLoading = false);
return;
}
if (text == null) {
//TODO - handle error but get out of flow
}
audioFile =
await widget.messageEvent.getMatrixAudioFile(langCode, context);
final Event? localEvent =
widget.messageEvent.getTextToSpeechLocal(langCode, text!);
if (localEvent != null) {
audioFile = await localEvent.getPangeaAudioFile();
} else {
audioFile = await widget.messageEvent.getMatrixAudioFile(
langCode,
context,
);
}
debugPrint("audio file is now: $audioFile. setting starts and ends...");
setSectionStartAndEndFromSelection();
if (mounted) setState(() => _isLoading = false);
} catch (e, s) {
debugger(when: kDebugMode);
debugPrint(StackTrace.current.toString());
if (!mounted) return;
setState(() => _isLoading = false);
@ -68,19 +189,6 @@ class MessageAudioCardState extends State<MessageAudioCard> {
},
);
}
return;
}
@override
void initState() {
super.initState();
//once we have audio for words, we'll play that
if (widget.overlayController.isSelection) {
widget.overlayController.clearSelection();
}
fetchAudio();
}
@override
@ -91,15 +199,17 @@ class MessageAudioCardState extends State<MessageAudioCard> {
alignment: Alignment.center,
child: _isLoading
? const ToolbarContentLoadingIndicator()
: localAudioEvent != null || audioFile != null
: audioFile != null
? Column(
children: [
AudioPlayerWidget(
localAudioEvent,
color: Theme.of(context).colorScheme.onPrimaryContainer,
null,
matrixFile: audioFile,
autoplay: true,
sectionStartMS: sectionStartMS,
sectionEndMS: sectionEndMS,
color: Theme.of(context).colorScheme.onPrimaryContainer,
),
tts.missingVoiceButton ?? const SizedBox(),
],
)
: const CardErrorWidget(),
@ -109,6 +219,7 @@ class MessageAudioCardState extends State<MessageAudioCard> {
class PangeaAudioFile extends MatrixAudioFile {
List<int>? waveform;
List<TTSToken> tokens;
PangeaAudioFile({
required super.bytes,
@ -116,5 +227,6 @@ class PangeaAudioFile extends MatrixAudioFile {
super.mimeType,
super.duration,
this.waveform,
required this.tokens,
});
}

@ -6,6 +6,7 @@ import 'package:fluffychat/config/setting_keys.dart';
import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/pages/chat/chat.dart';
import 'package:fluffychat/pages/chat/events/message_reactions.dart';
import 'package:fluffychat/pangea/enum/activity_display_instructions_enum.dart';
import 'package:fluffychat/pangea/enum/message_mode_enum.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
@ -182,8 +183,10 @@ class MessageOverlayController extends State<MessageSelectionOverlay>
void onClickOverlayMessageToken(
PangeaToken token,
) {
if ([MessageMode.practiceActivity, MessageMode.textToSpeech]
.contains(toolbarMode)) {
if ([
MessageMode.practiceActivity,
// MessageMode.textToSpeech
].contains(toolbarMode)) {
return;
}
@ -210,19 +213,23 @@ class MessageOverlayController extends State<MessageSelectionOverlay>
void setSelectedSpan(PracticeActivityModel activity) {
final RelevantSpanDisplayDetails? span =
activity.multipleChoice?.spanDisplayDetails;
activity.content.spanDisplayDetails;
if (span == null) {
debugger(when: kDebugMode);
return;
}
_selectedSpan = PangeaTokenText(
offset: span.offset,
length: span.length,
content: widget._pangeaMessageEvent.messageDisplayText
.substring(span.offset, span.offset + span.length),
);
if (span.displayInstructions != ActivityDisplayInstructionsEnum.nothing) {
_selectedSpan = PangeaTokenText(
offset: span.offset,
length: span.length,
content: widget._pangeaMessageEvent.messageDisplayText
.substring(span.offset, span.offset + span.length),
);
} else {
_selectedSpan = null;
}
setState(() {});
}

@ -49,6 +49,7 @@ class MessageToolbar extends StatelessWidget {
return MessageAudioCard(
messageEvent: pangeaMessageEvent,
overlayController: overLayController,
selection: overLayController.selectedSpan,
);
case MessageMode.speechToText:
return MessageSpeechToTextCard(

@ -70,34 +70,32 @@ class ToolbarButtons extends StatelessWidget {
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: modes
.mapIndexed(
(index, mode) => Tooltip(
message: mode.tooltip(context),
child: IconButton(
iconSize: 20,
icon: Icon(mode.icon),
color: mode == overlayController.toolbarMode
? Colors.white
: null,
isSelected: mode == overlayController.toolbarMode,
style: ButtonStyle(
backgroundColor: WidgetStateProperty.all(
mode.iconButtonColor(
context,
index,
overlayController.toolbarMode,
pangeaMessageEvent.numberOfActivitiesCompleted,
overlayController.isPracticeComplete,
),
(index, mode) => IconButton(
iconSize: 20,
icon: Icon(mode.icon),
tooltip: mode.tooltip(context),
color: mode == overlayController.toolbarMode
? Colors.white
: null,
isSelected: mode == overlayController.toolbarMode,
style: ButtonStyle(
backgroundColor: WidgetStateProperty.all(
mode.iconButtonColor(
context,
index,
overlayController.toolbarMode,
pangeaMessageEvent.numberOfActivitiesCompleted,
overlayController.isPracticeComplete,
),
),
onPressed: mode.isUnlocked(
index,
pangeaMessageEvent.numberOfActivitiesCompleted,
overlayController.isPracticeComplete,
)
? () => overlayController.updateToolbarMode(mode)
: null,
),
onPressed: mode.isUnlocked(
index,
pangeaMessageEvent.numberOfActivitiesCompleted,
overlayController.isPracticeComplete,
)
? () => overlayController.updateToolbarMode(mode)
: null,
),
)
.toList(),

@ -0,0 +1,61 @@
import 'dart:io';
import 'package:android_intent_plus/android_intent.dart';
import 'package:fluffychat/config/app_config.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:future_loading_dialog/future_loading_dialog.dart';
/// Shown when the device appears to have no text-to-speech voice
/// installed for the user's target language; offers a shortcut into the
/// platform's TTS settings screen.
class MissingVoiceButton extends StatelessWidget {
  /// Language code of the voice that is missing (display context only).
  final String targetLangCode;

  const MissingVoiceButton({
    required this.targetLangCode,
    super.key,
  });

  /// Opens the Android text-to-speech settings screen.
  ///
  /// No-op on other platforms — the Android intent is the only settings
  /// deep-link implemented here.
  void launchTTSSettings(BuildContext context) {
    if (Platform.isAndroid) {
      const intent = AndroidIntent(
        action: 'com.android.settings.TTS_SETTINGS',
        package: 'com.talktolearn.chat',
      );
      showFutureLoadingDialog(
        context: context,
        future: intent.launch,
      );
    }
  }

  @override
  Widget build(BuildContext context) {
    return Container(
      decoration: BoxDecoration(
        color:
            Theme.of(context).colorScheme.onPrimaryContainer.withOpacity(0.1),
        borderRadius: const BorderRadius.all(
          Radius.circular(AppConfig.borderRadius),
        ),
      ),
      padding: const EdgeInsets.all(8),
      margin: const EdgeInsets.only(top: 8),
      child: Column(
        mainAxisAlignment: MainAxisAlignment.center,
        children: [
          Text(
            L10n.of(context)!.voiceNotAvailable,
            textAlign: TextAlign.center,
          ),
          TextButton(
            // Bug fix: previously `() => launchTTSSettings` returned the
            // method tear-off without invoking it, so tapping did nothing.
            onPressed: () => launchTTSSettings(context),
            style: const ButtonStyle(
              tapTargetSize: MaterialTapTargetSize.shrinkWrap,
            ),
            child: Text(L10n.of(context)!.openVoiceSettings),
          ),
        ],
      ),
    );
  }
}

@ -0,0 +1,77 @@
import 'dart:developer';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/widgets/chat/missing_voice_button.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_tts/flutter_tts.dart' as flutter_tts;
/// Thin wrapper around [flutter_tts.FlutterTts] that tracks whether the
/// user's target language has an installed voice on this device.
class TtsController {
  /// Language code TTS should speak in; defaults to the user's L2 when
  /// null at setup/speak time.
  String? targetLanguage;

  /// Language codes (e.g. "en") extracted from the device's voice list.
  List<String> availableLangCodes = [];

  final flutter_tts.FlutterTts tts = flutter_tts.FlutterTts();

  // if targetLanguage isn't set here, it needs to be set later.
  // Setup is fire-and-forget; callers may also await setupTTS() directly.
  TtsController() {
    setupTTS();
  }

  /// Configures the engine language and caches the available voice
  /// language codes. Errors are logged, never thrown.
  Future<void> setupTTS() async {
    try {
      targetLanguage ??=
          MatrixState.pangeaController.languageController.userL2?.langCode;
      debugger(when: kDebugMode && targetLanguage == null);
      debugPrint('setupTTS targetLanguage: $targetLanguage');

      // Bug fix: setLanguage returns a Future and was previously not
      // awaited, so the voice list could be queried before the language
      // was applied.
      await tts.setLanguage(targetLanguage ?? "en");
      await tts.awaitSpeakCompletion(true);

      final voices = await tts.getVoices;
      availableLangCodes = (voices as List)
          .map((v) {
            // The language code is read from 'locale' (e.g. "en-US").
            // NOTE(review): an earlier revision read 'name' instead —
            // confirm 'locale' is populated on all platforms/devices.
            return v['locale']?.split("-").first;
          })
          .toSet()
          .cast<String>()
          .toList();
      debugPrint("lang supported? $isLanguageFullySupported");
    } catch (e, s) {
      debugger(when: kDebugMode);
      ErrorHandler.logError(e: e, s: s);
    }
  }

  /// Stops any in-progress speech, then speaks [text].
  Future<void> speak(String text) async {
    targetLanguage ??=
        MatrixState.pangeaController.languageController.userL2?.langCode;
    await tts.stop();
    return tts.speak(text);
  }

  /// Whether the device reports a voice for [targetLanguage].
  bool get isLanguageFullySupported =>
      availableLangCodes.contains(targetLanguage);

  // @ggurdin
  /// A prompt to install a missing voice; an empty box on web, on
  /// non-Android platforms, or when a voice is available.
  Widget get missingVoiceButton {
    final lang = targetLanguage;
    // Bug fix: the previous expression fell through to
    // `MissingVoiceButton(targetLangCode: targetLanguage!)` exactly when
    // targetLanguage was null, crashing on the null assertion.
    if (lang == null ||
        kIsWeb ||
        isLanguageFullySupported ||
        !PlatformInfos.isAndroid) {
      return const SizedBox.shrink();
    }
    return MissingVoiceButton(targetLangCode: lang);
  }
}

@ -3,9 +3,11 @@ import 'dart:developer';
import 'package:collection/collection.dart';
import 'package:fluffychat/pangea/choreographer/widgets/choice_array.dart';
import 'package:fluffychat/pangea/controllers/my_analytics_controller.dart';
import 'package:fluffychat/pangea/enum/activity_type_enum.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_model.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_record_model.dart';
import 'package:fluffychat/pangea/widgets/practice_activity/practice_activity_card.dart';
import 'package:fluffychat/pangea/widgets/practice_activity/word_audio_button.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
@ -13,7 +15,7 @@ import 'package:flutter/material.dart';
/// The multiple choice activity view
class MultipleChoiceActivity extends StatefulWidget {
final MessagePracticeActivityCardState practiceCardController;
final PracticeActivityModel? currentActivity;
final PracticeActivityModel currentActivity;
const MultipleChoiceActivity({
super.key,
@ -52,7 +54,7 @@ class MultipleChoiceActivityState extends State<MultipleChoiceActivity> {
}
final bool isCorrect =
widget.currentActivity!.multipleChoice!.isCorrect(value, index);
widget.currentActivity.content.isCorrect(value, index);
currentRecordModel?.addResponse(
text: value,
@ -79,7 +81,7 @@ class MultipleChoiceActivityState extends State<MultipleChoiceActivity> {
);
// If the selected choice is correct, send the record and get the next activity
if (widget.currentActivity!.multipleChoice!.isCorrect(value, index)) {
if (widget.currentActivity.content.isCorrect(value, index)) {
widget.practiceCardController.onActivityFinish();
}
@ -90,39 +92,37 @@ class MultipleChoiceActivityState extends State<MultipleChoiceActivity> {
@override
Widget build(BuildContext context) {
final PracticeActivityModel? practiceActivity = widget.currentActivity;
if (practiceActivity == null) {
return const SizedBox();
}
final PracticeActivityModel practiceActivity = widget.currentActivity;
return Container(
padding: const EdgeInsets.all(8),
child: Column(
children: [
Text(
practiceActivity.multipleChoice!.question,
practiceActivity.content.question,
style: const TextStyle(
fontSize: 16,
fontWeight: FontWeight.bold,
),
),
const SizedBox(height: 8),
if (practiceActivity.activityType ==
ActivityTypeEnum.wordFocusListening)
WordAudioButton(text: practiceActivity.content.answer),
ChoicesArray(
isLoading: false,
uniqueKeyForLayerLink: (index) => "multiple_choice_$index",
originalSpan: "placeholder",
onPressed: updateChoice,
selectedChoiceIndex: selectedChoiceIndex,
choices: practiceActivity.multipleChoice!.choices
choices: practiceActivity.content.choices
.mapIndexed(
(index, value) => Choice(
text: value,
color: currentRecordModel?.hasTextResponse(value) ?? false
? practiceActivity.multipleChoice!.choiceColor(index)
? practiceActivity.content.choiceColor(index)
: null,
isGold: practiceActivity.multipleChoice!
.isCorrect(value, index),
isGold: practiceActivity.content.isCorrect(value, index),
),
)
.toList(),

@ -277,7 +277,14 @@ class MessagePracticeActivityCardState extends State<PracticeActivityCard> {
case ActivityTypeEnum.multipleChoice:
return MultipleChoiceActivity(
practiceCardController: this,
currentActivity: currentActivity,
currentActivity: currentActivity!,
);
case ActivityTypeEnum.wordFocusListening:
// return WordFocusListeningActivity(
// activity: currentActivity!, practiceCardController: this);
return MultipleChoiceActivity(
practiceCardController: this,
currentActivity: currentActivity!,
);
default:
ErrorHandler.logError(

@ -58,17 +58,9 @@ class TargetTokensController {
return _targetTokens = [];
}
_targetTokens = [];
for (int i = 0; i < tokens.length; i++) {
//don't bother with tokens that we don't save to vocab
if (!tokens[i].lemma.saveVocab) {
continue;
}
_targetTokens!.add(tokens[i].emptyTokenWithXP);
}
return _targetTokens!;
return _targetTokens = tokens
.map((token) => token.emptyTokenWithXP)
.toList();
}
Future<void> updateTokensWithConstructs(
@ -84,6 +76,12 @@ class TargetTokensController {
_targetTokens ??= await _initialize(context, pangeaMessageEvent);
for (final token in _targetTokens!) {
// we don't need to do this for tokens that don't have saveVocab set to true
if (!token.token.lemma.saveVocab){
continue;
}
for (final construct in token.constructs) {
final constructUseModel = constructList.getConstructUses(
construct.id.lemma,

@ -0,0 +1,69 @@
import 'package:fluffychat/pangea/widgets/chat/tts_controller.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
/// A play/stop toggle button that speaks [text] aloud via TTS.
class WordAudioButton extends StatefulWidget {
  /// The text to speak when the button is pressed.
  final String text;

  const WordAudioButton({
    super.key,
    required this.text,
  });

  @override
  WordAudioButtonState createState() => WordAudioButtonState();
}

class WordAudioButtonState extends State<WordAudioButton> {
  /// Whether audio is currently playing (drives icon, color, tooltip).
  bool _isPlaying = false;

  TtsController ttsController = TtsController();

  // Bug fix: removed a duplicated `@override` annotation and a leftover
  // "TODO: implement initState" comment.
  @override
  void initState() {
    super.initState();
    // Rebuild once the voice list is loaded so missingVoiceButton is
    // accurate; guard against the widget being disposed meanwhile.
    ttsController.setupTTS().then((value) {
      if (mounted) setState(() {});
    });
  }

  @override
  Widget build(BuildContext context) {
    return Column(
      children: [
        IconButton(
          icon: const Icon(Icons.play_arrow_outlined),
          isSelected: _isPlaying,
          selectedIcon: const Icon(Icons.pause_outlined),
          color: _isPlaying ? Colors.white : null,
          style: ButtonStyle(
            backgroundColor: WidgetStateProperty.all(
              _isPlaying
                  ? Theme.of(context).colorScheme.secondary
                  : Theme.of(context).colorScheme.primaryContainer,
            ),
          ),
          tooltip:
              _isPlaying ? L10n.of(context)!.stop : L10n.of(context)!.playAudio,
          onPressed: () async {
            if (_isPlaying) {
              await ttsController.tts.stop();
              // Bug fix: setState after an await must be mounted-guarded.
              if (mounted) setState(() => _isPlaying = false);
            } else {
              setState(() => _isPlaying = true);
              // speak() awaits completion (awaitSpeakCompletion is set in
              // TtsController.setupTTS), so reset the flag afterwards.
              await ttsController.speak(widget.text);
              if (mounted) setState(() => _isPlaying = false);
            }
          }, // Disable button if language isn't supported
        ),
        ttsController.missingVoiceButton,
      ],
    );
  }
}

@ -0,0 +1,173 @@
import 'dart:developer';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pangea/controllers/my_analytics_controller.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/multiple_choice_activity_model.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_model.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_record_model.dart';
import 'package:fluffychat/pangea/widgets/chat/tts_controller.dart';
import 'package:fluffychat/pangea/widgets/practice_activity/practice_activity_card.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
/// Drag-and-drop listening activity: the learner taps audio choices to
/// hear them, then drags the matching one onto the answer slot.
class WordFocusListeningActivity extends StatefulWidget {
  /// The activity to render (question, choices, answer).
  final PracticeActivityModel activity;

  /// Parent card controller used to record responses and advance.
  final MessagePracticeActivityCardState practiceCardController;

  const WordFocusListeningActivity({
    super.key,
    required this.activity,
    required this.practiceCardController,
  });

  @override
  WordFocusListeningActivityState createState() =>
      WordFocusListeningActivityState();

  /// Convenience accessor for the activity's content payload.
  ActivityContent get activityContent => activity.content;
}
class WordFocusListeningActivityState
    extends State<WordFocusListeningActivity> {
  // Index of the choice the learner dropped onto the answer slot, if any.
  int? selectedChoiceIndex;

  // TTS engine used to play each audio choice on tap.
  TtsController tts = TtsController();

  // Radius (logical px) for the draggable buttons and the answer slot.
  final double buttonSize = 40;

  // The in-progress record of the learner's responses for this activity.
  PracticeActivityRecordModel? get currentRecordModel =>
      widget.practiceCardController.currentCompletionRecord;

  // Loads the voice list, then rebuilds so missingVoiceButton-style UI
  // (if any) reflects availability.
  initializeTTS() async {
    tts.setupTTS().then((value) => setState(() {}));
  }

  @override
  void initState() {
    super.initState();
    initializeTTS();
  }

  /// Records the learner's answer for choice [index], pushes the
  /// resulting construct uses to the analytics stream, and highlights
  /// the chosen button.
  void checkAnswer(int index) {
    final String value = widget.activityContent.choices[index];
    // Ignore repeat drops of a choice that was already answered.
    if (currentRecordModel?.hasTextResponse(value) ?? false) {
      return;
    }
    final bool isCorrect = widget.activity.content.isCorrect(value, index);
    currentRecordModel?.addResponse(
      text: value,
      score: isCorrect ? 1 : 0,
    );
    if (currentRecordModel == null ||
        currentRecordModel!.latestResponse == null) {
      debugger(when: kDebugMode);
      return;
    }
    MatrixState.pangeaController.myAnalytics.setState(
      AnalyticsStream(
        // note - this maybe should be the activity event id
        eventId:
            widget.practiceCardController.widget.pangeaMessageEvent.eventId,
        roomId: widget.practiceCardController.widget.pangeaMessageEvent.room.id,
        constructs: currentRecordModel!.latestResponse!.toUses(
          widget.practiceCardController.currentActivity!,
          widget.practiceCardController.metadata,
        ),
      ),
    );
    setState(() {
      selectedChoiceIndex = index;
    });
  }

  @override
  Widget build(BuildContext context) {
    final theme = Theme.of(context);
    return Column(
      children: [
        // Text question at the top
        Text(
          widget.activityContent.question,
          style: const TextStyle(
            fontSize: 20,
            fontWeight: FontWeight.bold,
          ),
          textAlign: TextAlign.center,
        ),
        const SizedBox(height: 20),
        // Blank slot for the answer
        DragTarget<int>(
          builder: (context, candidateData, rejectedData) {
            return CircleAvatar(
              radius: buttonSize,
              backgroundColor: Colors.transparent,
              child: Container(
                decoration: BoxDecoration(
                  shape: BoxShape.circle,
                  border: Border.all(
                    color: AppConfig.primaryColor.withOpacity(0.4),
                    width: 2,
                    style: BorderStyle.solid,
                  ),
                ),
              ),
            );
          },
          // The dragged payload is the choice index; grade it on drop.
          onAcceptWithDetails: (details) => checkAnswer(details.data),
        ),
        const SizedBox(height: 10),
        // Audio options as draggable buttons
        Row(
          mainAxisAlignment: MainAxisAlignment.spaceEvenly,
          children: List.generate(
            widget.activityContent.choices.length,
            (index) => Draggable<int>(
              data: index,
              feedback: _buildAudioButton(context, theme, index),
              childWhenDragging: _buildAudioButton(context, theme, index, true),
              child: _buildAudioButton(context, theme, index),
            ),
          ),
        ),
      ],
    );
  }

  // Helper method to build the audio buttons.
  // Tapping a button speaks its choice; [dragging] renders the greyed
  // placeholder left behind while the button is being dragged.
  Widget _buildAudioButton(
    BuildContext context,
    ThemeData theme,
    int index, [
    bool dragging = false,
  ]) {
    final isAnswerCorrect = widget.activityContent.isCorrect(
      widget.activityContent.choices[index],
      index,
    );
    Color buttonColor;
    if (selectedChoiceIndex == index) {
      buttonColor = isAnswerCorrect
          ? theme.colorScheme.secondary.withOpacity(0.7) // Correct: Green
          : theme.colorScheme.error.withOpacity(0.7); // Incorrect: Red
    } else {
      buttonColor =
          AppConfig.primaryColor.withOpacity(0.4); // Default: Primary color
    }
    return GestureDetector(
      onTap: () => tts.speak(widget.activityContent.choices[index]),
      child: CircleAvatar(
        radius: buttonSize,
        backgroundColor: dragging ? Colors.grey.withOpacity(0.5) : buttonColor,
        child: const Icon(Icons.play_arrow),
      ),
    );
  }
}

@ -17,6 +17,7 @@ import firebase_messaging
import flutter_app_badger
import flutter_local_notifications
import flutter_secure_storage_macos
import flutter_tts
import flutter_web_auth_2
import flutter_webrtc
import geolocator_apple
@ -54,6 +55,7 @@ func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {
FlutterAppBadgerPlugin.register(with: registry.registrar(forPlugin: "FlutterAppBadgerPlugin"))
FlutterLocalNotificationsPlugin.register(with: registry.registrar(forPlugin: "FlutterLocalNotificationsPlugin"))
FlutterSecureStoragePlugin.register(with: registry.registrar(forPlugin: "FlutterSecureStoragePlugin"))
FlutterTtsPlugin.register(with: registry.registrar(forPlugin: "FlutterTtsPlugin"))
FlutterWebAuth2Plugin.register(with: registry.registrar(forPlugin: "FlutterWebAuth2Plugin"))
FlutterWebRTCPlugin.register(with: registry.registrar(forPlugin: "FlutterWebRTCPlugin"))
GeolocatorPlugin.register(with: registry.registrar(forPlugin: "GeolocatorPlugin"))

@ -33,6 +33,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "6.4.1"
android_intent_plus:
dependency: "direct main"
description:
name: android_intent_plus
sha256: "38921ec22ebb3b9a7eb678792cf6fab0b6f458b61b9d327688573449c9b47db3"
url: "https://pub.dev"
source: hosted
version: "5.2.0"
animations:
dependency: "direct main"
description:
@ -829,6 +837,14 @@ packages:
description: flutter
source: sdk
version: "0.0.0"
flutter_tts:
dependency: "direct main"
description:
name: flutter_tts
sha256: aed2a00c48c43af043ed81145fd8503ddd793dafa7088ab137dbef81a703e53d
url: "https://pub.dev"
source: hosted
version: "4.0.2"
flutter_typeahead:
dependency: "direct main"
description:

@ -107,6 +107,7 @@ dependencies:
wakelock_plus: ^1.2.2
webrtc_interface: ^1.0.13
# #Pangea
android_intent_plus: ^5.2.0
country_picker: ^2.0.25
csv: ^6.0.0
fl_chart: ^0.67.0
@ -128,6 +129,7 @@ dependencies:
shimmer: ^3.0.0
syncfusion_flutter_xlsio: ^25.1.40
rive: 0.11.11
flutter_tts: ^4.0.2
# Pangea#
dev_dependencies:

@ -11,6 +11,7 @@
#include <file_selector_windows/file_selector_windows.h>
#include <firebase_core/firebase_core_plugin_c_api.h>
#include <flutter_secure_storage_windows/flutter_secure_storage_windows_plugin.h>
#include <flutter_tts/flutter_tts_plugin.h>
#include <flutter_webrtc/flutter_web_r_t_c_plugin.h>
#include <pasteboard/pasteboard_plugin.h>
#include <permission_handler_windows/permission_handler_windows_plugin.h>
@ -33,6 +34,8 @@ void RegisterPlugins(flutter::PluginRegistry* registry) {
registry->GetRegistrarForPlugin("FirebaseCorePluginCApi"));
FlutterSecureStorageWindowsPluginRegisterWithRegistrar(
registry->GetRegistrarForPlugin("FlutterSecureStorageWindowsPlugin"));
FlutterTtsPluginRegisterWithRegistrar(
registry->GetRegistrarForPlugin("FlutterTtsPlugin"));
FlutterWebRTCPluginRegisterWithRegistrar(
registry->GetRegistrarForPlugin("FlutterWebRTCPlugin"));
PasteboardPluginRegisterWithRegistrar(

@ -8,6 +8,7 @@ list(APPEND FLUTTER_PLUGIN_LIST
file_selector_windows
firebase_core
flutter_secure_storage_windows
flutter_tts
flutter_webrtc
pasteboard
permission_handler_windows

Loading…
Cancel
Save