Merge pull request #436 from pangeachat/pos-morph

Pos-morph
ggurdin committed via GitHub · 1 year ago
commit 63cfe2a481

@@ -93,7 +93,6 @@ class PangeaLanguage {
   }

   static LanguageModel byLangCode(String langCode) {
-    final list = _langList;
     for (final element in _langList) {
       if (element.langCode == langCode) return element;
     }

@@ -0,0 +1,13 @@
+enum ActivityDisplayInstructionsEnum { highlight, hide }
+
+extension ActivityDisplayInstructionsEnumExt
+    on ActivityDisplayInstructionsEnum {
+  String get string {
+    switch (this) {
+      case ActivityDisplayInstructionsEnum.highlight:
+        return 'highlight';
+      case ActivityDisplayInstructionsEnum.hide:
+        return 'hide';
+    }
+  }
+}
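
As a quick illustration (not part of the diff), the string getter above is the lookup key that RelevantSpanDisplayDetails.fromJson uses further down via firstWhereOrNull. A hedged sketch of that round trip; parseDisplayInstructions is a hypothetical helper, and the fallback to hide mirrors what fromJson does:

import 'package:collection/collection.dart';
import 'package:fluffychat/pangea/enum/activity_display_instructions_enum.dart';

// Hypothetical helper (not in the PR): map a raw server string back onto the
// enum, falling back to hide for unknown values, as fromJson does below.
ActivityDisplayInstructionsEnum parseDisplayInstructions(String? raw) =>
    ActivityDisplayInstructionsEnum.values
            .firstWhereOrNull((e) => e.string == raw) ??
        ActivityDisplayInstructionsEnum.hide;

void main() {
  assert(parseDisplayInstructions('highlight') ==
      ActivityDisplayInstructionsEnum.highlight);
  assert(parseDisplayInstructions('unexpected') ==
      ActivityDisplayInstructionsEnum.hide);
}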

@@ -687,7 +687,8 @@ class PangeaMessageEvent {
     for (final itStep in originalSent!.choreo!.itSteps) {
       for (final continuance in itStep.continuances) {
-        // this seems to always be false for continuances right now
+        final List<PangeaToken> tokensToSave =
+            continuance.tokens.where((t) => t.lemma.saveVocab).toList();
         if (originalSent!.choreo!.finalMessage.contains(continuance.text)) {
           continue;
         }

@@ -695,18 +696,21 @@ class PangeaMessageEvent {
         if (continuance.wasClicked) {
           //PTODO - account for end of flow score
           if (continuance.level != ChoreoConstants.levelThresholdForGreen) {
-            uses.addAll(
-              _lemmasToVocabUses(
-                continuance.lemmas,
-                ConstructUseTypeEnum.incIt,
-              ),
-            );
+            for (final token in tokensToSave) {
+              uses.add(
+                _lemmaToVocabUse(
+                  token.lemma,
+                  ConstructUseTypeEnum.incIt,
+                ),
+              );
+            }
           }
         } else {
           if (continuance.level != ChoreoConstants.levelThresholdForGreen) {
-            uses.addAll(
-              _lemmasToVocabUses(
-                continuance.lemmas,
-                ConstructUseTypeEnum.ignIt,
-              ),
-            );
+            for (final token in tokensToSave) {
+              uses.add(
+                _lemmaToVocabUse(
+                  token.lemma,
+                  ConstructUseTypeEnum.ignIt,
+                ),
+              );
@@ -714,6 +718,7 @@ class PangeaMessageEvent {
           }
         }
       }
+    }

     return uses;
   }
@@ -728,14 +733,16 @@ class PangeaMessageEvent {
     }
     // for each token, record whether selected in ga, ta, or wa
-    for (final token in originalSent!.tokens!) {
-      uses.addAll(_getVocabUseForToken(token));
+    for (final token in originalSent!.tokens!
+        .where((token) => token.lemma.saveVocab)
+        .toList()) {
+      uses.add(_getVocabUseForToken(token));
     }
     return uses;
   }

-  /// Returns a list of [OneConstructUse] objects for the given [token]
+  /// Returns a [OneConstructUse] for the given [token]
   /// If there is no [originalSent] or [originalSent.choreo], the [token] is
   /// considered to be a [ConstructUseTypeEnum.wa] as long as it matches the target language.
   /// Later on, we may want to consider putting it in some category of like 'pending'
@@ -744,11 +751,11 @@ class PangeaMessageEvent {
   /// If the [token] is in the [originalSent.choreo.acceptedOrIgnoredMatch.choices],
   /// it is considered to be a [ConstructUseTypeEnum.corIt].
   /// If the [token] is not included in any choreoStep, it is considered to be a [ConstructUseTypeEnum.wa].
-  List<OneConstructUse> _getVocabUseForToken(PangeaToken token) {
+  OneConstructUse _getVocabUseForToken(PangeaToken token) {
     if (originalSent?.choreo == null) {
       final bool inUserL2 = originalSent?.langCode == l2Code;
-      return _lemmasToVocabUses(
-        token.lemmas,
+      return _lemmaToVocabUse(
+        token.lemma,
         inUserL2 ? ConstructUseTypeEnum.wa : ConstructUseTypeEnum.unk,
       );
     }
@@ -763,31 +770,25 @@ class PangeaMessageEvent {
                 step.text.contains(r.value),
               ) ??
           false)) {
-        return _lemmasToVocabUses(token.lemmas, ConstructUseTypeEnum.ga);
+        return _lemmaToVocabUse(token.lemma, ConstructUseTypeEnum.ga);
       }
       if (step.itStep != null) {
         final bool pickedThroughIT =
            step.itStep!.chosenContinuance?.text.contains(token.text.content) ??
                 false;
        if (pickedThroughIT) {
-          return _lemmasToVocabUses(token.lemmas, ConstructUseTypeEnum.corIt);
+          return _lemmaToVocabUse(token.lemma, ConstructUseTypeEnum.corIt);
           //PTODO - check if added via custom input in IT flow
         }
       }
     }
-    return _lemmasToVocabUses(token.lemmas, ConstructUseTypeEnum.wa);
+    return _lemmaToVocabUse(token.lemma, ConstructUseTypeEnum.wa);
   }

-  /// Convert a list of [lemmas] into a list of vocab uses
-  /// with the given [type]
-  List<OneConstructUse> _lemmasToVocabUses(
-    List<Lemma> lemmas,
+  OneConstructUse _lemmaToVocabUse(
+    Lemma lemma,
     ConstructUseTypeEnum type,
-  ) {
-    final List<OneConstructUse> uses = [];
-    for (final lemma in lemmas) {
-      if (lemma.saveVocab) {
-        uses.add(
-          OneConstructUse(
-            useType: type,
-            chatId: event.roomId!,
+  ) =>
+      OneConstructUse(
+        useType: type,
+        chatId: event.roomId!,
@@ -796,12 +797,7 @@ class PangeaMessageEvent {
-            form: lemma.form,
-            msgId: event.eventId,
-            constructType: ConstructTypeEnum.vocab,
-          ),
-        );
-      }
-    }
-    return uses;
-  }
+        form: lemma.form,
+        msgId: event.eventId,
+        constructType: ConstructTypeEnum.vocab,
+      );

   /// get construct uses of type grammar for the message
   List<OneConstructUse> get _grammarConstructUses {
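
The shape of the refactor above: the saveVocab filter moves from inside the old _lemmasToVocabUses helper to the call sites (the tokensToSave list and the .where on originalSent!.tokens!), so the new _lemmaToVocabUse maps exactly one Lemma to one OneConstructUse. A minimal standalone sketch of that filter-then-map pattern; collectSaveableLemmas is a hypothetical stand-in for the private helpers, and the model import paths are the ones shown elsewhere in this diff:

import 'package:fluffychat/pangea/models/lemma.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';

// Hypothetical stand-in for the per-token loops in PangeaMessageEvent:
// keep only tokens whose lemma the server marked as worth saving.
List<Lemma> collectSaveableLemmas(List<PangeaToken> tokens) =>
    tokens.where((t) => t.lemma.saveVocab).map((t) => t.lemma).toList();

void main() {
  final tokens = [
    PangeaToken(
      text: PangeaTokenText(content: 'be', offset: 5, length: 2),
      lemma: Lemma(text: 'be', saveVocab: true, form: 'be'),
      pos: 'VERB',
      morph: {},
    ),
    PangeaToken(
      text: PangeaTokenText(content: '.', offset: 21, length: 1),
      lemma: Lemma(text: '.', saveVocab: false, form: '.'),
      pos: 'PUNCT',
      morph: {},
    ),
  ];
  // only the saveVocab token survives
  assert(collectSaveableLemmas(tokens).single.text == 'be');
}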

@@ -2,11 +2,10 @@ import 'package:collection/collection.dart';
 import 'package:fluffychat/pangea/constants/choreo_constants.dart';
 import 'package:fluffychat/pangea/constants/model_keys.dart';
 import 'package:fluffychat/pangea/extensions/my_list_extension.dart';
+import 'package:fluffychat/pangea/models/pangea_token_model.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_gen/gen_l10n/l10n.dart';
-import 'lemma.dart';

 class ITResponseModel {
   String fullTextTranslation;
   List<Continuance> continuances;
@@ -79,7 +78,7 @@ class Continuance {
   double probability;
   int level;
   String text;
-  List<Lemma> lemmas;
+  List<PangeaToken> tokens;

   /// saving this in a full json form
   String description;
@@ -99,18 +98,17 @@ class Continuance {
     required this.inDictionary,
     required this.hasInfo,
     required this.gold,
-    required this.lemmas,
+    required this.tokens,
   });

   factory Continuance.fromJson(Map<String, dynamic> json) {
-    final List<Lemma> lemmaInternal =
-        (json[ModelKey.lemma] != null && json[ModelKey.lemma] is Iterable)
-            ? (json[ModelKey.lemma] as Iterable)
-                .map<Lemma>(
-                  (e) => Lemma.fromJson(e as Map<String, dynamic>),
-                )
-                .toList()
-                .cast<Lemma>()
-            : [];
+    final List<PangeaToken> tokensInternal = (json[ModelKey.tokens] != null)
+        ? (json[ModelKey.tokens] as Iterable)
+            .map<PangeaToken>(
+              (e) => PangeaToken.fromJson(e as Map<String, dynamic>),
+            )
+            .toList()
+            .cast<PangeaToken>()
+        : [];
     return Continuance(
       probability: json['probability'].toDouble(),
@@ -122,7 +120,7 @@ class Continuance {
       wasClicked: json['clkd'] ?? false,
       hasInfo: json['has_info'] ?? false,
       gold: json['gold'] ?? false,
-      lemmas: lemmaInternal,
+      tokens: tokensInternal,
     );
   }
@@ -132,7 +130,7 @@ class Continuance {
     data['level'] = level;
     data['text'] = text;
     data['clkd'] = wasClicked;
-    data[ModelKey.lemma] = lemmas.map((e) => e.toJson()).toList();
+    data[ModelKey.tokens] = tokens.map((e) => e.toJson()).toList();
     if (!condensed) {
       data['description'] = description;
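
A hedged sketch of what the Continuance change means for payload parsing: the payload now carries full token JSON under ModelKey.tokens, each entry going through PangeaToken.fromJson, while older payloads that only carried ModelKey.lemma fall through to the empty-list branch. The parseContinuanceTokens helper below is hypothetical and simply mirrors the tokensInternal expression, assuming the package paths shown in this diff:

import 'package:fluffychat/pangea/constants/model_keys.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';

// Hypothetical mirror of the tokensInternal expression in Continuance.fromJson.
List<PangeaToken> parseContinuanceTokens(Map<String, dynamic> json) =>
    (json[ModelKey.tokens] != null)
        ? (json[ModelKey.tokens] as Iterable)
            .map<PangeaToken>(
              (e) => PangeaToken.fromJson(e as Map<String, dynamic>),
            )
            .toList()
        : [];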

@@ -8,22 +8,13 @@ class Lemma {
   /// [saveVocab] true - whether to save the lemma to the user's vocabulary
   /// vocab that are not saved: emails, urls, numbers, punctuation, etc.
+  /// server handles this determination
   final bool saveVocab;

-  /// [pos] ex "v" - part of speech of the lemma
-  /// https://universaldependencies.org/u/pos/
-  final String pos;
-
-  /// [morph] ex {} - morphological features of the lemma
-  /// https://universaldependencies.org/u/feat/
-  final Map<String, dynamic> morph;
-
   Lemma({
     required this.text,
     required this.saveVocab,
     required this.form,
-    this.pos = '',
-    this.morph = const {},
   });

   factory Lemma.fromJson(Map<String, dynamic> json) {
@@ -31,8 +22,6 @@ class Lemma {
       text: json['text'],
       saveVocab: json['save_vocab'] ?? json['saveVocab'] ?? false,
       form: json["form"] ?? json['text'],
-      pos: json['pos'] ?? '',
-      morph: json['morph'] ?? {},
     );
   }
@@ -41,8 +30,6 @@ class Lemma {
       'text': text,
       'save_vocab': saveVocab,
       'form': form,
-      'pos': pos,
-      'morph': morph,
     };
   }
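
With pos and morph lifted up to PangeaToken, a Lemma now round-trips through just text / save_vocab / form. A small sketch, assuming lemma.dart sits at the models path implied by the sibling imports in this diff:

import 'package:fluffychat/pangea/models/lemma.dart';

void main() {
  final lemma = Lemma(text: 'be', saveVocab: true, form: 'is');

  // toJson now carries only the three remaining fields:
  // {'text': 'be', 'save_vocab': true, 'form': 'is'}
  final json = lemma.toJson();

  final parsed = Lemma.fromJson(json);
  assert(parsed.text == 'be' && parsed.form == 'is' && parsed.saveVocab);
}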

@@ -1,55 +1,60 @@
 import 'dart:developer';

 import 'package:flutter/foundation.dart';
-import 'package:sentry_flutter/sentry_flutter.dart';

 import '../constants/model_keys.dart';
-import '../utils/error_handler.dart';
 import 'lemma.dart';

 class PangeaToken {
   PangeaTokenText text;
-  List<Lemma> lemmas;
+  Lemma lemma;
+
+  /// [pos] ex "VERB" - part of speech of the token
+  /// https://universaldependencies.org/u/pos/
+  final String pos;
+
+  /// [morph] ex {} - morphological features of the token
+  /// https://universaldependencies.org/u/feat/
+  final Map<String, dynamic> morph;

   PangeaToken({
     required this.text,
-    required this.lemmas,
+    required this.lemma,
+    required this.pos,
+    required this.morph,
   });

-  static getLemmas(String text, Iterable? json) {
+  static Lemma _getLemmas(String text, dynamic json) {
     if (json != null) {
-      return json
-          .map<Lemma>(
-            (e) => Lemma.fromJson(e as Map<String, dynamic>),
-          )
-          .toList()
-          .cast<Lemma>();
+      // July 24, 2024 - we're changing from a list to a single lemma and this is for backwards compatibility
+      // previously sent tokens have lists of lemmas
+      if (json is Iterable) {
+        return json
+                .map<Lemma>(
+                  (e) => Lemma.fromJson(e as Map<String, dynamic>),
+                )
+                .toList()
+                .cast<Lemma>()
+                .firstOrNull ??
+            Lemma(text: text, saveVocab: false, form: text);
+      } else {
+        return Lemma.fromJson(json);
+      }
     } else {
-      return [Lemma(text: text, saveVocab: false, form: text)];
+      // earlier still, we didn't have lemmas so this is for really old tokens
+      return Lemma(text: text, saveVocab: false, form: text);
     }
   }

   factory PangeaToken.fromJson(Map<String, dynamic> json) {
-    try {
-      final PangeaTokenText text =
-          PangeaTokenText.fromJson(json[_textKey] as Map<String, dynamic>);
-      return PangeaToken(
-        text: text,
-        lemmas: getLemmas(text.content, json[_lemmaKey]),
-      );
-    } catch (err, s) {
-      debugger(when: kDebugMode);
-      Sentry.addBreadcrumb(
-        Breadcrumb(
-          message: "PangeaToken.fromJson error",
-          data: {
-            "json": json,
-          },
-        ),
-      );
-      ErrorHandler.logError(e: err, s: s);
-      rethrow;
-    }
+    final PangeaTokenText text =
+        PangeaTokenText.fromJson(json[_textKey] as Map<String, dynamic>);
+    return PangeaToken(
+      text: text,
+      lemma: _getLemmas(text.content, json[_lemmaKey]),
+      pos: json['pos'] ?? '',
+      morph: json['morph'] ?? {},
+    );
   }

   static const String _textKey = "text";
@@ -57,7 +62,9 @@ class PangeaToken {
   Map<String, dynamic> toJson() => {
         _textKey: text.toJson(),
-        _lemmaKey: lemmas.map((e) => e.toJson()).toList(),
+        _lemmaKey: [lemma.toJson()],
+        'pos': pos,
+        'morph': morph,
       };

   int get end => text.offset + text.length;
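
The toJson above keeps writing the lemma as a one-element list, so tokens serialized by this version still parse through the legacy Iterable branch of _getLemmas, while a single-map lemma from newer responses hits the else branch. A round-trip sketch, assuming PangeaTokenText serializes its own content/offset/length as the mock data elsewhere in this diff suggests:

import 'package:fluffychat/pangea/models/lemma.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';

void main() {
  final token = PangeaToken(
    text: PangeaTokenText(content: 'runs', offset: 0, length: 4),
    lemma: Lemma(text: 'run', saveVocab: true, form: 'runs'),
    pos: 'VERB',
    morph: {'Tense': 'Pres', 'Number': 'Sing'},
  );

  // toJson -> fromJson goes through the backwards-compatible list branch.
  final roundTripped = PangeaToken.fromJson(token.toJson());
  assert(roundTripped.lemma.text == 'run');
  assert(roundTripped.pos == 'VERB');
  assert(roundTripped.morph['Tense'] == 'Pres');
}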

@@ -1,15 +1,18 @@
 import 'package:fluffychat/config/app_config.dart';
+import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_model.dart';
 import 'package:flutter/material.dart';

 class MultipleChoice {
   final String question;
   final List<String> choices;
   final String answer;
+  final RelevantSpanDisplayDetails? spanDisplayDetails;

   MultipleChoice({
     required this.question,
     required this.choices,
     required this.answer,
+    this.spanDisplayDetails,
   });

   bool isCorrect(int index) => index == correctAnswerIndex;
@@ -28,6 +31,9 @@ class MultipleChoice {
       question: json['question'] as String,
       choices: (json['choices'] as List).map((e) => e as String).toList(),
       answer: json['answer'] ?? json['correct_answer'] as String,
+      spanDisplayDetails: json['span_display_details'] != null
+          ? RelevantSpanDisplayDetails.fromJson(json['span_display_details'])
+          : null,
     );
   }
@@ -36,6 +42,7 @@ class MultipleChoice {
       'question': question,
       'choices': choices,
       'answer': answer,
+      'span_display_details': spanDisplayDetails,
     };
   }
 }
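
A sketch of parsing a multiple-choice payload that carries the new span details. The JSON literal is illustrative rather than a documented server response, and it assumes MultipleChoice lives in multiple_choice_activity_model.dart (as the import in practice_activity_model.dart suggests) and exposes the fromJson factory shown in the hunk:

import 'package:fluffychat/pangea/enum/activity_display_instructions_enum.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/multiple_choice_activity_model.dart';

void main() {
  final choice = MultipleChoice.fromJson({
    'question': 'Which form fits the blank?',
    'choices': ['is', 'are', 'be'],
    'answer': 'is',
    'span_display_details': {
      'offset': 5,
      'length': 2,
      'display_instructions': 'hide',
    },
  });

  // span details are optional; here they parse into the hide instruction
  assert(choice.spanDisplayDetails?.displayInstructions ==
      ActivityDisplayInstructionsEnum.hide);
}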

@@ -1,5 +1,7 @@
 import 'dart:developer';

+import 'package:collection/collection.dart';
+import 'package:fluffychat/pangea/enum/activity_display_instructions_enum.dart';
 import 'package:fluffychat/pangea/enum/activity_type_enum.dart';
 import 'package:fluffychat/pangea/enum/construct_type_enum.dart';
 import 'package:fluffychat/pangea/models/practice_activities.dart/multiple_choice_activity_model.dart';
@@ -279,4 +281,58 @@ class PracticeActivityModel {
       'free_response': freeResponse?.toJson(),
     };
   }
+
+  RelevantSpanDisplayDetails? getRelevantSpanDisplayDetails() {
+    switch (activityType) {
+      case ActivityTypeEnum.multipleChoice:
+        return multipleChoice?.spanDisplayDetails;
+      case ActivityTypeEnum.listening:
+        return null;
+      case ActivityTypeEnum.speaking:
+        return null;
+      case ActivityTypeEnum.freeResponse:
+        return null;
+      default:
+        debugger(when: kDebugMode);
+        return null;
+    }
+  }
+}
+
+/// For those activities with a relevant span, this class will hold the details
+/// of the span and how it should be displayed
+/// e.g. hide the span for conjugation activities
+class RelevantSpanDisplayDetails {
+  final int offset;
+  final int length;
+  final ActivityDisplayInstructionsEnum displayInstructions;
+
+  RelevantSpanDisplayDetails({
+    required this.offset,
+    required this.length,
+    required this.displayInstructions,
+  });
+
+  factory RelevantSpanDisplayDetails.fromJson(Map<String, dynamic> json) {
+    final ActivityDisplayInstructionsEnum? display =
+        ActivityDisplayInstructionsEnum.values.firstWhereOrNull(
+      (e) => e.string == json['display_instructions'],
+    );
+    if (display == null) {
+      debugger(when: kDebugMode);
+    }
+    return RelevantSpanDisplayDetails(
+      offset: json['offset'] as int,
+      length: json['length'] as int,
+      displayInstructions: display ?? ActivityDisplayInstructionsEnum.hide,
+    );
+  }
+
+  Map<String, dynamic> toJson() {
+    return {
+      'offset': offset,
+      'length': length,
+      'display_instructions': displayInstructions,
+    };
+  }
+}
 }
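
Not part of the PR, but a sketch of how a caller might combine getRelevantSpanDisplayDetails with the display-instructions enum to prepare an activity's message text. applySpanDisplay and the underscore/bracket rendering are assumptions, not the app's actual UI behavior:

import 'package:fluffychat/pangea/enum/activity_display_instructions_enum.dart';
import 'package:fluffychat/pangea/models/practice_activities.dart/practice_activity_model.dart';

// Hypothetical helper: apply a span's display instructions to a message string.
String applySpanDisplay(String messageText, RelevantSpanDisplayDetails span) {
  final target = messageText.substring(span.offset, span.offset + span.length);
  final replacement =
      span.displayInstructions == ActivityDisplayInstructionsEnum.hide
          ? '_' * target.length // e.g. blank out the verb for a conjugation activity
          : '[$target]'; // highlight the span instead of hiding it
  return messageText.replaceRange(
    span.offset,
    span.offset + span.length,
    replacement,
  );
}

void main() {
  final span = RelevantSpanDisplayDetails(
    offset: 5,
    length: 2,
    displayInstructions: ActivityDisplayInstructionsEnum.hide,
  );
  assert(applySpanDisplay('This be a sample text', span) ==
      'This __ a sample text');
}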

@@ -47,23 +47,33 @@ class IgcRepo {
         tokens: [
           PangeaToken(
             text: PangeaTokenText(content: "This", offset: 0, length: 4),
-            lemmas: [Lemma(form: "This", text: "this", saveVocab: true)],
+            lemma: Lemma(form: "This", text: "this", saveVocab: true),
+            pos: "DET",
+            morph: {},
           ),
           PangeaToken(
             text: PangeaTokenText(content: "be", offset: 5, length: 2),
-            lemmas: [Lemma(form: "be", text: "be", saveVocab: true)],
+            lemma: Lemma(form: "be", text: "be", saveVocab: true),
+            pos: "VERB",
+            morph: {},
           ),
           PangeaToken(
             text: PangeaTokenText(content: "a", offset: 8, length: 1),
-            lemmas: [],
+            lemma: Lemma(form: "a", text: "a", saveVocab: true),
+            pos: "DET",
+            morph: {},
           ),
           PangeaToken(
             text: PangeaTokenText(content: "sample", offset: 10, length: 6),
-            lemmas: [],
+            lemma: Lemma(form: "sample", text: "sample", saveVocab: true),
+            pos: "NOUN",
+            morph: {},
           ),
           PangeaToken(
             text: PangeaTokenText(content: "text", offset: 17, length: 4),
-            lemmas: [],
+            lemma: Lemma(form: "text", text: "text", saveVocab: true),
+            pos: "NOUN",
+            morph: {},
           ),
         ],
         matches: [
