cards-native/Sources/Core/Sync/GradeQueue.swift
Till JS 3b861af3fb v0.3.0 — Phase β-2 Study-Loop
Voller Lern-Flow mit Web-Parität: fällige Karten via /reviews/due
laden, flip + rate (4 Buttons + Haptic), Grades via Offline-Queue
ans Server-FSRS schicken.

- Card/Review/DueReview DTOs mit snake_case + camelCase-deckId-
  Sonderfall im embedded card-Subobjekt
- CardType-Enum (alle 7 Typen), Rating-Enum mit deutschen Labels
- Cloze-Helper 1:1-Port aus cards-domain (extractClusterIds,
  subIndexCount, clusterId, renderPrompt/Answer, hint)
- CardsAPI.dueReviews(deckId:) + gradeReview(cardId,subIndex,rating,reviewedAt)
- PendingGrade SwiftData-Model + GradeQueue (FIFO-Drain, originaler
  Timestamp bleibt, bei Netzfehler in Queue, Retry beim nächsten Drain)
- StudySession @Observable State-Machine
- CardRenderer für basic, basic-reverse, cloze; Placeholder für
  image-occlusion/audio-front/typing/multiple-choice (β-3/β-4)
- RatingBar mit UIImpactFeedbackGenerator (medium/heavy)
- StudySessionView per NavigationLink aus DeckListView
- 11 neue Tests (Cloze: 8, Review-Decoding: 3), insgesamt 17 grün

Server-authoritative FSRS bleibt — kein ts-fsrs-Port.
Endurance-Test auf realem Gerät steht aus (siehe PLAN.md).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-13 00:16:11 +02:00

91 lines
2.9 KiB
Swift

import Foundation
import ManaCore
import Observation
import SwiftData
/// Persistent offline queue for grade actions. The drain loop can be
/// triggered from the UI (on reconnect or app-foreground).
///
/// Design: grades are persisted via SwiftData before any network attempt,
/// so a crash or offline period never loses a rating. The server remains
/// authoritative for FSRS scheduling; this queue only transports grades.
@MainActor
@Observable
final class GradeQueue {
    /// True while a drain pass is in flight; guards against re-entrant drains.
    private(set) var isDraining = false
    /// Message of the most recent drain failure, or nil after a clean drain.
    private(set) var lastDrainError: String?

    private let api: CardsAPI
    private let context: ModelContext

    init(api: CardsAPI, context: ModelContext) {
        self.api = api
        self.context = context
    }

    /// Enqueue a grade and immediately attempt to send it. On failure the
    /// entry stays in the queue and is retried by the next drain.
    ///
    /// - Parameters:
    ///   - cardId: Identifier of the graded card.
    ///   - subIndex: Sub-card index (e.g. cloze cluster); 0 for simple cards.
    ///   - rating: The user's rating for this review.
    ///   - reviewedAt: Moment the review happened; preserved through retries
    ///     so the server sees the original timestamp, not the retry time.
    func submit(cardId: String, subIndex: Int, rating: Rating, reviewedAt: Date = .now) async {
        let grade = PendingGrade(
            cardId: cardId,
            subIndex: subIndex,
            rating: rating,
            reviewedAt: reviewedAt
        )
        context.insert(grade)
        // Best-effort persistence: a failed save still leaves the grade in the
        // in-memory context for this session's drain attempt.
        try? context.save()
        Log.study.info(
            "Queued grade for \(cardId, privacy: .public)/\(subIndex, privacy: .public): \(rating.rawValue, privacy: .public)"
        )
        await drain()
    }

    /// Sends all pending grades in FIFO order. On server success the entry is
    /// deleted from the queue; on a network error the loop stops and the next
    /// drain retries from the oldest remaining entry.
    func drain() async {
        guard !isDraining else { return }
        isDraining = true
        defer { isDraining = false }

        // Oldest-first so server-side FSRS receives grades in review order.
        let descriptor = FetchDescriptor<PendingGrade>(
            sortBy: [SortDescriptor(\.queuedAt, order: .forward)]
        )
        let pending = (try? context.fetch(descriptor)) ?? []
        guard !pending.isEmpty else {
            lastDrainError = nil
            return
        }
        for grade in pending {
            // An entry whose stored rating no longer decodes is unrecoverable;
            // drop it so it can never wedge the queue.
            guard let rating = grade.rating else {
                context.delete(grade)
                // Fix: persist the deletion. Previously this branch never
                // saved, so corrupt entries could resurface on the next fetch.
                try? context.save()
                continue
            }
            do {
                _ = try await api.gradeReview(
                    cardId: grade.cardId,
                    subIndex: grade.subIndex,
                    rating: rating,
                    reviewedAt: grade.reviewedAt
                )
                context.delete(grade)
                try? context.save()
            } catch {
                let msg = (error as? LocalizedError)?.errorDescription ?? String(describing: error)
                grade.lastTryAt = .now
                grade.lastError = msg
                try? context.save()
                lastDrainError = msg
                Log.study.notice(
                    "Drain stopped for \(grade.cardId, privacy: .public)/\(grade.subIndex, privacy: .public): \(msg, privacy: .public)"
                )
                // Stop on first failure; FIFO order is preserved for the retry.
                return
            }
        }
        lastDrainError = nil
        Log.study.info("Drain complete")
    }

    /// Number of grades currently waiting to be sent.
    func pendingCount() -> Int {
        let descriptor = FetchDescriptor<PendingGrade>()
        return (try? context.fetchCount(descriptor)) ?? 0
    }
}