From 96a53f936e4fffe5686e1889abb2cdb35db8da74 Mon Sep 17 00:00:00 2001 From: RumovZ Date: Sat, 4 Nov 2023 08:21:08 +0100 Subject: [PATCH 1/6] Allow im-/exporting with or without deck configs Closes #2777. --- ftl/core/exporting.ftl | 1 + ftl/core/importing.ftl | 4 +++ proto/anki/import_export.proto | 10 ++++-- pylib/anki/collection.py | 13 ++----- qt/aqt/forms/exporting.ui | 10 ++++++ qt/aqt/import_export/exporting.py | 24 ++++++++++--- rslib/src/config/bool.rs | 1 + rslib/src/import_export/gather.rs | 26 ++++++++------ .../src/import_export/package/apkg/export.rs | 30 +++++++--------- .../package/apkg/import/decks.rs | 35 +++++++++---------- .../import_export/package/apkg/import/mod.rs | 10 ++++-- .../package/apkg/import/notes.rs | 7 ++-- rslib/src/import_export/package/apkg/tests.rs | 10 ++++-- rslib/src/import_export/package/mod.rs | 1 + rslib/src/import_export/service.rs | 7 ++-- .../ImportAnkiPackagePage.svelte | 14 ++++++++ 16 files changed, 129 insertions(+), 74 deletions(-) diff --git a/ftl/core/exporting.ftl b/ftl/core/exporting.ftl index 5bab81296c4..5f0914b1748 100644 --- a/ftl/core/exporting.ftl +++ b/ftl/core/exporting.ftl @@ -13,6 +13,7 @@ exporting-include = Include: exporting-include-html-and-media-references = Include HTML and media references exporting-include-media = Include media exporting-include-scheduling-information = Include scheduling information +exporting-include-deck-configs = Include deck options exporting-include-tags = Include tags exporting-support-older-anki-versions = Support older Anki versions (slower/larger files) exporting-notes-in-plain-text = Notes in Plain Text diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl index 05659bfed14..2ef6ddc027b 100644 --- a/ftl/core/importing.ftl +++ b/ftl/core/importing.ftl @@ -51,10 +51,14 @@ importing-notes-skipped-update-due-to-notetype = Notes not updated, as notetype importing-notes-updated-as-file-had-newer = Notes updated, as file had newer version: { $val } importing-include-reviews = Include reviews importing-also-import-progress = Also import any learning progress +importing-with-deck-configs = Also import any deck options importing-updates = Updates importing-include-reviews-help = If enabled, any previous reviews that the deck sharer included will also be imported. Otherwise, all cards will be imported as new cards. +importing-with-deck-configs-help = + If enabled, any deck options that the deck sharer included will also be imported. + Otherwise, all decks will be assigned the default preset. 
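For reference, the behaviour the two strings above describe is carried by the new `with_deck_configs` flag that this patch adds to `ImportAnkiPackageOptions` (field 5 in `proto/anki/import_export.proto`, further down in this patch). A minimal, hedged Python sketch of constructing that options message via the generated `anki.import_export_pb2` module, the same module `pylib/anki/collection.py` already draws its message aliases from; the field names come from the proto diff, everything else is illustrative.

```python
# Hedged sketch: build the new import options message.
# with_deck_configs=True imports the sharer's deck presets;
# False assigns imported decks the default preset (per the help string above).
from anki import import_export_pb2

options = import_export_pb2.ImportAnkiPackageOptions(
    with_scheduling=True,    # existing field 4
    with_deck_configs=True,  # new field 5 added by this patch
)
assert options.with_deck_configs
```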
importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip) importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz) # the '|' character diff --git a/proto/anki/import_export.proto b/proto/anki/import_export.proto index 0ecfc7b3be3..88a7ad163a5 100644 --- a/proto/anki/import_export.proto +++ b/proto/anki/import_export.proto @@ -58,6 +58,7 @@ message ImportAnkiPackageOptions { ImportAnkiPackageUpdateCondition update_notes = 2; ImportAnkiPackageUpdateCondition update_notetypes = 3; bool with_scheduling = 4; + bool with_deck_configs = 5; } message ImportAnkiPackageRequest { @@ -88,10 +89,15 @@ message ImportResponse { message ExportAnkiPackageRequest { string out_path = 1; - bool with_scheduling = 2; + ExportAnkiPackageOptions options = 2; + ExportLimit limit = 3; +} + +message ExportAnkiPackageOptions { + bool with_scheduling = 1; + bool with_deck_configs = 2; bool with_media = 3; bool legacy = 4; - ExportLimit limit = 5; } message PackageMetadata { diff --git a/pylib/anki/collection.py b/pylib/anki/collection.py index c1993130f12..29a0bdc4c39 100644 --- a/pylib/anki/collection.py +++ b/pylib/anki/collection.py @@ -42,6 +42,7 @@ StripHtmlMode = card_rendering_pb2.StripHtmlRequest ImportLogWithChanges = import_export_pb2.ImportResponse ImportAnkiPackageRequest = import_export_pb2.ImportAnkiPackageRequest +ExportAnkiPackageOptions = import_export_pb2.ExportAnkiPackageOptions ImportCsvRequest = import_export_pb2.ImportCsvRequest CsvMetadata = import_export_pb2.CsvMetadata DupeResolution = CsvMetadata.DupeResolution @@ -361,19 +362,11 @@ def import_anki_package( return ImportLogWithChanges.FromString(log) def export_anki_package( - self, - *, - out_path: str, - limit: ExportLimit, - with_scheduling: bool, - with_media: bool, - legacy_support: bool, + self, *, out_path: str, options: ExportAnkiPackageOptions, limit: ExportLimit ) -> int: return self._backend.export_anki_package( out_path=out_path, - with_scheduling=with_scheduling, - with_media=with_media, - legacy=legacy_support, + options=options, limit=pb_export_limit(limit), ) diff --git a/qt/aqt/forms/exporting.ui b/qt/aqt/forms/exporting.ui index cb150f6e7a4..078bab6f04c 100644 --- a/qt/aqt/forms/exporting.ui +++ b/qt/aqt/forms/exporting.ui @@ -67,6 +67,16 @@ + + + + exporting_include_deck_configs + + + true + + + diff --git a/qt/aqt/import_export/exporting.py b/qt/aqt/import_export/exporting.py index 52985606118..f06d7a47aad 100644 --- a/qt/aqt/import_export/exporting.py +++ b/qt/aqt/import_export/exporting.py @@ -12,7 +12,13 @@ import aqt.forms import aqt.main -from anki.collection import DeckIdLimit, ExportLimit, NoteIdsLimit, Progress +from anki.collection import ( + DeckIdLimit, + ExportAnkiPackageOptions, + ExportLimit, + NoteIdsLimit, + Progress, +) from anki.decks import DeckId, DeckNameId from anki.notes import NoteId from aqt import gui_hooks @@ -90,6 +96,9 @@ def setup(self, did: DeckId | None) -> None: def exporter_changed(self, idx: int) -> None: self.exporter = self.exporter_classes[idx]() self.frm.includeSched.setVisible(self.exporter.show_include_scheduling) + self.frm.include_deck_configs.setVisible( + self.exporter.show_include_deck_configs + ) self.frm.includeMedia.setVisible(self.exporter.show_include_media) self.frm.includeTags.setVisible(self.exporter.show_include_tags) self.frm.includeHTML.setVisible(self.exporter.show_include_html) @@ -137,6 +146,7 @@ def options(self, out_path: str) -> ExportOptions: return ExportOptions( out_path=out_path, 
include_scheduling=self.frm.includeSched.isChecked(), + include_deck_configs=self.frm.include_deck_configs.isChecked(), include_media=self.frm.includeMedia.isChecked(), include_tags=self.frm.includeTags.isChecked(), include_html=self.frm.includeHTML.isChecked(), @@ -170,6 +180,7 @@ def filename(self) -> str: class ExportOptions: out_path: str include_scheduling: bool + include_deck_configs: bool include_media: bool include_tags: bool include_html: bool @@ -184,6 +195,7 @@ class Exporter(ABC): extension: str show_deck_list = False show_include_scheduling = False + show_include_deck_configs = False show_include_media = False show_include_tags = False show_include_html = False @@ -241,6 +253,7 @@ class ApkgExporter(Exporter): extension = "apkg" show_deck_list = True show_include_scheduling = True + show_include_deck_configs = True show_include_media = True show_legacy_support = True @@ -260,9 +273,12 @@ def on_success(count: int) -> None: op=lambda col: col.export_anki_package( out_path=options.out_path, limit=options.limit, - with_scheduling=options.include_scheduling, - with_media=options.include_media, - legacy_support=options.legacy_support, + options=ExportAnkiPackageOptions( + with_scheduling=options.include_scheduling, + with_deck_configs=options.include_deck_configs, + with_media=options.include_media, + legacy=options.legacy_support, + ), ), success=on_success, ).with_backend_progress(export_progress_update).run_in_background() diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs index 3d12e4e0e48..6bdf3143cb6 100644 --- a/rslib/src/config/bool.rs +++ b/rslib/src/config/bool.rs @@ -36,6 +36,7 @@ pub enum BoolKey { ShiftPositionOfExistingCards, MergeNotetypes, WithScheduling, + WithDeckConfigs, Fsrs, #[strum(to_string = "normalize_note_text")] NormalizeNoteText, diff --git a/rslib/src/import_export/gather.rs b/rslib/src/import_export/gather.rs index 5b3c153043c..9af31ceb796 100644 --- a/rslib/src/import_export/gather.rs +++ b/rslib/src/import_export/gather.rs @@ -36,6 +36,7 @@ impl ExchangeData { col: &mut Collection, search: impl TryIntoSearch, with_scheduling: bool, + with_deck_configs: bool, ) -> Result<()> { self.days_elapsed = col.timing_today()?.days_elapsed; self.creation_utc_offset = col.get_creation_utc_offset(); @@ -43,16 +44,22 @@ impl ExchangeData { self.notes = notes; let (cards, guard) = guard.col.gather_cards()?; self.cards = cards; - self.decks = guard.col.gather_decks(with_scheduling)?; + self.decks = guard + .col + .gather_decks(with_deck_configs, !with_deck_configs)?; self.notetypes = guard.col.gather_notetypes()?; self.check_ids()?; if with_scheduling { self.revlog = guard.col.gather_revlog()?; - self.deck_configs = guard.col.gather_deck_configs(&self.decks)?; } else { self.remove_scheduling_information(guard.col); }; + if with_deck_configs { + self.deck_configs = guard.col.gather_deck_configs(&self.decks)?; + } else { + self.reset_deck_config_ids_and_limits(); + } Ok(()) } @@ -80,7 +87,6 @@ impl ExchangeData { fn remove_scheduling_information(&mut self, col: &Collection) { self.remove_system_tags(); - self.reset_deck_config_ids_and_limits(); self.reset_cards(col); } @@ -183,12 +189,12 @@ impl Collection { .map(|cards| (cards, guard)) } - /// If with_scheduling, also gather all original decks of cards in filtered + /// If with_original, also gather all original decks of cards in filtered /// decks, so they don't have to be converted to regular decks on import. 
- /// If not with_scheduling, skip exporting the default deck to avoid + /// If skip_default, skip exporting the default deck to avoid /// changing the importing client's defaults. - fn gather_decks(&mut self, with_scheduling: bool) -> Result> { - let decks = if with_scheduling { + fn gather_decks(&mut self, with_original: bool, skip_default: bool) -> Result> { + let decks = if with_original { self.storage.get_decks_and_original_for_search_cards() } else { self.storage.get_decks_for_search_cards() @@ -197,7 +203,7 @@ impl Collection { Ok(decks .into_iter() .chain(parents) - .filter(|deck| with_scheduling || deck.id != DeckId(1)) + .filter(|deck| !(skip_default && deck.id.0 == 1)) .collect()) } @@ -263,7 +269,7 @@ mod test { let mut col = Collection::new(); let note = NoteAdder::basic(&mut col).add(&mut col); - data.gather_data(&mut col, SearchNode::WholeCollection, true) + data.gather_data(&mut col, SearchNode::WholeCollection, true, true) .unwrap(); assert_eq!(data.notes, [note]); @@ -280,7 +286,7 @@ mod test { col.add_note_only_with_id_undoable(&mut note).unwrap(); assert!(data - .gather_data(&mut col, SearchNode::WholeCollection, true) + .gather_data(&mut col, SearchNode::WholeCollection, true, true) .is_err()); } } diff --git a/rslib/src/import_export/package/apkg/export.rs b/rslib/src/import_export/package/apkg/export.rs index 6bf16f0a881..eeb60a36951 100644 --- a/rslib/src/import_export/package/apkg/export.rs +++ b/rslib/src/import_export/package/apkg/export.rs @@ -14,6 +14,7 @@ use crate::collection::CollectionBuilder; use crate::import_export::gather::ExchangeData; use crate::import_export::package::colpkg::export::export_collection; use crate::import_export::package::media::MediaIter; +use crate::import_export::package::ExportAnkiPackageOptions; use crate::import_export::package::Meta; use crate::import_export::ExportProgress; use crate::prelude::*; @@ -21,14 +22,11 @@ use crate::progress::ThrottlingProgressHandler; impl Collection { /// Returns number of exported notes. 
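The Python-level counterpart of this export path is the reworked `Collection.export_anki_package()` shown earlier in the patch (`pylib/anki/collection.py`), which now takes a single `ExportAnkiPackageOptions` message instead of separate booleans. A hedged usage sketch: the keyword names come from the new signature and the option fields from the proto diff, while the collection path is a placeholder and the `DeckIdLimit` constructor argument is an assumption.

```python
# Hedged sketch of the new keyword-only export API.
from anki.collection import Collection, DeckIdLimit, ExportAnkiPackageOptions
from anki.decks import DeckId

col = Collection("/path/to/collection.anki2")  # placeholder path
note_count = col.export_anki_package(
    out_path="/tmp/shared-deck.apkg",
    options=ExportAnkiPackageOptions(
        with_scheduling=True,
        with_deck_configs=False,  # leave presets out; importers keep their own defaults
        with_media=True,
        legacy=False,
    ),
    limit=DeckIdLimit(DeckId(1)),  # assumed shape: restrict the export to one deck
)
print(f"exported {note_count} notes")  # returns the number of exported notes
col.close()
```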
- #[allow(clippy::too_many_arguments)] pub fn export_apkg( &mut self, out_path: impl AsRef, + options: ExportAnkiPackageOptions, search: impl TryIntoSearch, - with_scheduling: bool, - with_media: bool, - legacy: bool, media_fn: Option) -> MediaIter>>, ) -> Result { let mut progress = self.new_progress_handler(); @@ -38,19 +36,13 @@ impl Collection { .path() .to_str() .or_invalid("non-unicode filename")?; - let meta = if legacy { + let meta = if options.legacy { Meta::new_legacy() } else { Meta::new() }; - let data = self.export_into_collection_file( - &meta, - temp_col_path, - search, - &mut progress, - with_scheduling, - with_media, - )?; + let data = + self.export_into_collection_file(&meta, temp_col_path, options, search, &mut progress)?; progress.set(ExportProgress::File)?; let media = if let Some(media_fn) = media_fn { @@ -77,15 +69,19 @@ impl Collection { &mut self, meta: &Meta, path: &str, + options: ExportAnkiPackageOptions, search: impl TryIntoSearch, progress: &mut ThrottlingProgressHandler, - with_scheduling: bool, - with_media: bool, ) -> Result { let mut data = ExchangeData::default(); progress.set(ExportProgress::Gathering)?; - data.gather_data(self, search, with_scheduling)?; - if with_media { + data.gather_data( + self, + search, + options.with_scheduling, + options.with_deck_configs, + )?; + if options.with_media { data.gather_media_names(progress)?; } diff --git a/rslib/src/import_export/package/apkg/import/decks.rs b/rslib/src/import_export/package/apkg/import/decks.rs index e0ee3b4395d..adbc50f4409 100644 --- a/rslib/src/import_export/package/apkg/import/decks.rs +++ b/rslib/src/import_export/package/apkg/import/decks.rs @@ -32,14 +32,13 @@ impl Context<'_> { pub(super) fn import_decks_and_configs( &mut self, keep_filtered: bool, - contains_scheduling: bool, ) -> Result> { let mut ctx = DeckContext::new(self.target_col, self.usn); ctx.import_deck_configs(mem::take(&mut self.data.deck_configs))?; ctx.import_decks( mem::take(&mut self.data.decks), keep_filtered, - contains_scheduling, + self.with_deck_configs, )?; Ok(ctx.imported_decks) } @@ -58,35 +57,35 @@ impl DeckContext<'_> { &mut self, mut decks: Vec, keep_filtered: bool, - contains_scheduling: bool, + keep_configs: bool, ) -> Result<()> { // ensure parents are seen before children decks.sort_unstable_by_key(|deck| deck.level()); for deck in &mut decks { - self.prepare_deck(deck, keep_filtered, contains_scheduling); + self.prepare_deck(deck, keep_filtered, keep_configs); self.import_deck(deck)?; } Ok(()) } - fn prepare_deck(&self, deck: &mut Deck, keep_filtered: bool, contains_scheduling: bool) { + fn prepare_deck(&self, deck: &mut Deck, keep_filtered: bool, keep_config: bool) { self.maybe_reparent(deck); - if !keep_filtered && deck.is_filtered() { - deck.kind = DeckKind::Normal(NormalDeck { - config_id: 1, - ..Default::default() - }); - } else if !contains_scheduling { - // reset things like today's study count and collapse state - deck.common = Default::default(); - deck.kind = match &mut deck.kind { - DeckKind::Normal(normal) => DeckKind::Normal(NormalDeck { + match &deck.kind { + DeckKind::Filtered(_) if !keep_filtered => { + deck.kind = DeckKind::Normal(NormalDeck { config_id: 1, - description: mem::take(&mut normal.description), ..Default::default() - }), - DeckKind::Filtered(_) => unreachable!(), + }) } + DeckKind::Normal(normal) if !keep_config => { + deck.kind = DeckKind::Normal(NormalDeck { + config_id: 1, + description: normal.description.clone(), + markdown_description: normal.markdown_description, + 
..Default::default() + }) + } + _ => (), } } diff --git a/rslib/src/import_export/package/apkg/import/mod.rs b/rslib/src/import_export/package/apkg/import/mod.rs index f1093d9c474..5b7f5655059 100644 --- a/rslib/src/import_export/package/apkg/import/mod.rs +++ b/rslib/src/import_export/package/apkg/import/mod.rs @@ -41,6 +41,7 @@ struct Context<'a> { merge_notetypes: bool, update_notes: UpdateCondition, update_notetypes: UpdateCondition, + with_deck_configs: bool, media_manager: MediaManager, archive: ZipArchive, meta: Meta, @@ -62,6 +63,7 @@ impl Collection { self.transact(Op::Import, |col| { col.set_config(BoolKey::MergeNotetypes, &options.merge_notetypes)?; col.set_config(BoolKey::WithScheduling, &options.with_scheduling)?; + col.set_config(BoolKey::WithDeckConfigs, &options.with_deck_configs)?; col.set_config(ConfigKey::UpdateNotes, &options.update_notes())?; col.set_config(ConfigKey::UpdateNotetypes, &options.update_notetypes())?; let mut ctx = Context::new(archive, col, options, progress)?; @@ -85,6 +87,7 @@ impl<'a> Context<'a> { SearchNode::WholeCollection, &mut progress, options.with_scheduling, + options.with_deck_configs, )?; let usn = target_col.usn()?; Ok(Self { @@ -92,6 +95,7 @@ impl<'a> Context<'a> { merge_notetypes: options.merge_notetypes, update_notes: options.update_notes(), update_notetypes: options.update_notetypes(), + with_deck_configs: options.with_deck_configs, media_manager, archive, meta, @@ -111,8 +115,7 @@ impl<'a> Context<'a> { let mut media_map = self.prepare_media()?; let note_imports = self.import_notes_and_notetypes(&mut media_map)?; let keep_filtered = self.data.enables_filtered_decks(); - let contains_scheduling = self.data.contains_scheduling(); - let imported_decks = self.import_decks_and_configs(keep_filtered, contains_scheduling)?; + let imported_decks = self.import_decks_and_configs(keep_filtered)?; self.import_cards_and_revlog( ¬e_imports.id_map, ¬etypes, @@ -132,6 +135,7 @@ impl ExchangeData { search: impl TryIntoSearch, progress: &mut ThrottlingProgressHandler, with_scheduling: bool, + with_deck_configs: bool, ) -> Result { let tempfile = collection_to_tempfile(meta, archive)?; let mut col = CollectionBuilder::new(tempfile.path()).build()?; @@ -140,7 +144,7 @@ impl ExchangeData { progress.set(ImportProgress::Gathering)?; let mut data = ExchangeData::default(); - data.gather_data(&mut col, search, with_scheduling)?; + data.gather_data(&mut col, search, with_scheduling, with_deck_configs)?; Ok(data) } diff --git a/rslib/src/import_export/package/apkg/import/notes.rs b/rslib/src/import_export/package/apkg/import/notes.rs index 49633fba791..b41237989a6 100644 --- a/rslib/src/import_export/package/apkg/import/notes.rs +++ b/rslib/src/import_export/package/apkg/import/notes.rs @@ -602,6 +602,7 @@ impl Notetype { #[cfg(test)] mod test { + use anki_proto::import_export::ExportAnkiPackageOptions; use anki_proto::import_export::ImportAnkiPackageOptions; use tempfile::TempDir; @@ -961,7 +962,7 @@ mod test { .add(&mut src); let temp_dir = TempDir::new()?; let path = temp_dir.path().join("foo.apkg"); - src.export_apkg(&path, "", false, false, false, None)?; + src.export_apkg(&path, ExportAnkiPackageOptions::default(), "", None)?; let mut dst = CollectionBuilder::new(temp_dir.path().join("dst.anki2")) .with_desktop_media_paths() @@ -980,7 +981,7 @@ mod test { // importing again with merge disabled will fail for the exisitng note, // but the new one will be added with an extra notetype assert_eq!(dst.storage.get_all_notetype_names().unwrap().len(), 7); - 
src.export_apkg(&path, "", false, false, false, None)?; + src.export_apkg(&path, ExportAnkiPackageOptions::default(), "", None)?; assert_eq!( dst.import_apkg(&path, ImportAnkiPackageOptions::default())? .output @@ -992,7 +993,7 @@ mod test { // if enabling merge, it should succeed and remove the empty notetype, remapping // its note - src.export_apkg(&path, "", false, false, false, None)?; + src.export_apkg(&path, ExportAnkiPackageOptions::default(), "", None)?; assert_eq!( dst.import_apkg( &path, diff --git a/rslib/src/import_export/package/apkg/tests.rs b/rslib/src/import_export/package/apkg/tests.rs index 47b322722a4..9c0e15ecab8 100644 --- a/rslib/src/import_export/package/apkg/tests.rs +++ b/rslib/src/import_export/package/apkg/tests.rs @@ -10,6 +10,7 @@ use std::io::Write; use anki_io::read_file; use anki_proto::import_export::ImportAnkiPackageOptions; +use crate::import_export::package::ExportAnkiPackageOptions; use crate::media::files::sha1_of_data; use crate::media::MediaManager; use crate::prelude::*; @@ -44,10 +45,13 @@ fn roundtrip_inner(legacy: bool) { src_col .export_apkg( &apkg_path, + ExportAnkiPackageOptions { + with_scheduling: true, + with_deck_configs: true, + with_media: true, + legacy, + }, SearchNode::from_deck_name("parent::sample"), - true, - true, - legacy, None, ) .unwrap(); diff --git a/rslib/src/import_export/package/mod.rs b/rslib/src/import_export/package/mod.rs index b99a38ddf45..935fc7e9cac 100644 --- a/rslib/src/import_export/package/mod.rs +++ b/rslib/src/import_export/package/mod.rs @@ -7,6 +7,7 @@ mod media; mod meta; use anki_proto::import_export::media_entries::MediaEntry; +pub use anki_proto::import_export::ExportAnkiPackageOptions; pub use anki_proto::import_export::ImportAnkiPackageOptions; pub use anki_proto::import_export::ImportAnkiPackageUpdateCondition as UpdateCondition; use anki_proto::import_export::MediaEntries; diff --git a/rslib/src/import_export/service.rs b/rslib/src/import_export/service.rs index c51c599068c..c324ebf54d0 100644 --- a/rslib/src/import_export/service.rs +++ b/rslib/src/import_export/service.rs @@ -22,6 +22,7 @@ impl crate::services::ImportExportService for Collection { Ok(anki_proto::import_export::ImportAnkiPackageOptions { merge_notetypes: self.get_config_bool(BoolKey::MergeNotetypes), with_scheduling: self.get_config_bool(BoolKey::WithScheduling), + with_deck_configs: self.get_config_bool(BoolKey::WithDeckConfigs), update_notes: self.get_update_notes() as i32, update_notetypes: self.get_update_notetypes() as i32, }) @@ -33,10 +34,8 @@ impl crate::services::ImportExportService for Collection { ) -> Result { self.export_apkg( &input.out_path, - SearchNode::from(input.limit.unwrap_or_default()), - input.with_scheduling, - input.with_media, - input.legacy, + input.options.unwrap_or_default(), + input.limit.unwrap_or_default(), None, ) .map(Into::into) diff --git a/ts/import-anki-package/ImportAnkiPackagePage.svelte b/ts/import-anki-package/ImportAnkiPackagePage.svelte index b0fc4ff8e45..43cbe57b301 100644 --- a/ts/import-anki-package/ImportAnkiPackagePage.svelte +++ b/ts/import-anki-package/ImportAnkiPackagePage.svelte @@ -29,6 +29,11 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html help: tr.importingIncludeReviewsHelp(), url: HelpPage.PackageImporting.scheduling, }, + withDeckConfigs: { + title: tr.importingWithDeckConfigs(), + help: tr.importingWithDeckConfigsHelp(), + url: HelpPage.PackageImporting.scheduling, + }, mergeNotetypes: { title: tr.importingMergeNotetypes(), help: 
tr.importingMergeNotetypesHelp(), @@ -84,6 +89,15 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + + + openHelpModal(Object.keys(settings).indexOf("withDeckConfigs"))} + > + {settings.withDeckConfigs.title} + + +
{tr.importingUpdates()} From a61109ecc4a9673b4377d2a051351157560ce1ed Mon Sep 17 00:00:00 2001 From: RumovZ Date: Sun, 5 Nov 2023 09:06:39 +0100 Subject: [PATCH 2/6] Enable webengine remote debugging in launch.json --- .vscode.dist/launch.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.vscode.dist/launch.json b/.vscode.dist/launch.json index dd1a9dcfd57..34028b852b1 100644 --- a/.vscode.dist/launch.json +++ b/.vscode.dist/launch.json @@ -18,7 +18,9 @@ "env": { "PYTHONWARNINGS": "default", "PYTHONPYCACHEPREFIX": "out/pycache", - "ANKIDEV": "1" + "ANKIDEV": "1", + "QTWEBENGINE_REMOTE_DEBUGGING": "8080", + "QTWEBENGINE_CHROMIUM_FLAGS": "--remote-allow-origins=http://localhost:8080" }, "justMyCode": true, "preLaunchTask": "ninja" From 6639657a7a1cc47dcb246133fd98db276953b14e Mon Sep 17 00:00:00 2001 From: RumovZ Date: Thu, 9 Nov 2023 21:10:02 +0100 Subject: [PATCH 3/6] Reset deck limits and counts based on scheduling Also: - Fix `deck.common` not being reset. - Apply all logic only depending on the source collection in the gathering stage. - Skip checking for scheduling and only act based on whether the call wants scheduling. Preservation of filtered decks also depends on all original decks being included. - Fix check_ids() not covering revlog. --- rslib/src/import_export/gather.rs | 82 ++++++++++++++----- .../package/apkg/import/cards.rs | 20 +---- .../package/apkg/import/decks.rs | 43 ++-------- .../import_export/package/apkg/import/mod.rs | 28 +------ 4 files changed, 72 insertions(+), 101 deletions(-) diff --git a/rslib/src/import_export/gather.rs b/rslib/src/import_export/gather.rs index 9af31ceb796..817e585a9a9 100644 --- a/rslib/src/import_export/gather.rs +++ b/rslib/src/import_export/gather.rs @@ -9,6 +9,7 @@ use itertools::Itertools; use super::ExportProgress; use crate::decks::immediate_parent_name; +use crate::decks::NormalDeck; use crate::latex::extract_latex; use crate::prelude::*; use crate::progress::ThrottlingProgressHandler; @@ -44,24 +45,29 @@ impl ExchangeData { self.notes = notes; let (cards, guard) = guard.col.gather_cards()?; self.cards = cards; - self.decks = guard - .col - .gather_decks(with_deck_configs, !with_deck_configs)?; + self.decks = guard.col.gather_decks(with_scheduling, !with_scheduling)?; self.notetypes = guard.col.gather_notetypes()?; - self.check_ids()?; + + // Earlier versions relied on the importer handling filtered decks by converting + // them into regular ones, so there is no guarantee that all original decks + // are included. 
+ let allow_filtered = self.contains_all_original_decks(); if with_scheduling { self.revlog = guard.col.gather_revlog()?; + if !allow_filtered { + self.restore_cards_from_filtered_decks(); + } } else { - self.remove_scheduling_information(guard.col); + self.reset_cards_and_notes(guard.col); }; + if with_deck_configs { self.deck_configs = guard.col.gather_deck_configs(&self.decks)?; - } else { - self.reset_deck_config_ids_and_limits(); } + self.reset_decks(!with_deck_configs, !with_scheduling, allow_filtered); - Ok(()) + self.check_ids() } pub(super) fn gather_media_names( @@ -85,7 +91,7 @@ impl ExchangeData { Ok(()) } - fn remove_scheduling_information(&mut self, col: &Collection) { + fn reset_cards_and_notes(&mut self, col: &Collection) { self.remove_system_tags(); self.reset_cards(col); } @@ -100,26 +106,52 @@ impl ExchangeData { } } - fn reset_deck_config_ids_and_limits(&mut self) { + fn reset_decks( + &mut self, + reset_config_ids: bool, + reset_study_info: bool, + allow_filtered: bool, + ) { for deck in self.decks.iter_mut() { - if let Ok(normal_mut) = deck.normal_mut() { - normal_mut.config_id = 1; - normal_mut.review_limit = None; - normal_mut.review_limit_today = None; - normal_mut.new_limit = None; - normal_mut.new_limit_today = None; - } else { - // filtered decks are reset at import time for legacy reasons + if reset_study_info { + deck.common = Default::default(); + } + match &mut deck.kind { + DeckKind::Normal(normal) => { + if reset_config_ids { + normal.config_id = 1; + } + if reset_study_info { + normal.extend_new = 0; + normal.extend_review = 0; + normal.review_limit = None; + normal.review_limit_today = None; + normal.new_limit = None; + normal.new_limit_today = None; + } + } + DeckKind::Filtered(_) if reset_study_info || !allow_filtered => { + deck.kind = DeckKind::Normal(NormalDeck { + config_id: 1, + ..Default::default() + }) + } + DeckKind::Filtered(_) => (), } } } + fn contains_all_original_decks(&self) -> bool { + self.cards.iter().all(|c| { + c.original_deck_id.0 == 0 || self.decks.iter().any(|d| d.id == c.original_deck_id) + }) + } + fn reset_cards(&mut self, col: &Collection) { let mut position = col.get_next_card_position(); for card in self.cards.iter_mut() { // schedule_as_new() removes cards from filtered decks, but we want to - // leave cards in their current deck, which gets converted to a regular - // deck on import + // leave cards in their current deck, which gets converted to a regular one let deck_id = card.deck_id; if card.schedule_as_new(position, true, true) { position += 1; @@ -129,6 +161,16 @@ impl ExchangeData { } } + fn restore_cards_from_filtered_decks(&mut self) { + for card in self.cards.iter_mut() { + if card.is_filtered() { + // instead of moving between decks, the deck is converted to a regular one + card.original_deck_id = card.deck_id; + card.remove_from_filtered_deck_restoring_queue(); + } + } + } + fn check_ids(&self) -> Result<()> { let tomorrow = TimestampMillis::now().adding_secs(86_400).0; if self diff --git a/rslib/src/import_export/package/apkg/import/cards.rs b/rslib/src/import_export/package/apkg/import/cards.rs index e04bbaf235b..e91695e143d 100644 --- a/rslib/src/import_export/package/apkg/import/cards.rs +++ b/rslib/src/import_export/package/apkg/import/cards.rs @@ -78,7 +78,6 @@ impl Context<'_> { notetype_map: &HashMap, remapped_templates: &HashMap, imported_decks: &HashMap, - keep_filtered: bool, ) -> Result<()> { let mut ctx = CardContext::new( self.usn, @@ -92,16 +91,16 @@ impl Context<'_> { if ctx.scheduler_version == 
SchedulerVersion::V1 { return Err(AnkiError::SchedulerUpgradeRequired); } - ctx.import_cards(mem::take(&mut self.data.cards), keep_filtered)?; + ctx.import_cards(mem::take(&mut self.data.cards))?; ctx.import_revlog(mem::take(&mut self.data.revlog)) } } impl CardContext<'_> { - fn import_cards(&mut self, mut cards: Vec, keep_filtered: bool) -> Result<()> { + fn import_cards(&mut self, mut cards: Vec) -> Result<()> { for card in &mut cards { if self.map_to_imported_note(card) && !self.card_ordinal_already_exists(card) { - self.add_card(card, keep_filtered)?; + self.add_card(card)?; } // TODO: could update existing card } @@ -133,14 +132,11 @@ impl CardContext<'_> { .contains(&(card.note_id, card.template_idx)) } - fn add_card(&mut self, card: &mut Card, keep_filtered: bool) -> Result<()> { + fn add_card(&mut self, card: &mut Card) -> Result<()> { card.usn = self.usn; self.remap_deck_ids(card); self.remap_template_index(card); card.shift_collection_relative_dates(self.collection_delta); - if !keep_filtered { - card.maybe_remove_from_filtered_deck(); - } let old_id = self.uniquify_card_id(card); self.target_col.add_card_if_unique_undoable(card)?; @@ -198,12 +194,4 @@ impl Card { fn original_due_in_days_since_collection_creation(&self) -> bool { self.ctype == CardType::Review } - - fn maybe_remove_from_filtered_deck(&mut self) { - if self.is_filtered() { - // instead of moving between decks, the deck is converted to a regular one - self.original_deck_id = self.deck_id; - self.remove_from_filtered_deck_restoring_queue(); - } - } } diff --git a/rslib/src/import_export/package/apkg/import/decks.rs b/rslib/src/import_export/package/apkg/import/decks.rs index adbc50f4409..400554095a5 100644 --- a/rslib/src/import_export/package/apkg/import/decks.rs +++ b/rslib/src/import_export/package/apkg/import/decks.rs @@ -29,17 +29,10 @@ impl<'d> DeckContext<'d> { } impl Context<'_> { - pub(super) fn import_decks_and_configs( - &mut self, - keep_filtered: bool, - ) -> Result> { + pub(super) fn import_decks_and_configs(&mut self) -> Result> { let mut ctx = DeckContext::new(self.target_col, self.usn); ctx.import_deck_configs(mem::take(&mut self.data.deck_configs))?; - ctx.import_decks( - mem::take(&mut self.data.decks), - keep_filtered, - self.with_deck_configs, - )?; + ctx.import_decks(mem::take(&mut self.data.decks))?; Ok(ctx.imported_decks) } } @@ -53,42 +46,16 @@ impl DeckContext<'_> { Ok(()) } - fn import_decks( - &mut self, - mut decks: Vec, - keep_filtered: bool, - keep_configs: bool, - ) -> Result<()> { + fn import_decks(&mut self, mut decks: Vec) -> Result<()> { // ensure parents are seen before children decks.sort_unstable_by_key(|deck| deck.level()); for deck in &mut decks { - self.prepare_deck(deck, keep_filtered, keep_configs); + self.maybe_reparent(deck); self.import_deck(deck)?; } Ok(()) } - fn prepare_deck(&self, deck: &mut Deck, keep_filtered: bool, keep_config: bool) { - self.maybe_reparent(deck); - match &deck.kind { - DeckKind::Filtered(_) if !keep_filtered => { - deck.kind = DeckKind::Normal(NormalDeck { - config_id: 1, - ..Default::default() - }) - } - DeckKind::Normal(normal) if !keep_config => { - deck.kind = DeckKind::Normal(NormalDeck { - config_id: 1, - description: normal.description.clone(), - markdown_description: normal.markdown_description, - ..Default::default() - }) - } - _ => (), - } - } - fn import_deck(&mut self, deck: &mut Deck) -> Result<()> { if let Some(original) = self.get_deck_by_name(deck)? 
{ if original.is_same_kind(deck) { @@ -224,7 +191,7 @@ mod test { DeckAdder::new("NEW PARENT::child").deck(), DeckAdder::new("new parent").deck(), ]; - ctx.import_decks(imports, false, false).unwrap(); + ctx.import_decks(imports).unwrap(); let existing_decks: HashSet<_> = ctx .target_col .get_all_deck_names(true) diff --git a/rslib/src/import_export/package/apkg/import/mod.rs b/rslib/src/import_export/package/apkg/import/mod.rs index 5b7f5655059..9c8e707ccf8 100644 --- a/rslib/src/import_export/package/apkg/import/mod.rs +++ b/rslib/src/import_export/package/apkg/import/mod.rs @@ -6,7 +6,6 @@ mod decks; mod media; mod notes; -use std::collections::HashSet; use std::fs::File; use std::path::Path; @@ -41,7 +40,6 @@ struct Context<'a> { merge_notetypes: bool, update_notes: UpdateCondition, update_notetypes: UpdateCondition, - with_deck_configs: bool, media_manager: MediaManager, archive: ZipArchive, meta: Meta, @@ -95,7 +93,6 @@ impl<'a> Context<'a> { merge_notetypes: options.merge_notetypes, update_notes: options.update_notes(), update_notetypes: options.update_notetypes(), - with_deck_configs: options.with_deck_configs, media_manager, archive, meta, @@ -114,14 +111,12 @@ impl<'a> Context<'a> { .collect(); let mut media_map = self.prepare_media()?; let note_imports = self.import_notes_and_notetypes(&mut media_map)?; - let keep_filtered = self.data.enables_filtered_decks(); - let imported_decks = self.import_decks_and_configs(keep_filtered)?; + let imported_decks = self.import_decks_and_configs()?; self.import_cards_and_revlog( ¬e_imports.id_map, ¬etypes, ¬e_imports.remapped_templates, &imported_decks, - keep_filtered, )?; self.copy_media(&mut media_map)?; Ok(note_imports.log) @@ -148,27 +143,6 @@ impl ExchangeData { Ok(data) } - - fn enables_filtered_decks(&self) -> bool { - // Earlier versions relied on the importer handling filtered decks by converting - // them into regular ones, so there is no guarantee that all original decks - // are included. And the legacy exporter included the default deck config, so we - // can't use it to determine if scheduling is included. - self.contains_scheduling() - && self.contains_all_original_decks() - && !self.deck_configs.is_empty() - } - - fn contains_scheduling(&self) -> bool { - !self.revlog.is_empty() - } - - fn contains_all_original_decks(&self) -> bool { - let deck_ids: HashSet<_> = self.decks.iter().map(|d| d.id).collect(); - self.cards - .iter() - .all(|c| c.original_deck_id.0 == 0 || deck_ids.contains(&c.original_deck_id)) - } } fn collection_to_tempfile(meta: &Meta, archive: &mut ZipArchive) -> Result { From d5b9c3fbf987a3c428cdcbff344af9ed24f0c816 Mon Sep 17 00:00:00 2001 From: RumovZ Date: Fri, 10 Nov 2023 16:44:00 +0100 Subject: [PATCH 4/6] Fix importing legacy filtered decks w/o scheduling --- rslib/src/import_export/gather.rs | 34 +++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/rslib/src/import_export/gather.rs b/rslib/src/import_export/gather.rs index 817e585a9a9..99e4babe298 100644 --- a/rslib/src/import_export/gather.rs +++ b/rslib/src/import_export/gather.rs @@ -48,10 +48,7 @@ impl ExchangeData { self.decks = guard.col.gather_decks(with_scheduling, !with_scheduling)?; self.notetypes = guard.col.gather_notetypes()?; - // Earlier versions relied on the importer handling filtered decks by converting - // them into regular ones, so there is no guarantee that all original decks - // are included. 
- let allow_filtered = self.contains_all_original_decks(); + let allow_filtered = self.enables_filtered_decks(); if with_scheduling { self.revlog = guard.col.gather_revlog()?; @@ -141,10 +138,31 @@ impl ExchangeData { } } - fn contains_all_original_decks(&self) -> bool { - self.cards.iter().all(|c| { - c.original_deck_id.0 == 0 || self.decks.iter().any(|d| d.id == c.original_deck_id) - }) + /// Because the legacy exporter relied on the importer handling filtered + /// decks by converting them into regular ones, there are two scenarios to + /// watch out for: + /// 1. If exported without scheduling, cards have been reset, but their deck + /// ids may point to filtered decks. + /// 2. If exported with scheduling, cards have not been reset, but their + /// original deck ids may point to missing decks. + fn enables_filtered_decks(&self) -> bool { + self.cards + .iter() + .all(|c| self.card_and_its_deck_are_normal(c) || self.original_deck_exists(c)) + } + + fn card_and_its_deck_are_normal(&self, card: &Card) -> bool { + card.original_deck_id.0 == 0 + && self + .decks + .iter() + .find(|d| d.id == card.deck_id) + .map(|d| !d.is_filtered()) + .unwrap_or_default() + } + + fn original_deck_exists(&self, card: &Card) -> bool { + card.original_deck_id.0 == 1 || self.decks.iter().any(|d| d.id == card.original_deck_id) } fn reset_cards(&mut self, col: &Collection) { From c1efbbb0d337e738becea02b3e66253ec7159d82 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Mon, 13 Nov 2023 13:42:15 +1000 Subject: [PATCH 5/6] Disable 'include deck options' by default, and fix tab order --- qt/aqt/forms/exporting.ui | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/qt/aqt/forms/exporting.ui b/qt/aqt/forms/exporting.ui index 078bab6f04c..ca627f90f9c 100644 --- a/qt/aqt/forms/exporting.ui +++ b/qt/aqt/forms/exporting.ui @@ -73,7 +73,7 @@ exporting_include_deck_configs - true + false @@ -172,9 +172,14 @@ format deck includeSched + include_deck_configs includeMedia + includeHTML includeTags - buttonBox + includeDeck + includeNotetype + includeGuid + legacy_support From ea419e7b10194e799f95548aaa8e49d7a5db6421 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Mon, 13 Nov 2023 13:43:13 +1000 Subject: [PATCH 6/6] deck options > deck presets --- ftl/core/exporting.ftl | 2 +- ftl/core/importing.ftl | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ftl/core/exporting.ftl b/ftl/core/exporting.ftl index 5f0914b1748..be612f7daa9 100644 --- a/ftl/core/exporting.ftl +++ b/ftl/core/exporting.ftl @@ -13,7 +13,7 @@ exporting-include = Include: exporting-include-html-and-media-references = Include HTML and media references exporting-include-media = Include media exporting-include-scheduling-information = Include scheduling information -exporting-include-deck-configs = Include deck options +exporting-include-deck-configs = Include deck presets exporting-include-tags = Include tags exporting-support-older-anki-versions = Support older Anki versions (slower/larger files) exporting-notes-in-plain-text = Notes in Plain Text diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl index 2ef6ddc027b..690e3ca5799 100644 --- a/ftl/core/importing.ftl +++ b/ftl/core/importing.ftl @@ -50,8 +50,8 @@ importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copi importing-notes-skipped-update-due-to-notetype = Notes not updated, as notetype has been modified since you first imported the notes: { $val } importing-notes-updated-as-file-had-newer = Notes updated, as file had newer 
version: { $val } importing-include-reviews = Include reviews -importing-also-import-progress = Also import any learning progress -importing-with-deck-configs = Also import any deck options +importing-also-import-progress = Import any learning progress +importing-with-deck-configs = Import any deck presets importing-updates = Updates importing-include-reviews-help = If enabled, any previous reviews that the deck sharer included will also be imported.
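To close the loop on the series, a hedged sketch of the import side using the option the strings above describe. The `ImportAnkiPackageRequest` field names (`package_path`, `options`) and the single-request `import_anki_package()` call shape are assumptions based on the message and method touched in the first patch; only the two option flags shown in the proto diff are taken as given.

```python
# Hedged sketch: import a shared deck without its presets, so every imported
# deck is assigned the default preset (see importing-with-deck-configs-help).
from anki import import_export_pb2
from anki.collection import Collection

col = Collection("/path/to/target-collection.anki2")  # placeholder path
request = import_export_pb2.ImportAnkiPackageRequest(
    package_path="/tmp/shared-deck.apkg",  # assumed field name
    options=import_export_pb2.ImportAnkiPackageOptions(
        with_scheduling=True,
        with_deck_configs=False,  # decks fall back to the default preset
    ),
)
changes = col.import_anki_package(request)  # assumed to take the request message
print(changes)
col.close()
```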