diff --git a/doc/modules/ROOT/pages/anlz.adoc b/doc/modules/ROOT/pages/anlz.adoc index b689f82..4f85030 100644 --- a/doc/modules/ROOT/pages/anlz.adoc +++ b/doc/modules/ROOT/pages/anlz.adoc @@ -558,7 +558,7 @@ color preview data begins at byte{nbsp}``18`` and is 7,200 (decimal) bytes long, representing 1,200 columns of waveform preview information. -The color waveform preview entries are the most complex of any of the +The color waveform preview entries are the most complex of the waveform tags. See the <> for the details. @@ -659,8 +659,7 @@ include::example$tag_shared.edn[] ---- __len_entry_bytes__ identifies how many bytes each phrase entry takes -up; so far it always has the value `18`, so each entry takes twenty -four bytes. __len_entries__ at bytes{nbsp}``10``-`11` (labeled +up; so far it always has the value `18`, so each entry takes twenty-four bytes. __len_entries__ at bytes{nbsp}``10``-`11` (labeled _len~e~_ in the diagram) specifies how many entries are present in the tag. Each entry represents one recognized phrase. diff --git a/doc/modules/ROOT/pages/exports.adoc b/doc/modules/ROOT/pages/exports.adoc index 5d712dd..5b1e6df 100644 --- a/doc/modules/ROOT/pages/exports.adoc +++ b/doc/modules/ROOT/pages/exports.adoc @@ -38,7 +38,7 @@ be mounted in DJ controllers and used to play and mix music. The file consists of a series of fixed size pages. The first page contains a file header which defines the page size and the locations of database tables of different types, by the index of their first -page. The rest of the pages consist of the data pages for all of the +page. The rest of the pages consist of the data pages for all the tables identified in the header. Each table is made up of a series of rows which may be spread across @@ -63,7 +63,7 @@ Lesniak], to whom I am hugely grateful. [[file-header]] === File Header -Unless otherwise stated, all multi-byte numbers in the file are stored +Unless otherwise stated, all multibyte numbers in the file are stored in little-endian byte order. Field names used in the byte field diagrams match the IDs assigned to them in the https://github.com/Deep-Symmetry/crate-digger/blob/master/src/main/kaitai/rekordbox_pdb.ksy[Kaitai @@ -304,7 +304,7 @@ _row~pf0~_ in the diagram (meaning “row presence flags group 0”), is found near the end of the page. The last two bytes after each row bitmask (for example _pad~0~_ after _row~pf0~_) have an unknown purpose and may always be zero, and the _row~pf0~_ bitmask takes up -the two bytes that precede them. The low order bit of this value will +the two bytes that precede them. The low-order bit of this value will be set if row 0 is really present, the next bit if row 1 is really present, and so on. The two bytes before these flags, labeled _ofs~0~_, store the offset of the first row in the page. This offset @@ -315,7 +315,7 @@ the heap, at byte `28` in the page, _ofs~0~_ would have the value As more rows are added to the page, space is allocated for them in the heap, and additional index entries are added at the end of the heap, -growing backwards. Once there have been sixteen rows added, all of the +growing backwards. 
Once there have been sixteen rows added, all the bits in _row~pf0~_ are accounted for, and when another row is added, before its offset entry _ofs~16~_ can be added, another row bit-mask entry _row~pf1~_ needs to be allocated, followed by its corresponding diff --git a/pom.xml b/pom.xml index 52a1819..37f35aa 100644 --- a/pom.xml +++ b/pom.xml @@ -65,6 +65,13 @@ 1.1.4 + + + org.apiguardian + apiguardian-api + 1.1.2 + + org.slf4j diff --git a/src/main/java/org/deepsymmetry/cratedigger/Archivist.java b/src/main/java/org/deepsymmetry/cratedigger/Archivist.java index da2d0a5..c408fca 100644 --- a/src/main/java/org/deepsymmetry/cratedigger/Archivist.java +++ b/src/main/java/org/deepsymmetry/cratedigger/Archivist.java @@ -1,15 +1,15 @@ package org.deepsymmetry.cratedigger; +import org.apiguardian.api.API; import org.deepsymmetry.cratedigger.pdb.RekordboxPdb; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; +import java.nio.file.*; import java.util.Iterator; import java.util.Map; @@ -17,8 +17,11 @@ * Supports the creation of archives of all the metadata needed from rekordbox media exports to enable full Beat Link * features when working with the Opus Quad, which is unable to serve the metadata itself. */ +@API(status = API.Status.EXPERIMENTAL) public class Archivist { + private static final Logger logger = LoggerFactory.getLogger(Archivist.class); + /** * Holds the singleton instance of this class. */ @@ -29,6 +32,7 @@ public class Archivist { * * @return the only instance that exists */ + @API(status = API.Status.EXPERIMENTAL) public static Archivist getInstance() { return instance; } @@ -44,6 +48,7 @@ private Archivist() { * An interface that can be used to display progress to the user as an archive is being created, and allow * them to cancel the process if desired. */ + @API(status = API.Status.EXPERIMENTAL) public interface ArchiveListener { /** @@ -68,6 +73,7 @@ public interface ArchiveListener { * * @throws IOException if there is a problem creating the archive */ + @API(status = API.Status.EXPERIMENTAL) public void createArchive(Database database, File file) throws IOException { createArchive(database, file, null); } @@ -84,6 +90,7 @@ public void createArchive(Database database, File file) throws IOException { * * @throws IOException if there is a problem creating the archive */ + @API(status = API.Status.EXPERIMENTAL) public void createArchive(Database database, File archiveFile, ArchiveListener listener) throws IOException { final Path archivePath = archiveFile.toPath(); final Path mediaPath = database.sourceFile.getParentFile().getParentFile().getParentFile().toPath(); @@ -107,38 +114,21 @@ public void createArchive(Database database, File archiveFile, ArchiveListener l // First the original analysis file. final String anlzPathString = Database.getText(track.analyzePath()); - final Path anlzPath = mediaPath.resolve(anlzPathString.substring(1)); - Path destPath = fileSystem.getPath(anlzPathString); - Files.createDirectories(destPath.getParent()); - Files.copy(anlzPath, destPath); + archiveMediaItem(mediaPath, anlzPathString, fileSystem, "analysis file"); // Then the extended analysis file, if it exists. 
final String extPathString = anlzPathString.substring(0, anlzPathString.length() - 3) + "EXT"; - final Path extPath = mediaPath.resolve(extPathString.substring(1)); - if (extPath.toFile().canRead()) { - destPath = fileSystem.getPath(extPathString); - Files.copy(extPath, destPath); - } + archiveMediaItem(mediaPath, extPathString, fileSystem, "extended analysis file"); // Finally, the album art. final RekordboxPdb.ArtworkRow artwork = database.artworkIndex.get(track.artworkId()); if (artwork != null) { final String artPathString = Database.getText(artwork.path()); - final Path artPath = mediaPath.resolve(artPathString.substring(1)); - // First copy the regular resolution album art - if (artPath.toFile().canRead()) { - destPath = fileSystem.getPath(artPathString); - Files.createDirectories(destPath.getParent()); - Files.copy(artPath, destPath); - } + archiveMediaItem(mediaPath, artPathString, fileSystem, "artwork file"); + // Then, copy the high resolution album art, if it exists final String highResArtPathString = artPathString.replaceFirst("(\\.\\w+$)", "_m$1"); - final Path highResArtPath = mediaPath.resolve(highResArtPathString.substring(1)); - if (highResArtPath.toFile().canRead()) { - destPath = fileSystem.getPath(highResArtPathString); - Files.createDirectories(destPath.getParent()); - Files.copy(highResArtPath, destPath); - } + archiveMediaItem(mediaPath, highResArtPathString, fileSystem, "high-resolution artwork file"); } ++completed; // For use in providing progress feedback if there is a listener. @@ -160,4 +150,25 @@ public void createArchive(Database database, File archiveFile, ArchiveListener l } } } + + /** + * Helper method to archive a single media export file when creating a metadata archive. + * + * @param mediaPath the path to the file to be archived + * @param pathString the string which holds the absolute path to the media item + * @param archive the ZIP filesystem in which the metadata archive is being created + * @param description the text identifying the type of file being archived, in case we need to log a warning + * + * @throws IOException if there is an unexpected problem adding the media item to the archive + */ + private static void archiveMediaItem(Path mediaPath, String pathString, FileSystem archive, String description) throws IOException { + final Path sourcePath = mediaPath.resolve(pathString.substring(1)); + final Path destinationPath = archive.getPath(pathString); + Files.createDirectories(destinationPath.getParent()); + try { + Files.copy(sourcePath, destinationPath); + } catch (FileAlreadyExistsException e) { + logger.warn("Skipping copy of {} {} because it has already been archived." , description, destinationPath); + } + } } diff --git a/src/main/java/org/deepsymmetry/cratedigger/Database.java b/src/main/java/org/deepsymmetry/cratedigger/Database.java index 2e2399b..99d5b3e 100644 --- a/src/main/java/org/deepsymmetry/cratedigger/Database.java +++ b/src/main/java/org/deepsymmetry/cratedigger/Database.java @@ -2,6 +2,7 @@ import io.kaitai.struct.KaitaiStruct; import io.kaitai.struct.RandomAccessFileKaitaiStream; +import org.apiguardian.api.API; import org.deepsymmetry.cratedigger.pdb.RekordboxPdb; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -15,7 +16,8 @@ /** *

Parses rekordbox database export files, providing access to the information they contain.

*/ -@SuppressWarnings("WeakerAccess") +@SuppressWarnings("ClassEscapesDefinedScope") +@API(status = API.Status.STABLE) public class Database implements Closeable { private static final Logger logger = LoggerFactory.getLogger(Database.class); @@ -31,55 +33,56 @@ public class Database implements Closeable { public final File sourceFile; /** - * Construct a database access instance from the specified recordbox export file. + *

Construct a database access instance from the specified rekordbox export file. * The file can be obtained either from the SD or USB media, or directly from a player - using {@link FileFetcher#fetch(InetAddress, String, String, File)}. + * using {@link FileFetcher#fetch(InetAddress, String, String, File)}.

* - * Be sure to call {@link #close()} when you are done using the parsed database + *

Be sure to call {@link #close()} when you are done using the parsed database * to close the underlying file, or users will be unable to unmount the drive holding - * it until they quit your program. + * it until they quit your program.

* * @param sourceFile an export.pdb file * * @throws IOException if there is a problem reading the file */ + @API(status = API.Status.STABLE) public Database(File sourceFile) throws IOException { this.sourceFile = sourceFile; pdb = new RekordboxPdb(new RandomAccessFileKaitaiStream(sourceFile.getAbsolutePath())); - final SortedMap> mutableTrackTitleIndex = new TreeMap>(String.CASE_INSENSITIVE_ORDER); - final SortedMap> mutableTrackArtistIndex = new TreeMap>(); - final SortedMap> mutableTrackAlbumIndex = new TreeMap>(); - final SortedMap> mutableTrackGenreIndex = new TreeMap>(); + final SortedMap> mutableTrackTitleIndex = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + final SortedMap> mutableTrackArtistIndex = new TreeMap<>(); + final SortedMap> mutableTrackAlbumIndex = new TreeMap<>(); + final SortedMap> mutableTrackGenreIndex = new TreeMap<>(); trackIndex = indexTracks(mutableTrackTitleIndex, mutableTrackArtistIndex, mutableTrackAlbumIndex, mutableTrackGenreIndex); trackTitleIndex = freezeSecondaryIndex(mutableTrackTitleIndex); trackAlbumIndex = freezeSecondaryIndex(mutableTrackAlbumIndex); trackArtistIndex = freezeSecondaryIndex(mutableTrackArtistIndex); trackGenreIndex = freezeSecondaryIndex(mutableTrackGenreIndex); - final SortedMap> mutableArtistNameIndex = new TreeMap>(String.CASE_INSENSITIVE_ORDER); + final SortedMap> mutableArtistNameIndex = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); artistIndex = indexArtists(mutableArtistNameIndex); artistNameIndex = freezeSecondaryIndex(mutableArtistNameIndex); - final SortedMap> mutableColorNameIndex = new TreeMap>(String.CASE_INSENSITIVE_ORDER); + final SortedMap> mutableColorNameIndex = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); colorIndex = indexColors(mutableColorNameIndex); colorNameIndex = freezeSecondaryIndex(mutableColorNameIndex); - final SortedMap> mutableAlbumNameIndex = new TreeMap>(String.CASE_INSENSITIVE_ORDER); - final SortedMap> mutableAlbumArtistIndex = new TreeMap>(); + final SortedMap> mutableAlbumNameIndex = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + final SortedMap> mutableAlbumArtistIndex = new TreeMap<>(); albumIndex = indexAlbums(mutableAlbumNameIndex, mutableAlbumArtistIndex); albumNameIndex = freezeSecondaryIndex(mutableAlbumNameIndex); albumArtistIndex = freezeSecondaryIndex(mutableAlbumArtistIndex); - final SortedMap> mutableLabelNameIndex = new TreeMap>(String.CASE_INSENSITIVE_ORDER); + final SortedMap> mutableLabelNameIndex = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); labelIndex = indexLabels(mutableLabelNameIndex); labelNameIndex = freezeSecondaryIndex(mutableLabelNameIndex); - final SortedMap> mutableMusicalKeyNameIndex = new TreeMap>(String.CASE_INSENSITIVE_ORDER); + final SortedMap> mutableMusicalKeyNameIndex = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); musicalKeyIndex = indexKeys(mutableMusicalKeyNameIndex); musicalKeyNameIndex = freezeSecondaryIndex(mutableMusicalKeyNameIndex); - final SortedMap> mutableGenreNameIndex = new TreeMap>(String.CASE_INSENSITIVE_ORDER); + final SortedMap> mutableGenreNameIndex = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); genreIndex = indexGenres(mutableGenreNameIndex); genreNameIndex = freezeSecondaryIndex(mutableGenreNameIndex); @@ -164,11 +167,7 @@ private void indexRows(RekordboxPdb.PageType type, RowHandler handler) { * @param the type of the key (often String, but may be Long, e.g. 
to index tracks by artist ID) */ private void addToSecondaryIndex(SortedMap> index, K key, Long id) { - SortedSet existingIds = index.get(key); - if (existingIds == null) { - existingIds = new TreeSet(); - index.put(key, existingIds); - } + SortedSet existingIds = index.computeIfAbsent(key, k -> new TreeSet<>()); existingIds.add(id); } @@ -182,9 +181,7 @@ private void addToSecondaryIndex(SortedMap> index, K key, * @return an unmodifiable top-level view of the unmodifiable children */ private SortedMap> freezeSecondaryIndex(SortedMap> index) { - for (K key : index.keySet()) { - index.put(key, Collections.unmodifiableSortedSet(index.get(key))); - } + index.replaceAll((k, v) -> Collections.unmodifiableSortedSet(index.get(k))); return Collections.unmodifiableSortedMap(index); } @@ -196,41 +193,43 @@ private SortedMap> freezeSecondaryIndex(SortedMap trackIndex; /** * A sorted map from track title to the set of track IDs with that title. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> trackTitleIndex; /** * A sorted map from artist ID to the set of track IDs associated with that artist. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> trackArtistIndex; /** * A sorted map from album ID to the set of track IDs associated with that album. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> trackAlbumIndex; /** * A sorted map from genre ID to the set of track IDs associated with that genre. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> trackGenreIndex; /** * A sorted map from history playlist name to the ID by which its entries can be found. */ + @API(status = API.Status.STABLE) public final SortedMap historyPlaylistNameIndex; /** * A map from playlist ID to the list of tracks IDs making up a history playlist. */ + @API(status = API.Status.STABLE) public final Map> historyPlaylistIndex; /** @@ -247,43 +246,40 @@ private Map indexTracks(final SortedMap> artistIndex, final SortedMap> albumIndex, final SortedMap> genreIndex) { - final Map index = new HashMap(); - - indexRows(RekordboxPdb.PageType.TRACKS, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - // We found a track; index it by its ID. - RekordboxPdb.TrackRow trackRow = (RekordboxPdb.TrackRow)row; - final long id = trackRow.id(); - index.put(id, trackRow); - - // Index the track ID by title, artist (in all roles), album, and genre as well. - final String title = getText(trackRow.title()); - if (title.length() > 0) { - addToSecondaryIndex(titleIndex, title, id); - } - if (trackRow.artistId() > 0) { - addToSecondaryIndex(artistIndex, trackRow.artistId(), id); - } - if (trackRow.composerId() > 0) { - addToSecondaryIndex(artistIndex, trackRow.composerId(), id); - } - if (trackRow.originalArtistId() > 0) { - addToSecondaryIndex(artistIndex, trackRow.originalArtistId(), id); - } - if (trackRow.remixerId() > 0) { - addToSecondaryIndex(artistIndex, trackRow.remixerId(), id); - } - if (trackRow.albumId() > 0) { - addToSecondaryIndex(albumIndex, trackRow.albumId(), id); - } - if (trackRow.genreId() > 0) { - addToSecondaryIndex(genreIndex, trackRow.genreId(), id); - } + final Map index = new HashMap<>(); + + indexRows(RekordboxPdb.PageType.TRACKS, row -> { + // We found a track; index it by its ID. 
+ RekordboxPdb.TrackRow trackRow = (RekordboxPdb.TrackRow)row; + final long id = trackRow.id(); + index.put(id, trackRow); + + // Index the track ID by title, artist (in all roles), album, and genre as well. + final String title = getText(trackRow.title()); + if (!title.isEmpty()) { + addToSecondaryIndex(titleIndex, title, id); + } + if (trackRow.artistId() > 0) { + addToSecondaryIndex(artistIndex, trackRow.artistId(), id); + } + if (trackRow.composerId() > 0) { + addToSecondaryIndex(artistIndex, trackRow.composerId(), id); + } + if (trackRow.originalArtistId() > 0) { + addToSecondaryIndex(artistIndex, trackRow.originalArtistId(), id); + } + if (trackRow.remixerId() > 0) { + addToSecondaryIndex(artistIndex, trackRow.remixerId(), id); + } + if (trackRow.albumId() > 0) { + addToSecondaryIndex(albumIndex, trackRow.albumId(), id); + } + if (trackRow.genreId() > 0) { + addToSecondaryIndex(genreIndex, trackRow.genreId(), id); } }); - logger.info("Indexed " + index.size() + " Tracks."); + logger.info("Indexed {} Tracks.", index.size()); return Collections.unmodifiableMap(index); } @@ -291,13 +287,13 @@ public void rowFound(KaitaiStruct row) { /** * A map from artist ID to the actual artist object. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final Map artistIndex; /** * A sorted map from artist name to the set of artist IDs with that name. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> artistNameIndex; /** @@ -308,24 +304,21 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable primary artist index */ private Map indexArtists(final SortedMap> nameIndex) { - final Map index = new HashMap(); - - indexRows(RekordboxPdb.PageType.ARTISTS, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.ArtistRow artistRow = (RekordboxPdb.ArtistRow)row; - final long id = artistRow.id(); - index.put(id, artistRow); - - // Index the artist ID by name as well. - final String name = getText(artistRow.name()); - if (name.length() > 0) { - addToSecondaryIndex(nameIndex, name, id); - } + final Map index = new HashMap<>(); + + indexRows(RekordboxPdb.PageType.ARTISTS, row -> { + RekordboxPdb.ArtistRow artistRow = (RekordboxPdb.ArtistRow)row; + final long id = artistRow.id(); + index.put(id, artistRow); + + // Index the artist ID by name as well. + final String name = getText(artistRow.name()); + if (!name.isEmpty()) { + addToSecondaryIndex(nameIndex, name, id); } }); - logger.info("Indexed " + index.size() + " Artists."); + logger.info("Indexed {} Artists.", index.size()); return Collections.unmodifiableMap(index); } @@ -333,13 +326,13 @@ public void rowFound(KaitaiStruct row) { /** * A map from color ID to the actual color object. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final Map colorIndex; /** * A sorted map from color name to the set of color IDs with that name. 
*/ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> colorNameIndex; /** @@ -350,43 +343,40 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable primary color index */ private Map indexColors(final SortedMap> nameIndex) { - final Map index = new HashMap(); - - indexRows(RekordboxPdb.PageType.COLORS, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.ColorRow colorRow = (RekordboxPdb.ColorRow)row; - final long id = colorRow.id(); - index.put(id, colorRow); - - // Index the color by name as well. - final String name = Database.getText(colorRow.name()); - if (name.length() > 0) { - addToSecondaryIndex(nameIndex, name, id); - } + final Map index = new HashMap<>(); + + indexRows(RekordboxPdb.PageType.COLORS, row -> { + RekordboxPdb.ColorRow colorRow = (RekordboxPdb.ColorRow)row; + final long id = colorRow.id(); + index.put(id, colorRow); + + // Index the color by name as well. + final String name = Database.getText(colorRow.name()); + if (!name.isEmpty()) { + addToSecondaryIndex(nameIndex, name, id); } }); - logger.info("Indexed " + index.size() + " Colors."); + logger.info("Indexed {} Colors.", index.size()); return Collections.unmodifiableMap(index); } /** * A map from album ID to the actual album object. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final Map albumIndex; /** * A sorted map from album name to the set of album IDs with that name. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> albumNameIndex; /** * A sorted map from artist ID to the set of album IDs associated with that artist. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> albumArtistIndex; /** @@ -399,27 +389,24 @@ public void rowFound(KaitaiStruct row) { */ private Map indexAlbums(final SortedMap> nameIndex, final SortedMap> artistIndex) { - final Map index = new HashMap(); - - indexRows(RekordboxPdb.PageType.ALBUMS, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.AlbumRow albumRow = (RekordboxPdb.AlbumRow) row; - final long id = albumRow.id(); - index.put(id, albumRow); - - // Index the album ID by name and artist as well. - final String name = getText(albumRow.name()); - if (name.length() > 0) { - addToSecondaryIndex(nameIndex, name, id); - } - if (albumRow.artistId() > 0) { - addToSecondaryIndex(artistIndex, albumRow.artistId(), id); - } + final Map index = new HashMap<>(); + + indexRows(RekordboxPdb.PageType.ALBUMS, row -> { + RekordboxPdb.AlbumRow albumRow = (RekordboxPdb.AlbumRow) row; + final long id = albumRow.id(); + index.put(id, albumRow); + + // Index the album ID by name and artist as well. + final String name = getText(albumRow.name()); + if (!name.isEmpty()) { + addToSecondaryIndex(nameIndex, name, id); + } + if (albumRow.artistId() > 0) { + addToSecondaryIndex(artistIndex, albumRow.artistId(), id); } }); - logger.info("Indexed " + index.size() + " Albums."); + logger.info("Indexed {} Albums.", index.size()); return Collections.unmodifiableMap(index); } @@ -427,13 +414,13 @@ public void rowFound(KaitaiStruct row) { /** * A map from label ID to the actual label object. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final Map labelIndex; /** * A sorted map from label name to the set of label IDs with that name. 
*/ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> labelNameIndex; /** @@ -444,24 +431,21 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable primary label index */ private Map indexLabels(final SortedMap> nameIndex) { - final Map index = new HashMap(); - - indexRows(RekordboxPdb.PageType.LABELS, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.LabelRow labelRow = (RekordboxPdb.LabelRow) row; - final long id = labelRow.id(); - index.put(id, labelRow); - - // Index the label ID by name as well. - final String name = getText(labelRow.name()); - if (name.length() > 0) { - addToSecondaryIndex(nameIndex, name, id); - } + final Map index = new HashMap<>(); + + indexRows(RekordboxPdb.PageType.LABELS, row -> { + RekordboxPdb.LabelRow labelRow = (RekordboxPdb.LabelRow) row; + final long id = labelRow.id(); + index.put(id, labelRow); + + // Index the label ID by name as well. + final String name = getText(labelRow.name()); + if (!name.isEmpty()) { + addToSecondaryIndex(nameIndex, name, id); } }); - logger.info("Indexed " + index.size() + " Labels."); + logger.info("Indexed {} Labels.", index.size()); return Collections.unmodifiableMap(index); } @@ -469,13 +453,13 @@ public void rowFound(KaitaiStruct row) { /** * A map from (musical) key ID to the actual key object. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final Map musicalKeyIndex; /** * A sorted map from musical key name to the set of musical key IDs with that name. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> musicalKeyNameIndex; /** @@ -486,24 +470,21 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable primary musical key index */ private Map indexKeys(final SortedMap> nameIndex) { - final Map index = new HashMap(); - - indexRows(RekordboxPdb.PageType.KEYS, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.KeyRow keyRow = (RekordboxPdb.KeyRow) row; - final long id = keyRow.id(); - index.put(id, keyRow); - - // Index the musical key ID by name as well. - final String name = getText(keyRow.name()); - if (name.length() > 0) { - addToSecondaryIndex(nameIndex, name, id); - } + final Map index = new HashMap<>(); + + indexRows(RekordboxPdb.PageType.KEYS, row -> { + RekordboxPdb.KeyRow keyRow = (RekordboxPdb.KeyRow) row; + final long id = keyRow.id(); + index.put(id, keyRow); + + // Index the musical key ID by name as well. + final String name = getText(keyRow.name()); + if (!name.isEmpty()) { + addToSecondaryIndex(nameIndex, name, id); } }); - logger.info("Indexed " + index.size() + " Musical Keys."); + logger.info("Indexed {} Musical Keys.", index.size()); return Collections.unmodifiableMap(index); } @@ -511,13 +492,13 @@ public void rowFound(KaitaiStruct row) { /** * A map from genre ID to the actual genre object. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final Map genreIndex; /** * A sorted map from genre name to the set of genre IDs with that name. 
*/ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final SortedMap> genreNameIndex; /** @@ -528,31 +509,28 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable primary genre index */ private Map indexGenres(final SortedMap> nameIndex) { - final Map index = new HashMap(); - - indexRows(RekordboxPdb.PageType.GENRES, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.GenreRow genreRow = (RekordboxPdb.GenreRow) row; - final long id = genreRow.id(); - index.put(id, genreRow); - - // Index the genre by name as well. - final String name = getText(genreRow.name()); - if (name.length() > 0) { - addToSecondaryIndex(nameIndex, name, id); - } + final Map index = new HashMap<>(); + + indexRows(RekordboxPdb.PageType.GENRES, row -> { + RekordboxPdb.GenreRow genreRow = (RekordboxPdb.GenreRow) row; + final long id = genreRow.id(); + index.put(id, genreRow); + + // Index the genre by name as well. + final String name = getText(genreRow.name()); + if (!name.isEmpty()) { + addToSecondaryIndex(nameIndex, name, id); } }); - logger.info("Indexed " + index.size() + " Genres."); + logger.info("Indexed {} Genres.", index.size()); return Collections.unmodifiableMap(index); } /** * A map from artwork ID to the artwork row containing its file path. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final Map artworkIndex; /** @@ -561,29 +539,28 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable artwork path index */ private Map indexArtwork() { - final Map index = new HashMap(); + final Map index = new HashMap<>(); - indexRows(RekordboxPdb.PageType.ARTWORK, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.ArtworkRow artworkRow = (RekordboxPdb.ArtworkRow) row; - index.put(artworkRow.id(), artworkRow); - } + indexRows(RekordboxPdb.PageType.ARTWORK, row -> { + RekordboxPdb.ArtworkRow artworkRow = (RekordboxPdb.ArtworkRow) row; + index.put(artworkRow.id(), artworkRow); }); - logger.info(("Indexed " + index.size() + " Artwork Paths.")); + logger.info("Indexed {} Artwork Paths.", index.size()); return Collections.unmodifiableMap(index); } /** * A map from playlist ID to the list of tracks IDs making up a playlist. */ + @API(status = API.Status.STABLE) public final Map> playlistIndex; /** * Playlist folders can either contain playlists or other folders. Each * entry has a flag explaining how the ID should be interpreted. */ + @API(status = API.Status.STABLE) public static class PlaylistFolderEntry { /** * The name by which this playlist or folder is known. @@ -619,6 +596,10 @@ public String toString() { } } + /** + * A map from folder ID to the list of playlists or folders in a playlist folder. 
+ */ + @API(status = API.Status.STABLE) public final Map> playlistFolderIndex; /** @@ -627,27 +608,22 @@ public String toString() { * @return the populated and unmodifiable playlist index */ private Map> indexPlaylists() { - final Map> result = new HashMap>(); - indexRows(RekordboxPdb.PageType.PLAYLIST_ENTRIES, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.PlaylistEntryRow entryRow = (RekordboxPdb.PlaylistEntryRow) row; - ArrayList playlist = (ArrayList) result.get(entryRow.playlistId()); - if (playlist == null) { - playlist = new ArrayList(); - result.put(entryRow.playlistId(), playlist); - } - while (playlist.size() <= entryRow.entryIndex()) { // Grow to hold the new entry we are going to set. - playlist.add(0L); - } - playlist.set((int) entryRow.entryIndex(), entryRow.trackId()); + final Map> result = new HashMap<>(); + indexRows(RekordboxPdb.PageType.PLAYLIST_ENTRIES, row -> { + RekordboxPdb.PlaylistEntryRow entryRow = (RekordboxPdb.PlaylistEntryRow) row; + ArrayList playlist = (ArrayList) result.get(entryRow.playlistId()); + if (playlist == null) { + playlist = new ArrayList<>(); + result.put(entryRow.playlistId(), playlist); } + while (playlist.size() <= entryRow.entryIndex()) { // Grow to hold the new entry we are going to set. + playlist.add(0L); + } + playlist.set((int) entryRow.entryIndex(), entryRow.trackId()); }); // Freeze the finished lists and overall map - for (Map.Entry> entry : result.entrySet()) { - result.put(entry.getKey(), Collections.unmodifiableList(entry.getValue())); - } - logger.info("Indexed " + result.size() + " playlists."); + result.replaceAll((k, v) -> Collections.unmodifiableList(v)); + logger.info("Indexed {} playlists.", result.size()); return Collections.unmodifiableMap(result); } @@ -657,28 +633,23 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable playlist folder index */ private Map> indexPlaylistFolders() { - final Map> result = new HashMap>(); - indexRows(RekordboxPdb.PageType.PLAYLIST_TREE, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.PlaylistTreeRow treeRow = (RekordboxPdb.PlaylistTreeRow) row; - ArrayList parent = (ArrayList) result.get(treeRow.parentId()); - if (parent == null) { - parent = new ArrayList(); - result.put(treeRow.parentId(), parent); - } - while (parent.size() <= treeRow.sortOrder()) { // Grow to hold the new entry we are going to set. - parent.add(null); - } - parent.set((int) treeRow.sortOrder(), new PlaylistFolderEntry(Database.getText(treeRow.name()), - treeRow.isFolder(), treeRow.id())); + final Map> result = new HashMap<>(); + indexRows(RekordboxPdb.PageType.PLAYLIST_TREE, row -> { + RekordboxPdb.PlaylistTreeRow treeRow = (RekordboxPdb.PlaylistTreeRow) row; + ArrayList parent = (ArrayList) result.get(treeRow.parentId()); + if (parent == null) { + parent = new ArrayList<>(); + result.put(treeRow.parentId(), parent); } + while (parent.size() <= treeRow.sortOrder()) { // Grow to hold the new entry we are going to set. 
+ parent.add(null); + } + parent.set((int) treeRow.sortOrder(), new PlaylistFolderEntry(Database.getText(treeRow.name()), + treeRow.isFolder(), treeRow.id())); }); // Freeze the finished lists and overall map - for (Map.Entry> entry : result.entrySet()) { - result.put(entry.getKey(), Collections.unmodifiableList(entry.getValue())); - } - logger.info("Indexed " + result.size() + " playlist folders."); + result.replaceAll((k, v) -> Collections.unmodifiableList(v)); + logger.info("Indexed {} playlist folders.", result.size()); return Collections.unmodifiableMap(result); } @@ -688,15 +659,12 @@ public void rowFound(KaitaiStruct row) { * @return a map sorted by the history playlist names identifying the IDs by which their entries can be found. */ private SortedMap indexHistoryPlaylistNames() { - final SortedMap result = new TreeMap(); - indexRows(RekordboxPdb.PageType.HISTORY_PLAYLISTS, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.HistoryPlaylistRow historyRow = (RekordboxPdb.HistoryPlaylistRow) row; - result.put(getText(historyRow.name()), historyRow.id()); - } + final SortedMap result = new TreeMap<>(); + indexRows(RekordboxPdb.PageType.HISTORY_PLAYLISTS, row -> { + RekordboxPdb.HistoryPlaylistRow historyRow = (RekordboxPdb.HistoryPlaylistRow) row; + result.put(getText(historyRow.name()), historyRow.id()); }); - logger.info("Indexed " + result.size() + " history playlists."); + logger.info("Indexed {} history playlist names.", result.size()); return Collections.unmodifiableSortedMap(result); } @@ -706,27 +674,22 @@ public void rowFound(KaitaiStruct row) { * @return the populated and unmodifiable history playlist index. */ private Map> indexHistoryPlaylists() { - final Map> result = new HashMap>(); - indexRows(RekordboxPdb.PageType.HISTORY_ENTRIES, new RowHandler() { - @Override - public void rowFound(KaitaiStruct row) { - RekordboxPdb.HistoryEntryRow entryRow = (RekordboxPdb.HistoryEntryRow) row; - ArrayList playList = (ArrayList) result.get(entryRow.playlistId()); - if (playList == null) { - playList = new ArrayList(); - result.put(entryRow.playlistId(), playList); - } - while (playList.size() <= entryRow.entryIndex()) { // Grow to hold the new entry we are going to set. - playList.add(0L); - } - playList.set((int) entryRow.entryIndex(), entryRow.trackId()); + final Map> result = new HashMap<>(); + indexRows(RekordboxPdb.PageType.HISTORY_ENTRIES, row -> { + RekordboxPdb.HistoryEntryRow entryRow = (RekordboxPdb.HistoryEntryRow) row; + ArrayList playList = (ArrayList) result.get(entryRow.playlistId()); + if (playList == null) { + playList = new ArrayList<>(); + result.put(entryRow.playlistId(), playList); + } + while (playList.size() <= entryRow.entryIndex()) { // Grow to hold the new entry we are going to set. + playList.add(0L); } + playList.set((int) entryRow.entryIndex(), entryRow.trackId()); }); // Freeze the finished lists and overall map. 
- for (Map.Entry> entry : result.entrySet()) { - result.put(entry.getKey(), Collections.unmodifiableList(entry.getValue())); - } - logger.info("Indexed " + result.size() + " history playlists."); + result.replaceAll((k, v) -> Collections.unmodifiableList(v)); + logger.info("Indexed {} history playlists.", result.size()); return Collections.unmodifiableMap(result); } @@ -750,7 +713,7 @@ public void close() throws IOException { * * @return the text it contains, which may have zero length, but will never be {@code null} */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public static String getText(RekordboxPdb.DeviceSqlString string) { String text = null; if (string.body() instanceof RekordboxPdb.DeviceSqlShortAscii) { @@ -763,7 +726,7 @@ public static String getText(RekordboxPdb.DeviceSqlString string) { if (text != null) { return text; } - logger.warn("Received unusable DeviceSqlString, returning empty string; lengthAndKind: " + string.lengthAndKind()); + logger.warn("Received unusable DeviceSqlString, returning empty string; lengthAndKind: {}", string.lengthAndKind()); return ""; } } diff --git a/src/main/java/org/deepsymmetry/cratedigger/FileFetcher.java b/src/main/java/org/deepsymmetry/cratedigger/FileFetcher.java index 880f752..1372b54 100644 --- a/src/main/java/org/deepsymmetry/cratedigger/FileFetcher.java +++ b/src/main/java/org/deepsymmetry/cratedigger/FileFetcher.java @@ -1,6 +1,7 @@ package org.deepsymmetry.cratedigger; import org.acplt.oncrpc.*; +import org.apiguardian.api.API; import org.deepsymmetry.cratedigger.rpc.*; import java.io.File; @@ -9,6 +10,7 @@ import java.io.IOException; import java.net.InetAddress; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -21,27 +23,28 @@ * *

This is a singleton, so the single instance is obtained through the {@link #getInstance()} method.

*/ +@API(status = API.Status.STABLE) public class FileFetcher { /** * The character set with which paths are sent to the NFS servers running on players. */ - @SuppressWarnings("WeakerAccess") - public final static Charset CHARSET = Charset.forName("UTF-16LE"); + @API(status = API.Status.STABLE) + public final static Charset CHARSET = StandardCharsets.UTF_16LE; /** * The default number of bytes to read from the player in each request for file data. This is a trade-off * between reducing the number of requests and reducing IP fragmentation and expensive retransmissions * of already-sent fragments whenever one is lost. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public final static int DEFAULT_READ_SIZE = 2048; /** * How long to wait for a response to our UDP RPC calls before retransmitting. The players respond within a * few milliseconds if they are going to at all. */ - @SuppressWarnings("WeakerAccess") + @API(status = API.Status.STABLE) public static final int DEFAULT_RPC_RETRANSMIT_TIMEOUT = 250; /** @@ -54,6 +57,7 @@ public class FileFetcher { * * @return the only instance that exists */ + @API(status = API.Status.STABLE) public static FileFetcher getInstance() { return instance; } @@ -72,6 +76,7 @@ private FileFetcher() { * * @return the current read size */ + @API(status = API.Status.STABLE) public int getReadSize() { return readSize; } @@ -84,6 +89,7 @@ public int getReadSize() { * @param readSize the new read size, must be between 1024 and the largest value supported by NFS, inclusive * @throws IllegalArgumentException if {@code readSize} is less than 1024 or greater than {@link nfs#MAXDATA} */ + @API(status = API.Status.STABLE) public void setReadSize(int readSize) { if (readSize < 1024 || readSize > nfs.MAXDATA) { throw new IllegalArgumentException("readSize must be between 1024 and " + nfs.MAXDATA + ", inclusive."); @@ -104,6 +110,7 @@ public void setReadSize(int readSize) { * * @return the current retransmit timeout */ + @API(status = API.Status.STABLE) public int getRetransmitTimeout() { return retransmitTimeout; } @@ -114,6 +121,7 @@ public int getRetransmitTimeout() { * * @param retransmitTimeout the new retransmit timeout, must be between 1 an 30000, inclusive */ + @API(status = API.Status.STABLE) public void setRetransmitTimeout(int retransmitTimeout) { if (retransmitTimeout < 1 || retransmitTimeout > 30000) { throw new IllegalArgumentException("retransmitTimeout must be between 1 and 30000, inclusive."); @@ -128,10 +136,10 @@ public void setRetransmitTimeout(int retransmitTimeout) { private int retransmitTimeout = DEFAULT_RPC_RETRANSMIT_TIMEOUT; /** - * Keeps track of the root filesystems of the known players so we don't have to mount them every time we want a + * Keeps track of the root filesystems of the known players, so we don't have to mount them every time we want a * file. Keys are the address of the player, values are a map from mount paths to the corresponding file handles. */ - private final Map> mounts = new ConcurrentHashMap>(); + private final Map> mounts = new ConcurrentHashMap<>(); /** * Mount a filesystem in preparation to retrieving files from it. Since NFS is a stateless protocol, and the @@ -186,7 +194,7 @@ private FHandle findRoot(InetAddress player, String path) throws IOException, On // Create a cache for the player if one does not yet exist. 
if (playerMap == null) { - playerMap = new ConcurrentHashMap(); + playerMap = new ConcurrentHashMap<>(); mounts.put(player, playerMap); } @@ -203,6 +211,7 @@ private FHandle findRoot(InetAddress player, String path) throws IOException, On * * @param player the player that has disappeared or unmounted a filesystem */ + @API(status = API.Status.STABLE) public void removePlayer(InetAddress player) { mounts.remove(player); nfsClients.remove(player); @@ -213,7 +222,7 @@ public void removePlayer(InetAddress player) { * each time we retrieve another file from the player. The client will be created the first time we need a file * from a player, and removed when {@link #removePlayer(InetAddress)} is called. */ - private final Map nfsClients = new ConcurrentHashMap(); + private final Map nfsClients = new ConcurrentHashMap<>(); /** * Find or create the NFS client that can talk to a particular player. @@ -253,13 +262,13 @@ private DirOpResBody find (InetAddress player, String mountPath, String filePath FHandle root = findRoot(player, mountPath); OncRpcUdpClient client = getNfsClient(player); - // Iterate over the elements of the path to the file we want to find (the players can't handle multi-part + // Iterate over the elements of the path to the file we want to find (the players can't handle multipart // path names themselves). String[] elements = filePath.split("/"); FHandle fileHandle = root; DirOpRes result = null; for (String element : elements) { - if (element.length() > 0) { + if (!element.isEmpty()) { DirOpArgs args = new DirOpArgs(); args.dir = fileHandle; args.name = new Filename(element.getBytes(CHARSET)); @@ -292,9 +301,9 @@ private DirOpResBody find (InetAddress player, String mountPath, String filePath * * @throws IOException if there is a problem retrieving the file */ + @API(status = API.Status.STABLE) public void fetch(InetAddress player, String mountPath, String sourcePath, File destination) throws IOException { - FileOutputStream outputStream = new FileOutputStream(destination); - try { + try (FileOutputStream outputStream = new FileOutputStream(destination)) { // Make sure the file exists on the player, and find its file handle. DirOpResBody found = find(player, mountPath, sourcePath); if (found.attributes.type != FType.NFREG) { @@ -330,8 +339,6 @@ public void fetch(InetAddress player, String mountPath, String sourcePath, File } catch (OncRpcException e) { throw new IOException("Unable to download file \"" + sourcePath + "\", caught ONC RPC exception.", e); - } finally { - outputStream.close(); } } } \ No newline at end of file
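
As a usage sketch of the public API touched above (not part of this diff): only Database(File), Archivist.getInstance(), and createArchive(Database, File) are taken from the sources in this patch. The media and output paths below are hypothetical, and the export is assumed to sit at its normal location on locally mounted rekordbox media so that createArchive can resolve the analysis and artwork files relative to it.

    import org.deepsymmetry.cratedigger.Archivist;
    import org.deepsymmetry.cratedigger.Database;

    import java.io.File;

    public class ArchiveSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical path to a rekordbox export on locally mounted USB media;
            // createArchive resolves analysis and artwork files relative to this location.
            File export = new File("/Volumes/MY-USB/PIONEER/rekordbox/export.pdb");
            try (Database database = new Database(export)) {  // Database implements Closeable
                // Hypothetical output file; an ArchiveListener can also be passed for progress feedback.
                Archivist.getInstance().createArchive(database, new File("/tmp/my-usb-metadata.zip"));
            }
        }
    }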