From 597e72a33f09e2849a9ee9a27757b67451f2e46f Mon Sep 17 00:00:00 2001
From: Phil Eichinski
Date: Wed, 3 Jul 2019 11:59:56 +1000
Subject: [PATCH] feat(citSci): updated csSamples to request associated records
 of datasetItems, including media, reusing the annotationLibraryCommon
 functions used for AudioEvents

---
 src/app/annotationLibrary/common.js           | 51 ++++++++++++-------
 .../citizenScience/citizenScienceCommon.js    | 42 +--------------
 .../datasetProgress/citizenScienceSamples.js  | 41 ++++++++-------
 src/app/citizenScience/listen/listen.js       | 29 ++++++-----
 4 files changed, 71 insertions(+), 92 deletions(-)

diff --git a/src/app/annotationLibrary/common.js b/src/app/annotationLibrary/common.js
index b8090150..a1612f5b 100644
--- a/src/app/annotationLibrary/common.js
+++ b/src/app/annotationLibrary/common.js
@@ -28,26 +28,26 @@ angular
         return $url.formatUri(paths.site.ngRoutes.library, paramObj);
     }
 
-    // function getMedia(annotation) {
-    //     // modify annotation by reference
-    //     // async
-    //     Media.get(
-    //         getMediaParameters(annotation),
-    //         mediaGetSuccess.bind(null, annotation),
-    //         mediaGetFailure
-    //     );
-    //
-    //     // do not block, do not wait for Media requests to finish
-    //     return;
-    // }
-
-    function getMedia(annotation) {
-        // modify annotation by reference
+    /**
+     * For a given child model that references a segment of an audio recording,
+     * fetch the Media JSON and add it as a property. Child models are e.g. AudioEvent or DatasetItem.
+     * @param childModel object
+     * @return {void}
+     */
+    function getMedia(childModel) {
+        // modify annotation/datasetItem by reference
         // async
+        var mediaParameters;
+        if (childModel.constructor.name === "DatasetItem") {
+            mediaParameters = getDatasetItemMediaParameters(childModel);
+        } else {
+            mediaParameters = getAnnotationMediaParameters(childModel);
+        }
+
         var x = Media.get(
-            getMediaParameters(annotation),
-            mediaGetSuccess.bind(null, annotation),
+            mediaParameters,
+            mediaGetSuccess.bind(null, childModel),
             mediaGetFailure
         );
 
         // do not block, do not wait for Media requests to finish
@@ -55,7 +55,7 @@
 
     }
 
-    function getMediaParameters(audioEvent) {
+    function getAnnotationMediaParameters(audioEvent) {
 
         const recordingStart = 0.0,
             padding = constants.annotationLibrary.paddingSeconds;
@@ -87,6 +87,21 @@
         };
     }
 
+    function getDatasetItemMediaParameters(datasetItem) {
+
+        var startOffset = datasetItem.startTimeSeconds;
+        var endOffset = datasetItem.endTimeSeconds;
+
+        return {
+            recordingId: datasetItem.audioRecordingId,
+            startOffset,
+            endOffset,
+            format: "json"
+        };
+    }
+
+
+
     function mediaGetSuccess(audioEvent, mediaValue, responseHeaders) {
 
         audioEvent.media = new MediaModel(mediaValue.data);
diff --git a/src/app/citizenScience/citizenScienceCommon.js b/src/app/citizenScience/citizenScienceCommon.js
index 8f96dc48..63f59174 100644
--- a/src/app/citizenScience/citizenScienceCommon.js
+++ b/src/app/citizenScience/citizenScienceCommon.js
@@ -13,10 +13,7 @@ citizenScienceCommon.factory("CitizenScienceCommon", [
     "baw.models.Media",
     function CitizenScienceCommon($rootScope,
                                   UserProfile,
-                                  UserProfileEvents,
-                                  $http,
-                                  Media,
-                                  MediaModel) {
+                                  UserProfileEvents) {
 
         var self = this;
 
@@ -56,43 +53,6 @@ citizenScienceCommon.factory("CitizenScienceCommon", [
                 return self.audioElementModel;
             },
 
-
-            /**
-             * Returns a function that sets the media member of the scope to the
-             * specified recording segment. The watcher will then actually load it to the dom
-             * @param recordingId string
-             * @param startOffset float
-             * @param duration float
-             */
-            bindShowAudio: function ($scope) {
-
-                var showAudio = function (recordingId, startOffset, endOffset) {
-
-                    var mediaParams = {
-                        recordingId: recordingId,
-                        startOffset: startOffset,
-                        endOffset: endOffset,
-                        format: "json"
-                    };
-
-                    Media.get(
-                        mediaParams,
-                        function (mediaValue) {
-                            $scope.media = new MediaModel(mediaValue.data);
-                        },
-                        function () {
-                            console.warn("failed to get media");
-                        }
-                    );
-
-                    return;
-
-                };
-
-                return showAudio;
-
-            }
-
         };
 
         return self.functions;
diff --git a/src/app/citizenScience/datasetProgress/citizenScienceSamples.js b/src/app/citizenScience/datasetProgress/citizenScienceSamples.js
index e9f03a3c..349a62e3 100644
--- a/src/app/citizenScience/datasetProgress/citizenScienceSamples.js
+++ b/src/app/citizenScience/datasetProgress/citizenScienceSamples.js
@@ -8,7 +8,8 @@ csSamples.factory("CsSamples", [
     "DatasetItem",
     "ProgressEvent",
     "AudioRecording",
-    function CsSamples(DatasetItem, ProgressEvent, AudioRecording) {
+    "annotationLibraryCommon",
+    function CsSamples(DatasetItem, ProgressEvent, AudioRecording, libraryCommon) {
 
         var self = this;
 
@@ -127,34 +128,32 @@
                     self.setCurrentItem();
                 }
 
-                self.addAudioRecordingFields(x.data.data);
-            } else {
-                console.warn("Empty page of dataset items returned");
-            }
+                var associationData = {
+                    annotations: x.data.data,
+                    annotationIds: x.data.data.map((dsi) => dsi.id),
+                    recordingIds: x.data.data.map((dsi) => dsi.audioRecordingId)
 
-            });
+                };
 
-        };
+                // add the associated records (site, media and project) to the dataset items
+                return libraryCommon.getSiteMediaAndProject(associationData).then((y) => {
+                    // todo: generalise the annotationLibraryCommon naming.
+                    // We are currently piggybacking on annotationLibrary logic, which does
+                    // almost exactly what we need, but for AudioEvents instead of DatasetItems.
+                    y.annotations.forEach((datasetItem) => {
 
-        /**
-         * Adds AudioRecording object to each dataset item, which lets us know the site id and UTC start time of the item
-         * @param datasetItems
-         */
-        self.addAudioRecordingFields = function (datasetItems) {
+                        datasetItem.start = new Date(datasetItem.audioRecording.recordedDate.getTime() + datasetItem.startTimeSeconds * 1000);
+                        datasetItem.end = new Date(datasetItem.audioRecording.recordedDate.getTime() + datasetItem.endTimeSeconds * 1000);
 
-            var recordingIds = datasetItems.map(x => x.audioRecordingId);
-            // unique values
-            recordingIds = [...new Set(recordingIds)];
+                    });
 
-            AudioRecording.getRecordingsForLibrary(recordingIds).then(x => {
+                });
 
-                var audioRecordings = x.data.data;
-                datasetItems.forEach(datasetItem => {
-                    var audioRecording = audioRecordings.find(ar => ar.id === datasetItem.audioRecordingId);
-                    datasetItem.audioRecording = audioRecording;
-                });
+            } else {
+                console.warn("Empty page of dataset items returned");
+            }
 
             });
diff --git a/src/app/citizenScience/listen/listen.js b/src/app/citizenScience/listen/listen.js
index c478acf2..0e221dd7 100644
--- a/src/app/citizenScience/listen/listen.js
+++ b/src/app/citizenScience/listen/listen.js
@@ -11,7 +11,7 @@ class CitizenScienceListenController {
         onboardingService
     ) {
 
-        var self = this;
+        //var self = this;
 
         /**
         * The name of the css project as it appears in the dataset definition
@@ -60,9 +60,11 @@
         // the model passed to ngAudio
         $scope.audioElementModel = CitizenScienceCommon.getAudioModel();
 
-
-        this.showAudio = CitizenScienceCommon.bindShowAudio($scope);
-
+        $scope.sampleContext = {
+            site: null,
+            date: null,
+            time: null
+        };
 
         // get the study information by name, then load the appropriate question data
         StudyService.studyByName($routeParams.studyName).then(x => {
@@ -96,21 +98,24 @@
         /**
          * When the currentItem changes, change the current audio file / spectrogram to match it
          */
         $scope.$watch(function () {
-            return CsSamples.currentItem();
+
+            // returns the current item if the media is loaded, otherwise returns false.
+            var currentItem = CsSamples.currentItem();
+            if (currentItem.hasOwnProperty("media")) {
+                return currentItem;
+            }
+
+            return false;
         }, function (item, oldVal) {
             if (item) {
-
-                if (item.id !== oldVal.id) {
-                    self.showAudio(item.audioRecordingId, item.startTimeSeconds, item.endTimeSeconds);
-                }
-
+                $scope.media = item.media;
                 if (item.hasOwnProperty("audioRecording")) {
                     backgroundImage.setBackgroundImageForItem(item.audioRecording, item.startTimeSeconds);
-                }
+                }
             }
-        }, true);
+        });
 
         /**
          * auto play feature