Skip to content

Commit

Permalink
feat(citSci): updated csSamples to request associated records of data…
Browse files Browse the repository at this point in the history
…setItems, including media, reusing the annotationLibraryCommon functions used for AudioEvents
  • Loading branch information
peichins committed Jul 3, 2019
1 parent c8b9141 commit 597e72a
Show file tree
Hide file tree
Showing 4 changed files with 71 additions and 92 deletions.
51 changes: 33 additions & 18 deletions src/app/annotationLibrary/common.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,34 +28,34 @@ angular
return $url.formatUri(paths.site.ngRoutes.library, paramObj);
}

// function getMedia(annotation) {
// // modify annotation by reference
// // async
// Media.get(
// getMediaParameters(annotation),
// mediaGetSuccess.bind(null, annotation),
// mediaGetFailure
// );
//
// // do not block, do not wait for Media requests to finish
// return;
// }

function getMedia(annotation) {
// modify annotation by reference
/**
 * For a given child model that has the properties startOffset, endOffset and AudioRecording,
 * fetch the Media json and add as a property. Child models are e.g. AudioEvent or DatasetItem.
 * The child model is modified by reference (media is attached in mediaGetSuccess).
 * @param childModel object - an AudioEvent or DatasetItem
 * @return {Promise} the $resource promise for the Media request; callers may
 *                   chain on it but are not required to wait.
 */
function getMedia(childModel) {
    // modify annotation/datasetItem by reference
    // async
    // NOTE(review): comparing constructor.name is fragile — it breaks if a
    // minifier renames the DatasetItem constructor. A duck-typed check on the
    // model's fields would be safer; TODO confirm against the build pipeline.
    var mediaParameters;
    if (childModel.constructor.name === "DatasetItem") {
        mediaParameters = getDatasetItemMediaParameters(childModel);
    } else {
        mediaParameters = getAnnotationMediaParameters(childModel);
    }

    // do not block: the success/failure callbacks attach the result, and the
    // promise is returned for callers that do want to wait.
    var request = Media.get(
        mediaParameters,
        mediaGetSuccess.bind(null, childModel),
        mediaGetFailure
    );

    return request.$promise;
}

function getMediaParameters(audioEvent) {
function getAnnotationMediaParameters(audioEvent) {
const recordingStart = 0.0,
padding = constants.annotationLibrary.paddingSeconds;

Expand Down Expand Up @@ -87,6 +87,21 @@ angular
};
}

/**
 * Build the Media request parameters for a dataset item.
 * @param datasetItem object with audioRecordingId, startTimeSeconds, endTimeSeconds
 * @return {{recordingId, startOffset, endOffset, format: string}}
 */
function getDatasetItemMediaParameters(datasetItem) {
    return {
        recordingId: datasetItem.audioRecordingId,
        startOffset: datasetItem.startTimeSeconds,
        endOffset: datasetItem.endTimeSeconds,
        format: "json"
    };
}



function mediaGetSuccess(audioEvent, mediaValue, responseHeaders) {
audioEvent.media = new MediaModel(mediaValue.data);

Expand Down
42 changes: 1 addition & 41 deletions src/app/citizenScience/citizenScienceCommon.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,7 @@ citizenScienceCommon.factory("CitizenScienceCommon", [
"baw.models.Media",
function CitizenScienceCommon($rootScope,
UserProfile,
UserProfileEvents,
$http,
Media,
MediaModel) {
UserProfileEvents) {

var self = this;

Expand Down Expand Up @@ -56,43 +53,6 @@ citizenScienceCommon.factory("CitizenScienceCommon", [
return self.audioElementModel;
},


/**
* Returns a function that sets the media member of the scope to the
* specified recording segment. The watcher will then actually load it to the dom
* @param recordingId string
* @param startOffset float
* @param duration float
*/
bindShowAudio: function ($scope) {

var showAudio = function (recordingId, startOffset, endOffset) {

var mediaParams = {
recordingId: recordingId,
startOffset: startOffset,
endOffset: endOffset,
format: "json"
};

Media.get(
mediaParams,
function (mediaValue) {
$scope.media = new MediaModel(mediaValue.data);
},
function () {
console.warn("failed to get media");
}
);

return;

};

return showAudio;

}

};

return self.functions;
Expand Down
41 changes: 20 additions & 21 deletions src/app/citizenScience/datasetProgress/citizenScienceSamples.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ csSamples.factory("CsSamples", [
"DatasetItem",
"ProgressEvent",
"AudioRecording",
function CsSamples(DatasetItem, ProgressEvent, AudioRecording) {
"annotationLibraryCommon",
function CsSamples(DatasetItem, ProgressEvent, AudioRecording, libraryCommon) {

var self = this;

Expand Down Expand Up @@ -127,34 +128,32 @@ csSamples.factory("CsSamples", [
self.setCurrentItem();
}

self.addAudioRecordingFields(x.data.data);
} else {
console.warn("Empty page of dataset items returned");
}
var associationData = {
annotations: x.data.data,
annotationIds: x.data.data.map((dsi) => dsi.id),
recordingIds: x.data.data.map((dsi) => dsi.audioRecordingId)

});
};

};
// this adds associated records to the data
return libraryCommon.getSiteMediaAndProject(associationData).then((y) => {

// todo: generalise the annotationLibraryCommon naming to be general
// currently we are piggybacking on annotationLibrary logic, which does
// almost exactly what we need but for AudioEvents instead of DatasetItems
y.annotations.forEach((datasetItem) => {

/**
* Adds AudioRecording object to each dataset item, which lets us know the site id and UTC start time of the item
* @param datasetItems
*/
self.addAudioRecordingFields = function (datasetItems) {
datasetItem.start = new Date(datasetItem.audioRecording.recordedDate.getTime() + datasetItem.startTimeSeconds * 1000);
datasetItem.end = new Date(datasetItem.audioRecording.recordedDate.getTime() + datasetItem.endTimeSeconds * 1000);

var recordingIds = datasetItems.map(x => x.audioRecordingId);
// unique values
recordingIds = [...new Set(recordingIds)];
});

AudioRecording.getRecordingsForLibrary(recordingIds).then(x => {
});

var audioRecordings = x.data.data;

datasetItems.forEach(datasetItem => {
var audioRecording = audioRecordings.find(ar => ar.id === datasetItem.audioRecordingId);
datasetItem.audioRecording = audioRecording;
});
} else {
console.warn("Empty page of dataset items returned");
}

});

Expand Down
29 changes: 17 additions & 12 deletions src/app/citizenScience/listen/listen.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ class CitizenScienceListenController {
onboardingService
) {

var self = this;
//var self = this;

/**
* The name of the css project as it appears in the dataset definition
Expand Down Expand Up @@ -60,9 +60,11 @@ class CitizenScienceListenController {

// the model passed to ngAudio
$scope.audioElementModel = CitizenScienceCommon.getAudioModel();

this.showAudio = CitizenScienceCommon.bindShowAudio($scope);

$scope.sampleContext = {
site: null,
date: null,
time: null
};

// get the study information by name, then load the appropriate question data
StudyService.studyByName($routeParams.studyName).then(x => {
Expand Down Expand Up @@ -96,21 +98,24 @@ class CitizenScienceListenController {
* When the currentItem changes, change the current audio file / spectrogram to match it
*/
$scope.$watch(function () {
return CsSamples.currentItem();

// returns the current item if the media is loaded, otherwise returns false.
var currentItem = CsSamples.currentItem();
if (currentItem.hasOwnProperty("media")) {
return currentItem;
}

return false;
},
function (item, oldVal) {
if (item) {

if (item.id !== oldVal.id) {
self.showAudio(item.audioRecordingId, item.startTimeSeconds, item.endTimeSeconds);
}

$scope.media = item.media;
if (item.hasOwnProperty("audioRecording")) {
backgroundImage.setBackgroundImageForItem(item.audioRecording, item.startTimeSeconds);
}

}
}
}, true);
});

/**
* auto play feature
Expand Down

0 comments on commit 597e72a

Please sign in to comment.