From d5337dffbb383d0b354a6e81c2c57848f5df039e Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Sun, 26 May 2024 14:12:22 -0600 Subject: [PATCH 01/17] Initial refactoring of dataset --- anise/src/structure/dataset/builder.rs | 2 +- anise/src/structure/dataset/mod.rs | 166 ++++++++++--------------- 2 files changed, 66 insertions(+), 102 deletions(-) diff --git a/anise/src/structure/dataset/builder.rs b/anise/src/structure/dataset/builder.rs index c53b5ab1..5bc93edb 100644 --- a/anise/src/structure/dataset/builder.rs +++ b/anise/src/structure/dataset/builder.rs @@ -104,7 +104,7 @@ impl<'a, T: DataSetT, const ENTRIES: usize> DataSetBuilder { } pub fn finalize(mut self, buf: Vec) -> Result, DataSetError> { - self.dataset.bytes = Bytes::copy_from_slice(&buf); + // self.dataset.bytes = Bytes::copy_from_slice(&buf); self.dataset.set_crc32(); Ok(self.dataset) } diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index 24938e5a..28d14464 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -19,13 +19,13 @@ use crate::{ structure::dataset::error::DataSetIntegritySnafu, NaifId, }; -use bytes::Bytes; use core::fmt; -use core::marker::PhantomData; use core::ops::Deref; -use der::{asn1::OctetStringRef, Decode, Encode, Reader, Writer}; +use der::{asn1::SequenceOf, Decode, Encode, Reader, Writer}; +use heapless::Vec; use log::{error, trace}; use snafu::prelude::*; +use std::mem::size_of; macro_rules! 
io_imports { () => { @@ -47,7 +47,7 @@ pub use datatype::DataSetType; pub use error::DataSetError; /// The kind of data that can be encoded in a dataset -pub trait DataSetT: Default + Encode + for<'a> Decode<'a> { +pub trait DataSetT: Clone + Default + Encode + for<'a> Decode<'a> { const NAME: &'static str; } @@ -59,8 +59,7 @@ pub struct DataSet { pub lut: LookUpTable, pub data_checksum: u32, /// The actual data from the dataset - pub bytes: Bytes, - _daf_type: PhantomData, + pub data: Vec, } impl DataSet { @@ -125,7 +124,10 @@ impl DataSet { /// Compute the CRC32 of the underlying bytes pub fn crc32(&self) -> u32 { - crc32fast::hash(&self.bytes) + let size = ENTRIES * size_of::(); + let mut buf = std::vec::Vec::with_capacity(size); + let _ = self.encode_to_vec(&mut buf); + crc32fast::hash(&buf) } /// Sets the checksum of this data. @@ -135,7 +137,7 @@ impl DataSet { pub fn check_integrity(&self) -> Result<(), IntegrityError> { // Ensure that the data is correctly decoded - let computed_chksum = crc32fast::hash(&self.bytes); + let computed_chksum = self.crc32(); if computed_chksum == self.data_checksum { Ok(()) } else { @@ -167,15 +169,10 @@ impl DataSet { pub fn get_by_id(&self, id: NaifId) -> Result { if let Some(entry) = self.lut.by_id.get(&id) { // Found the ID - let bytes = self - .bytes - .get(entry.as_range()) + self.data + .get(entry.start_idx as usize) + .cloned() .ok_or_else(|| entry.decoding_error()) - .with_context(|_| DataDecodingSnafu { - action: "fetching by ID", - })?; - T::from_der(bytes) - .map_err(|err| DecodingError::DecodingDer { err }) .with_context(|_| DataDecodingSnafu { action: "fetching by ID", }) @@ -190,25 +187,15 @@ impl DataSet { /// Mutates this dataset to change the value of the entry with that ID to the new provided value. /// This will return an error if the ID is not in the lookup table. 
/// Note that this function requires a new heap allocation to change the underlying dataset - pub fn set_by_id(&mut self, id: NaifId, new_value: &T) -> Result<(), DataSetError> { + pub fn set_by_id(&mut self, id: NaifId, new_value: T) -> Result<(), DataSetError> { if let Some(entry) = self.lut.by_id.get(&id) { - let mut bytes = self.bytes.to_vec(); - - let these_bytes = bytes - .get_mut(entry.start_idx as usize..) + *self + .data + .get_mut(entry.start_idx as usize) .ok_or_else(|| entry.decoding_error()) .with_context(|_| DataDecodingSnafu { - action: "setting by ID", - })?; - - if let Err(err) = new_value.encode_to_slice(these_bytes) { - return Err(DataSetError::DataDecoding { - action: "encoding data set when setting by ID", - source: DecodingError::DecodingDer { err }, - }); - } - - self.bytes = Bytes::from(bytes); + action: "fetching by ID", + })? = new_value; Ok(()) } else { @@ -224,21 +211,13 @@ impl DataSet { /// Note that this function requires a new heap allocation to change the underlying dataset pub fn rm_by_id(&mut self, id: NaifId) -> Result<(), DataSetError> { if let Some(entry) = self.lut.by_id.remove(&id) { - let mut bytes = self.bytes.to_vec(); - - let these_bytes = bytes - .get_mut(entry.start_idx as usize..) + *self + .data + .get_mut(entry.start_idx as usize) .ok_or_else(|| entry.decoding_error()) .with_context(|_| DataDecodingSnafu { - action: "removing by ID", - })?; - - if let Err(err) = T::default().encode_to_slice(these_bytes) { - return Err(DataSetError::DataDecoding { - action: "encoding default data set when removing by ID", - source: DecodingError::DecodingDer { err }, - }); - } + action: "fetching by ID", + })? = T::default(); // Search the names for that same entry. 
for (name, name_entry) in &self.lut.by_name.clone() { @@ -250,8 +229,6 @@ impl DataSet { } } - self.bytes = Bytes::from(bytes); - Ok(()) } else { Err(DataSetError::DataSetLut { @@ -264,18 +241,12 @@ impl DataSet { /// Get a copy of the data with that name, if that name is in the lookup table pub fn get_by_name(&self, name: &str) -> Result { if let Some(entry) = self.lut.by_name.get(&name.try_into().unwrap()) { - // Found the name - let bytes = self - .bytes - .get(entry.as_range()) + self.data + .get(entry.start_idx as usize) + .cloned() .ok_or_else(|| entry.decoding_error()) .with_context(|_| DataDecodingSnafu { - action: "fetching by name", - })?; - T::from_der(bytes) - .map_err(|err| DecodingError::DecodingDer { err }) - .with_context(|_| DataDecodingSnafu { - action: "fetching by name", + action: "fetching by ID", }) } else { Err(DataSetError::DataSetLut { @@ -290,25 +261,15 @@ impl DataSet { /// Mutates this dataset to change the value of the entry with that name to the new provided value. /// This will return an error if the name is not in the lookup table. /// Note that this function requires a new heap allocation to change the underlying dataset - pub fn set_by_name(&mut self, name: &str, new_value: &T) -> Result<(), DataSetError> { + pub fn set_by_name(&mut self, name: &str, new_value: T) -> Result<(), DataSetError> { if let Some(entry) = self.lut.by_name.get(&name.try_into().unwrap()) { - let mut bytes = self.bytes.to_vec(); - - let these_bytes = bytes - .get_mut(entry.start_idx as usize..) + *self + .data + .get_mut(entry.start_idx as usize) .ok_or_else(|| entry.decoding_error()) .with_context(|_| DataDecodingSnafu { - action: "setting by name", - })?; - - if let Err(err) = new_value.encode_to_slice(these_bytes) { - return Err(DataSetError::DataDecoding { - action: "encoding data set when setting by name", - source: DecodingError::DecodingDer { err }, - }); - } - - self.bytes = Bytes::from(bytes); + action: "fetching by ID", + })? 
= new_value; Ok(()) } else { @@ -326,21 +287,13 @@ impl DataSet { /// Note that this function requires a new heap allocation to change the underlying dataset pub fn rm_by_name(&mut self, name: &str) -> Result<(), DataSetError> { if let Some(entry) = self.lut.by_name.remove(&name.try_into().unwrap()) { - let mut bytes = self.bytes.to_vec(); - - let these_bytes = bytes - .get_mut(entry.start_idx as usize..) + *self + .data + .get_mut(entry.start_idx as usize) .ok_or_else(|| entry.decoding_error()) .with_context(|_| DataDecodingSnafu { - action: "removing by name", - })?; - - if let Err(err) = T::default().encode_to_slice(these_bytes) { - return Err(DataSetError::DataDecoding { - action: "encoding default data set when removing by name", - source: DecodingError::DecodingDer { err }, - }); - } + action: "fetching by ID", + })? = T::default(); // Search the names for that same entry. for (id, id_entry) in &self.lut.by_id.clone() { @@ -352,8 +305,6 @@ impl DataSet { } } - self.bytes = Bytes::from(bytes); - Ok(()) } else { Err(DataSetError::DataSetLut { @@ -424,23 +375,30 @@ impl DataSet { pub fn is_empty(&self) -> bool { self.len() == 0 } + + /// Returns this data as a data sequence, cloning all of the entries into this sequence. + fn build_data_seq(&self) -> SequenceOf { + let mut data_seq = SequenceOf::::new(); + for d in &self.data { + data_seq.add(d.clone()).unwrap(); + } + data_seq + } } impl Encode for DataSet { fn encoded_len(&self) -> der::Result { - let as_byte_ref = OctetStringRef::new(&self.bytes)?; self.metadata.encoded_len()? + self.lut.encoded_len()? + self.data_checksum.encoded_len()? - + as_byte_ref.encoded_len()? + + self.build_data_seq().encoded_len()? 
} fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - let as_byte_ref = OctetStringRef::new(&self.bytes)?; self.metadata.encode(encoder)?; self.lut.encode(encoder)?; self.data_checksum.encode(encoder)?; - as_byte_ref.encode(encoder) + self.build_data_seq().encode(encoder) } } @@ -449,13 +407,14 @@ impl<'a, T: DataSetT, const ENTRIES: usize> Decode<'a> for DataSet { let metadata = decoder.decode()?; let lut = decoder.decode()?; let crc32_checksum = decoder.decode()?; - let bytes: OctetStringRef = decoder.decode()?; + // let bytes: OctetStringRef = decoder.decode()?; + let data_seq: SequenceOf = decoder.decode()?; + let data: Vec = data_seq.iter().cloned().collect(); Ok(Self { metadata, lut, data_checksum: crc32_checksum, - bytes: Bytes::copy_from_slice(bytes.as_bytes()), - _daf_type: PhantomData::, + data, }) } } @@ -474,13 +433,14 @@ impl fmt::Display for DataSet { #[cfg(test)] mod dataset_ut { + use std::mem::size_of; + use crate::structure::{ dataset::DataSetBuilder, lookuptable::Entry, spacecraft::{DragData, Inertia, Mass, SRPData, SpacecraftData}, SpacecraftDataSet, }; - use bytes::Bytes; use super::{DataSet, Decode, Encode, LookUpTable}; @@ -568,9 +528,11 @@ mod dataset_ut { // Build the dataset let mut dataset = DataSet { lut, - bytes: Bytes::copy_from_slice(&packed_buf), + // bytes: Bytes::copy_from_slice(&packed_buf), ..Default::default() }; + dataset.data.push(srp_sc.clone()).unwrap(); + dataset.data.push(full_sc.clone()).unwrap(); dataset.set_crc32(); // And encode it. 
@@ -600,7 +562,7 @@ mod dataset_ut { // Grab a copy of the original data let mut sc = dataset.get_by_name("SRP spacecraft").unwrap(); sc.srp_data.as_mut().unwrap().coeff_reflectivity = 1.1; - dataset.set_by_name("SRP spacecraft", &sc).unwrap(); + dataset.set_by_name("SRP spacecraft", sc.clone()).unwrap(); // Ensure that we've modified only that entry assert_eq!( dataset.get_by_name("Full spacecraft").unwrap(), @@ -618,7 +580,7 @@ mod dataset_ut { 1.1, "value was not modified" ); - assert!(dataset.set_by_name("Unavailable SC", &sc).is_err()); + assert!(dataset.set_by_name("Unavailable SC", sc.clone()).is_err()); // Test renaming by name dataset @@ -678,6 +640,8 @@ mod dataset_ut { ..Default::default() }; + dbg!(size_of::()); + // Initialize the overall buffer for building the data let mut buf = vec![]; let mut builder = DataSetBuilder::default(); @@ -728,13 +692,13 @@ mod dataset_ut { // Check that we can set by ID let mut repr = dataset.get_by_id(-50).unwrap(); repr.mass_kg.as_mut().unwrap().dry_mass_kg = 100.5; - dataset.set_by_id(-50, &repr).unwrap(); + dataset.set_by_id(-50, repr.clone()).unwrap(); assert_eq!( dataset.get_by_id(-50).unwrap().mass_kg.unwrap().dry_mass_kg, 100.5, "value was not modified" ); - assert!(dataset.set_by_id(111, &repr).is_err()); + assert!(dataset.set_by_id(111, repr.clone()).is_err()); // Test renaming by ID dataset.lut.reid(-50, -52).unwrap(); // Calling this a second time will lead to an error From ec49af6d1d7c4e3193f6452d2ac49ed920864128 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Tue, 28 May 2024 21:47:26 -0600 Subject: [PATCH 02/17] Start moving dataset to the heap --- anise/src/structure/dataset/mod.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index 28d14464..305ef7bb 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -22,7 +22,6 @@ use crate::{ use core::fmt; use 
core::ops::Deref; use der::{asn1::SequenceOf, Decode, Encode, Reader, Writer}; -use heapless::Vec; use log::{error, trace}; use snafu::prelude::*; use std::mem::size_of; @@ -59,7 +58,7 @@ pub struct DataSet { pub lut: LookUpTable, pub data_checksum: u32, /// The actual data from the dataset - pub data: Vec, + pub data: Vec, } impl DataSet { @@ -409,7 +408,7 @@ impl<'a, T: DataSetT, const ENTRIES: usize> Decode<'a> for DataSet { let crc32_checksum = decoder.decode()?; // let bytes: OctetStringRef = decoder.decode()?; let data_seq: SequenceOf = decoder.decode()?; - let data: Vec = data_seq.iter().cloned().collect(); + let data: Vec = data_seq.iter().cloned().collect(); Ok(Self { metadata, lut, @@ -531,8 +530,8 @@ mod dataset_ut { // bytes: Bytes::copy_from_slice(&packed_buf), ..Default::default() }; - dataset.data.push(srp_sc.clone()).unwrap(); - dataset.data.push(full_sc.clone()).unwrap(); + dataset.data.push(srp_sc.clone()); + dataset.data.push(full_sc.clone()); dataset.set_crc32(); // And encode it. 
From 5700e39fe8f8541da12ddc91283f6dedb42f795b Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 29 May 2024 16:07:44 -0600 Subject: [PATCH 03/17] For some reason the checksum keeps changing and I don't expect this --- anise/src/structure/dataset/mod.rs | 108 ++++++++++++++++++++++++----- 1 file changed, 89 insertions(+), 19 deletions(-) diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index 305ef7bb..bb4b07d3 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -9,7 +9,7 @@ */ use self::error::{DataDecodingSnafu, DataSetLutSnafu}; use super::{ - lookuptable::{LookUpTable, LutError}, + lookuptable::{Entry, LookUpTable, LutError}, metadata::Metadata, semver::Semver, ANISE_VERSION, @@ -124,12 +124,16 @@ impl DataSet { /// Compute the CRC32 of the underlying bytes pub fn crc32(&self) -> u32 { let size = ENTRIES * size_of::(); - let mut buf = std::vec::Vec::with_capacity(size); - let _ = self.encode_to_vec(&mut buf); + let mut buf = Vec::with_capacity(size); + // Clone the data set, setting the CRC32 to zero for the CRC check. + let mut me = self.clone(); + me.data_checksum = u32::MAX; + let _ = me.encode_to_vec(&mut buf); crc32fast::hash(&buf) } /// Sets the checksum of this data. + /// NOTE: For this calculation, the data checksum field is set to u32::MAX; pub fn set_crc32(&mut self) { self.data_checksum = self.crc32(); } @@ -164,6 +168,77 @@ impl DataSet { } } + pub fn push( + &mut self, + item: T, + id: Option, + name: Option<&str>, + ) -> Result<(), DataSetError> { + // Build this entry data. + let entry = Entry { + start_idx: self.data.len() as u32, + end_idx: 0, + }; + + match id { + Some(id) => { + match name { + Some(name) => { + // Both an ID and a name + self.lut + .append(id, name, entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with ID and name", + })?; + // If the ID is the body of a system with a single object, also insert it for the system ID. 
+ if [199, 299].contains(&id) { + self.lut.append(id / 100, name, entry).with_context(|_| { + DataSetLutSnafu { + action: "pushing data with ID and name", + } + })?; + } + } + None => { + // Only an ID and no name + self.lut + .append_id(id, entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with ID only", + })?; + // If the ID is the body of a system with a single object, also insert it for the system ID. + if [199, 299].contains(&id) { + self.lut.append_id(id / 100, entry).with_context(|_| { + DataSetLutSnafu { + action: "pushing data with ID and name", + } + })?; + } + } + } + } + None => { + if name.is_some() { + // Only a name + self.lut + .append_name(name.unwrap(), entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with name only", + })?; + } else { + return Err(DataSetError::DataSetLut { + action: "pushing data", + source: LutError::NoKeyProvided, + }); + } + } + } + + self.data.push(item); + + Ok(()) + } + /// Get a copy of the data with that ID, if that ID is in the lookup table pub fn get_by_id(&self, id: NaifId) -> Result { if let Some(entry) = self.lut.by_id.get(&id) { @@ -406,7 +481,6 @@ impl<'a, T: DataSetT, const ENTRIES: usize> Decode<'a> for DataSet { let metadata = decoder.decode()?; let lut = decoder.decode()?; let crc32_checksum = decoder.decode()?; - // let bytes: OctetStringRef = decoder.decode()?; let data_seq: SequenceOf = decoder.decode()?; let data: Vec = data_seq.iter().cloned().collect(); Ok(Self { @@ -641,35 +715,31 @@ mod dataset_ut { dbg!(size_of::()); - // Initialize the overall buffer for building the data - let mut buf = vec![]; - let mut builder = DataSetBuilder::default(); - builder - .push_into(&mut buf, &srp_sc, Some(-20), Some("SRP spacecraft")) + let mut dataset = DataSet::::default(); + dataset + .push(srp_sc.clone(), Some(-20), Some("SRP spacecraft")) .unwrap(); - builder - .push_into(&mut buf, &full_sc, Some(-50), Some("Full spacecraft")) + dataset + .push(full_sc.clone(), Some(-50), 
Some("Full spacecraft")) .unwrap(); // Pushing without name as ID -51 - builder - .push_into(&mut buf, &full_sc, Some(-51), None) - .unwrap(); + dataset.push(full_sc.clone(), Some(-51), None).unwrap(); // Pushing without ID - builder - .push_into(&mut buf, &srp_sc, None, Some("ID less SRP spacecraft")) + dataset + .push(srp_sc.clone(), None, Some("ID less SRP spacecraft")) .unwrap(); - let mut dataset = builder.finalize(buf).unwrap(); - + // Make sure to set the CRC32. + dataset.set_crc32(); // And encode it. let mut ebuf = vec![]; dataset.encode_to_vec(&mut ebuf).unwrap(); - assert_eq!(ebuf.len(), 530); + assert_eq!(ebuf.len(), 523); let repr_dec = SpacecraftDataSet::from_bytes(ebuf); From 612682394936a793f09b91672945a2e77e70d2ab Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 29 May 2024 20:53:47 -0600 Subject: [PATCH 04/17] Remove Entry from LUT --- anise/src/naif/kpl/parser.rs | 20 ++-- anise/src/structure/dataset/builder.rs | 111 ---------------------- anise/src/structure/dataset/mod.rs | 122 +++++++++++------------- anise/src/structure/lookuptable.rs | 124 +++++++------------------ 4 files changed, 95 insertions(+), 282 deletions(-) delete mode 100644 anise/src/structure/dataset/builder.rs diff --git a/anise/src/naif/kpl/parser.rs b/anise/src/naif/kpl/parser.rs index ac8c40e0..b2a5de16 100644 --- a/anise/src/naif/kpl/parser.rs +++ b/anise/src/naif/kpl/parser.rs @@ -24,7 +24,7 @@ use crate::math::Matrix3; use crate::naif::kpl::fk::FKItem; use crate::naif::kpl::tpc::TPCItem; use crate::naif::kpl::Parameter; -use crate::structure::dataset::{DataSetBuilder, DataSetError, DataSetType}; +use crate::structure::dataset::{DataSetError, DataSetType}; use crate::structure::metadata::Metadata; use crate::structure::planetocentric::ellipsoid::Ellipsoid; use crate::structure::planetocentric::phaseangle::PhaseAngle; @@ -144,8 +144,7 @@ pub fn convert_tpc + fmt::Debug>( pck: P, gm: P, ) -> Result { - let mut buf = vec![]; - let mut dataset_builder = 
DataSetBuilder::default(); + let mut dataset = PlanetaryDataSet::default(); let gravity_data = parse_file::<_, TPCItem>(gm, false)?; let mut planetary_data = parse_file::<_, TPCItem>(pck, false)?; @@ -288,7 +287,7 @@ pub fn convert_tpc + fmt::Debug>( }; // Skip the DER serialization in full. - dataset_builder.push_into(&mut buf, &constant, Some(object_id), None)?; + dataset.push(constant, Some(object_id), None)?; info!("Added {object_id}"); } _ => error!( @@ -302,9 +301,9 @@ pub fn convert_tpc + fmt::Debug>( } } - println!("Added {} items", dataset_builder.dataset.lut.by_id.len()); + println!("Added {} items", dataset.lut.by_id.len()); - let mut dataset = dataset_builder.finalize(buf)?; + dataset.set_crc32(); dataset.metadata = Metadata::default(); dataset.metadata.dataset_type = DataSetType::PlanetaryData; @@ -317,8 +316,7 @@ pub fn convert_fk + fmt::Debug>( fk_file_path: P, show_comments: bool, ) -> Result { - let mut buf = vec![]; - let mut dataset_builder = DataSetBuilder::default(); + let mut dataset = EulerParameterDataSet::default(); let assignments = parse_file::<_, FKItem>(fk_file_path, show_comments)?; @@ -373,7 +371,7 @@ pub fn convert_fk + fmt::Debug>( } .into(); - dataset_builder.push_into(&mut buf, &q, Some(id), item.name.as_deref())?; + dataset.push(q, Some(id), item.name.as_deref())?; } else if let Some(matrix) = item.data.get(&Parameter::Matrix) { let mat_data = matrix.to_vec_f64().unwrap(); let rot_mat = Matrix3::new( @@ -393,11 +391,11 @@ pub fn convert_fk + fmt::Debug>( rot_mat, rot_mat_dt: None, }; - dataset_builder.push_into(&mut buf, &dcm.into(), Some(id), item.name.as_deref())?; + dataset.push(dcm.into(), Some(id), item.name.as_deref())?; } } - let mut dataset: EulerParameterDataSet = dataset_builder.finalize(buf)?; + dataset.set_crc32(); dataset.metadata = Metadata::default(); dataset.metadata.dataset_type = DataSetType::EulerParameterData; diff --git a/anise/src/structure/dataset/builder.rs b/anise/src/structure/dataset/builder.rs deleted 
file mode 100644 index 5bc93edb..00000000 --- a/anise/src/structure/dataset/builder.rs +++ /dev/null @@ -1,111 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-onward Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ -use crate::{ - structure::lookuptable::{Entry, LutError}, - NaifId, -}; -use bytes::Bytes; -use snafu::prelude::*; - -use super::{ - error::{DataSetError, DataSetLutSnafu}, - DataSet, DataSetT, -}; - -/// Dataset builder allows building a dataset. It requires allocations. -#[derive(Clone, Default, Debug)] -pub struct DataSetBuilder { - pub dataset: DataSet, -} - -impl<'a, T: DataSetT, const ENTRIES: usize> DataSetBuilder { - pub fn push_into( - &mut self, - buf: &mut Vec, - data: &T, - id: Option, - name: Option<&'a str>, - ) -> Result<(), DataSetError> { - let mut this_buf = vec![]; - data.encode_to_vec(&mut this_buf).unwrap(); - // Build this entry data. - let entry = Entry { - start_idx: buf.len() as u32, - end_idx: (buf.len() + this_buf.len()) as u32, - }; - - match id { - Some(id) => { - match name { - Some(name) => { - // Both an ID and a name - self.dataset.lut.append(id, name, entry).with_context(|_| { - DataSetLutSnafu { - action: "pushing data with ID and name", - } - })?; - // If the ID is the body of a system with a single object, also insert it for the system ID. 
- if [199, 299].contains(&id) { - self.dataset - .lut - .append(id / 100, name, entry) - .with_context(|_| DataSetLutSnafu { - action: "pushing data with ID and name", - })?; - } - } - None => { - // Only an ID and no name - self.dataset.lut.append_id(id, entry).with_context(|_| { - DataSetLutSnafu { - action: "pushing data with ID only", - } - })?; - // If the ID is the body of a system with a single object, also insert it for the system ID. - if [199, 299].contains(&id) { - self.dataset - .lut - .append_id(id / 100, entry) - .with_context(|_| DataSetLutSnafu { - action: "pushing data with ID and name", - })?; - } - } - } - } - None => { - if name.is_some() { - // Only a name - self.dataset - .lut - .append_name(name.unwrap(), entry) - .with_context(|_| DataSetLutSnafu { - action: "pushing data with name only", - })?; - } else { - return Err(DataSetError::DataSetLut { - action: "pushing data", - source: LutError::NoKeyProvided, - }); - } - } - } - - buf.extend_from_slice(&this_buf); - - Ok(()) - } - - pub fn finalize(mut self, buf: Vec) -> Result, DataSetError> { - // self.dataset.bytes = Bytes::copy_from_slice(&buf); - self.dataset.set_crc32(); - Ok(self.dataset) - } -} diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index bb4b07d3..9c8c76de 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -9,7 +9,7 @@ */ use self::error::{DataDecodingSnafu, DataSetLutSnafu}; use super::{ - lookuptable::{Entry, LookUpTable, LutError}, + lookuptable::{LookUpTable, LutError}, metadata::Metadata, semver::Semver, ANISE_VERSION, @@ -37,11 +37,9 @@ macro_rules! io_imports { io_imports!(); -mod builder; mod datatype; mod error; -pub use builder::DataSetBuilder; pub use datatype::DataSetType; pub use error::DataSetError; @@ -174,11 +172,7 @@ impl DataSet { id: Option, name: Option<&str>, ) -> Result<(), DataSetError> { - // Build this entry data. 
- let entry = Entry { - start_idx: self.data.len() as u32, - end_idx: 0, - }; + let index = self.data.len() as u32; match id { Some(id) => { @@ -186,13 +180,13 @@ impl DataSet { Some(name) => { // Both an ID and a name self.lut - .append(id, name, entry) + .append(id, name, index) .with_context(|_| DataSetLutSnafu { action: "pushing data with ID and name", })?; // If the ID is the body of a system with a single object, also insert it for the system ID. if [199, 299].contains(&id) { - self.lut.append(id / 100, name, entry).with_context(|_| { + self.lut.append(id / 100, name, index).with_context(|_| { DataSetLutSnafu { action: "pushing data with ID and name", } @@ -202,13 +196,13 @@ impl DataSet { None => { // Only an ID and no name self.lut - .append_id(id, entry) + .append_id(id, index) .with_context(|_| DataSetLutSnafu { action: "pushing data with ID only", })?; // If the ID is the body of a system with a single object, also insert it for the system ID. if [199, 299].contains(&id) { - self.lut.append_id(id / 100, entry).with_context(|_| { + self.lut.append_id(id / 100, index).with_context(|_| { DataSetLutSnafu { action: "pushing data with ID and name", } @@ -221,7 +215,7 @@ impl DataSet { if name.is_some() { // Only a name self.lut - .append_name(name.unwrap(), entry) + .append_name(name.unwrap(), index) .with_context(|_| DataSetLutSnafu { action: "pushing data with name only", })?; @@ -241,13 +235,13 @@ impl DataSet { /// Get a copy of the data with that ID, if that ID is in the lookup table pub fn get_by_id(&self, id: NaifId) -> Result { - if let Some(entry) = self.lut.by_id.get(&id) { + if let Some(index) = self.lut.by_id.get(&id) { // Found the ID self.data - .get(entry.start_idx as usize) + .get(*index as usize) .cloned() - .ok_or_else(|| entry.decoding_error()) - .with_context(|_| DataDecodingSnafu { + .ok_or_else(|| LutError::InvalidIndex { index: *index }) + .with_context(|_| DataSetLutSnafu { action: "fetching by ID", }) } else { @@ -262,12 +256,12 @@ 
impl DataSet { /// This will return an error if the ID is not in the lookup table. /// Note that this function requires a new heap allocation to change the underlying dataset pub fn set_by_id(&mut self, id: NaifId, new_value: T) -> Result<(), DataSetError> { - if let Some(entry) = self.lut.by_id.get(&id) { + if let Some(index) = self.lut.by_id.get(&id) { *self .data - .get_mut(entry.start_idx as usize) - .ok_or_else(|| entry.decoding_error()) - .with_context(|_| DataDecodingSnafu { + .get_mut(*index as usize) + .ok_or_else(|| LutError::InvalidIndex { index: *index }) + .with_context(|_| DataSetLutSnafu { action: "fetching by ID", })? = new_value; @@ -284,18 +278,18 @@ impl DataSet { /// This will return an error if the ID is not in the lookup table. /// Note that this function requires a new heap allocation to change the underlying dataset pub fn rm_by_id(&mut self, id: NaifId) -> Result<(), DataSetError> { - if let Some(entry) = self.lut.by_id.remove(&id) { + if let Some(index) = self.lut.by_id.remove(&id) { *self .data - .get_mut(entry.start_idx as usize) - .ok_or_else(|| entry.decoding_error()) - .with_context(|_| DataDecodingSnafu { + .get_mut(index as usize) + .ok_or_else(|| LutError::InvalidIndex { index }) + .with_context(|_| DataSetLutSnafu { action: "fetching by ID", })? = T::default(); // Search the names for that same entry. 
- for (name, name_entry) in &self.lut.by_name.clone() { - if name_entry == &entry { + for (name, name_index) in &self.lut.by_name.clone() { + if name_index == &index { self.lut.rmname(name).with_context(|_| DataSetLutSnafu { action: "removing by ID", })?; @@ -314,13 +308,13 @@ impl DataSet { /// Get a copy of the data with that name, if that name is in the lookup table pub fn get_by_name(&self, name: &str) -> Result { - if let Some(entry) = self.lut.by_name.get(&name.try_into().unwrap()) { + if let Some(index) = self.lut.by_name.get(&name.try_into().unwrap()) { self.data - .get(entry.start_idx as usize) + .get(*index as usize) .cloned() - .ok_or_else(|| entry.decoding_error()) - .with_context(|_| DataDecodingSnafu { - action: "fetching by ID", + .ok_or_else(|| LutError::InvalidIndex { index: *index }) + .with_context(|_| DataSetLutSnafu { + action: "fetching by name", }) } else { Err(DataSetError::DataSetLut { @@ -336,12 +330,12 @@ impl DataSet { /// This will return an error if the name is not in the lookup table. /// Note that this function requires a new heap allocation to change the underlying dataset pub fn set_by_name(&mut self, name: &str, new_value: T) -> Result<(), DataSetError> { - if let Some(entry) = self.lut.by_name.get(&name.try_into().unwrap()) { + if let Some(index) = self.lut.by_name.get(&name.try_into().unwrap()) { *self .data - .get_mut(entry.start_idx as usize) - .ok_or_else(|| entry.decoding_error()) - .with_context(|_| DataDecodingSnafu { + .get_mut(*index as usize) + .ok_or_else(|| LutError::InvalidIndex { index: *index }) + .with_context(|_| DataSetLutSnafu { action: "fetching by ID", })? = new_value; @@ -360,18 +354,18 @@ impl DataSet { /// This will return an error if the name is not in the lookup table. 
/// Note that this function requires a new heap allocation to change the underlying dataset pub fn rm_by_name(&mut self, name: &str) -> Result<(), DataSetError> { - if let Some(entry) = self.lut.by_name.remove(&name.try_into().unwrap()) { + if let Some(index) = self.lut.by_name.remove(&name.try_into().unwrap()) { *self .data - .get_mut(entry.start_idx as usize) - .ok_or_else(|| entry.decoding_error()) - .with_context(|_| DataDecodingSnafu { + .get_mut(index as usize) + .ok_or_else(|| LutError::InvalidIndex { index }) + .with_context(|_| DataSetLutSnafu { action: "fetching by ID", })? = T::default(); // Search the names for that same entry. - for (id, id_entry) in &self.lut.by_id.clone() { - if id_entry == &entry { + for (id, id_index) in &self.lut.by_id.clone() { + if id_index == &index { self.lut.rmid(*id).with_context(|_| DataSetLutSnafu { action: "removing by name", })?; @@ -509,13 +503,11 @@ mod dataset_ut { use std::mem::size_of; use crate::structure::{ - dataset::DataSetBuilder, - lookuptable::Entry, spacecraft::{DragData, Inertia, Mass, SRPData, SpacecraftData}, SpacecraftDataSet, }; - use super::{DataSet, Decode, Encode, LookUpTable}; + use super::{DataSet, Decode, Encode}; #[test] fn zero_repr() { @@ -567,18 +559,16 @@ mod dataset_ut { let mut this_buf = vec![]; full_sc.encode_to_vec(&mut this_buf).unwrap(); - let end_idx = this_buf.len() as u32; + let end_idx = this_buf.len(); // Build this entry data. 
- let full_sc_entry = Entry { - start_idx: 0, - end_idx, - }; + let full_sc_entry = 0..end_idx; + // Copy into the packed buffer for (i, byte) in this_buf.iter().enumerate() { packed_buf[i] = *byte; } // Check that we can decode what we have copied so far - let full_sc_dec = SpacecraftData::from_der(&packed_buf[full_sc_entry.as_range()]).unwrap(); + let full_sc_dec = SpacecraftData::from_der(&packed_buf[full_sc_entry]).unwrap(); assert_eq!(full_sc_dec, full_sc); // Encode the other entry let mut this_buf = vec![]; @@ -587,25 +577,21 @@ mod dataset_ut { for (i, byte) in this_buf.iter().enumerate() { packed_buf[i + end_idx as usize] = *byte; } - let srp_sc_entry = Entry { - start_idx: end_idx, - end_idx: end_idx + this_buf.len() as u32, - }; + let srp_sc_entry = end_idx..end_idx + this_buf.len(); // Check that we can decode the next entry - let srp_sc_dec = SpacecraftData::from_der(&packed_buf[srp_sc_entry.as_range()]).unwrap(); + let srp_sc_dec = SpacecraftData::from_der(&packed_buf[srp_sc_entry]).unwrap(); assert_eq!(srp_sc_dec, srp_sc); - // Build the lookup table - let mut lut = LookUpTable::default(); - lut.append(-20, "SRP spacecraft", srp_sc_entry).unwrap(); - lut.append(-50, "Full spacecraft", full_sc_entry).unwrap(); // Build the dataset - let mut dataset = DataSet { - lut, - // bytes: Bytes::copy_from_slice(&packed_buf), - ..Default::default() - }; - dataset.data.push(srp_sc.clone()); - dataset.data.push(full_sc.clone()); + let mut dataset = DataSet::default(); + + // Build the lookup table + dataset + .push(srp_sc.clone(), Some(-20), Some("SRP spacecraft")) + .unwrap(); + dataset + .push(full_sc.clone(), Some(-50), Some("Full spacecraft")) + .unwrap(); + dataset.set_crc32(); // And encode it. 
@@ -739,7 +725,7 @@ mod dataset_ut { let mut ebuf = vec![]; dataset.encode_to_vec(&mut ebuf).unwrap(); - assert_eq!(ebuf.len(), 523); + assert_eq!(ebuf.len(), 506); let repr_dec = SpacecraftDataSet::from_bytes(ebuf); diff --git a/anise/src/structure/lookuptable.rs b/anise/src/structure/lookuptable.rs index 963225d5..be8ca467 100644 --- a/anise/src/structure/lookuptable.rs +++ b/anise/src/structure/lookuptable.rs @@ -15,7 +15,7 @@ use heapless::{FnvIndexMap, String}; use log::warn; use snafu::prelude::*; -use crate::{errors::DecodingError, NaifId}; +use crate::NaifId; /// Maximum length of a look up table name string pub const KEY_NAME_LEN: usize = 32; @@ -37,51 +37,8 @@ pub enum LutError { UnknownId { id: NaifId }, #[snafu(display("name {name} not in look up table"))] UnknownName { name: String }, -} - -/// A lookup table entry contains the start and end indexes in the data array of the data that is sought after. -/// -/// # Implementation note -/// This data is stored as a u32 to ensure that the same binary representation works on all platforms. -/// In fact, the size of the usize type varies based on whether this is a 32 or 64 bit platform. -#[derive(Copy, Clone, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Entry { - pub start_idx: u32, - pub end_idx: u32, -} - -impl Entry { - pub(crate) fn as_range(&self) -> core::ops::Range { - self.start_idx as usize..self.end_idx as usize - } - /// Returns a pre-populated decoding error - pub(crate) fn decoding_error(&self) -> DecodingError { - DecodingError::InaccessibleBytes { - start: self.start_idx as usize, - end: self.end_idx as usize, - size: (self.end_idx - self.start_idx) as usize, - } - } -} - -impl Encode for Entry { - fn encoded_len(&self) -> der::Result { - self.start_idx.encoded_len()? + self.end_idx.encoded_len()? 
- } - - fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - self.start_idx.encode(encoder)?; - self.end_idx.encode(encoder) - } -} - -impl<'a> Decode<'a> for Entry { - fn decode>(decoder: &mut R) -> der::Result { - Ok(Self { - start_idx: decoder.decode()?, - end_idx: decoder.decode()?, - }) - } + #[snafu(display("Look up table index is not in dataset"))] + InvalidIndex { index: u32 }, } /// A LookUpTable allows finding the [Entry] associated with either an ID or a name. @@ -91,38 +48,38 @@ impl<'a> Decode<'a> for Entry { #[derive(Clone, Default, Debug, PartialEq, Eq)] pub struct LookUpTable { /// Unique IDs of each item in the - pub by_id: FnvIndexMap, + pub by_id: FnvIndexMap, /// Corresponding index for each hash - pub by_name: FnvIndexMap, Entry, ENTRIES>, + pub by_name: FnvIndexMap, u32, ENTRIES>, } impl LookUpTable { - pub fn append(&mut self, id: i32, name: &str, entry: Entry) -> Result<(), LutError> { + pub fn append(&mut self, id: i32, name: &str, index: u32) -> Result<(), LutError> { self.by_id - .insert(id, entry) + .insert(id, index) .map_err(|_| LutError::IdLutFull { max_slots: ENTRIES })?; self.by_name - .insert(name.try_into().unwrap(), entry) + .insert(name.try_into().unwrap(), index) .map_err(|_| LutError::NameLutFull { max_slots: ENTRIES })?; Ok(()) } - pub fn append_id(&mut self, id: i32, entry: Entry) -> Result<(), LutError> { + pub fn append_id(&mut self, id: i32, index: u32) -> Result<(), LutError> { self.by_id - .insert(id, entry) + .insert(id, index) .map_err(|_| LutError::IdLutFull { max_slots: ENTRIES })?; Ok(()) } - pub fn append_name(&mut self, name: &str, entry: Entry) -> Result<(), LutError> { + pub fn append_name(&mut self, name: &str, index: u32) -> Result<(), LutError> { self.by_name - .insert(name.try_into().unwrap(), entry) + .insert(name.try_into().unwrap(), index) .map_err(|_| LutError::NameLutFull { max_slots: ENTRIES })?; Ok(()) } /// Returns the list of entries of this LUT - pub fn entries(&self) -> FnvIndexMap, 
Option>), ENTRIES> { + pub fn entries(&self) -> FnvIndexMap, Option>), ENTRIES> { let mut rtn = FnvIndexMap::default(); for (id, entry) in &self.by_id { @@ -225,28 +182,28 @@ impl LookUpTable { &self, ) -> ( SequenceOf, - SequenceOf, + SequenceOf, SequenceOf, - SequenceOf, + SequenceOf, ) { // Build the list of entries - let mut id_entries = SequenceOf::::new(); - let mut name_entries = SequenceOf::::new(); + let mut id_entries = SequenceOf::::new(); + let mut name_entries = SequenceOf::::new(); // Build the list of keys let mut ids = SequenceOf::::new(); - for (id, entry) in &self.by_id { + for (id, index) in &self.by_id { ids.add(*id).unwrap(); - id_entries.add(*entry).unwrap(); + id_entries.add(*index).unwrap(); } // Build the list of names let mut names = SequenceOf::::new(); - for (name, entry) in &self.by_name { + for (name, index) in &self.by_name { names .add(OctetStringRef::new(name.as_bytes()).unwrap()) .unwrap(); - name_entries.add(*entry).unwrap(); + name_entries.add(*index).unwrap(); } (ids, id_entries, names, name_entries) @@ -276,12 +233,12 @@ impl<'a, const ENTRIES: usize> Decode<'a> for LookUpTable { // Decode as sequences and use that to build the look up table. 
let mut lut = Self::default(); let ids: SequenceOf = decoder.decode()?; - let id_entries: SequenceOf = decoder.decode()?; + let id_entries: SequenceOf = decoder.decode()?; let names: SequenceOf = decoder.decode()?; - let name_entries: SequenceOf = decoder.decode()?; + let name_entries: SequenceOf = decoder.decode()?; - for (id, entry) in ids.iter().zip(id_entries.iter()) { - lut.by_id.insert(*id, *entry).unwrap(); + for (id, index) in ids.iter().zip(id_entries.iter()) { + lut.by_id.insert(*id, *index).unwrap(); } for (name, entry) in names.iter().zip(name_entries.iter()) { @@ -307,7 +264,7 @@ impl<'a, const ENTRIES: usize> Decode<'a> for LookUpTable { #[cfg(test)] mod lut_ut { - use super::{Decode, Encode, Entry, LookUpTable}; + use super::{Decode, Encode, LookUpTable}; #[test] fn zero_repr() { let repr = LookUpTable::<2>::default(); @@ -326,17 +283,9 @@ mod lut_ut { #[test] fn repr_ids_only() { let mut repr = LookUpTable::<32>::default(); - let num_bytes = 363; for i in 0..32 { let id = -20 - i; - repr.append_id( - id, - Entry { - start_idx: (i * num_bytes) as u32, - end_idx: ((i + 1) * num_bytes) as u32, - }, - ) - .unwrap(); + repr.append_id(id, 0).unwrap(); } let mut buf = vec![]; @@ -354,21 +303,12 @@ mod lut_ut { let mut names = Vec::new(); let mut repr = LookUpTable::::default(); - let num_bytes = 363; - for i in 0..LUT_SIZE { names.push(format!("Name{}", i)); } for (i, name) in names.iter().enumerate().take(LUT_SIZE) { - repr.append_name( - name, - Entry { - start_idx: (i * num_bytes) as u32, - end_idx: ((i + 1) * num_bytes) as u32, - }, - ) - .unwrap(); + repr.append_name(name, i as u32).unwrap(); } let mut buf = vec![]; @@ -384,16 +324,16 @@ mod lut_ut { let mut lut = LookUpTable::<8>::default(); assert!(lut.check_integrity()); // Empty, passes - lut.append(1, "a", Entry::default()).unwrap(); + lut.append(1, "a", 0).unwrap(); assert!(lut.check_integrity()); // ID only, passes - lut.append_name("a", Entry::default()).unwrap(); + lut.append_name("a", 
1).unwrap(); assert!(lut.check_integrity()); // Name added, passes - lut.append(2, "b", Entry::default()).unwrap(); + lut.append(2, "b", 11).unwrap(); assert!(lut.check_integrity()); // Second ID, name missing, fails - lut.append_name("b", Entry::default()).unwrap(); + lut.append_name("b", 111).unwrap(); assert!(lut.check_integrity()); // Name added, passes } } From ca049e85db89777f5fc4ab5c6b46713c0b08d4df Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 29 May 2024 21:05:21 -0600 Subject: [PATCH 05/17] test_anise_conversion overflows its stack Could it be an issue of having a SequenceOf within a SequenceOf? --- anise/src/structure/lookuptable.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/anise/src/structure/lookuptable.rs b/anise/src/structure/lookuptable.rs index be8ca467..bcc4ef61 100644 --- a/anise/src/structure/lookuptable.rs +++ b/anise/src/structure/lookuptable.rs @@ -327,13 +327,13 @@ mod lut_ut { lut.append(1, "a", 0).unwrap(); assert!(lut.check_integrity()); // ID only, passes - lut.append_name("a", 1).unwrap(); + lut.append_name("a", 0).unwrap(); assert!(lut.check_integrity()); // Name added, passes lut.append(2, "b", 11).unwrap(); assert!(lut.check_integrity()); // Second ID, name missing, fails - lut.append_name("b", 111).unwrap(); + lut.append_name("b", 11).unwrap(); assert!(lut.check_integrity()); // Name added, passes } } From 9717e1b99ad28b101c86556b5084ba5b8c7c191d Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 29 May 2024 21:06:24 -0600 Subject: [PATCH 06/17] Improve usage of snafu --- anise-cli/src/main.rs | 72 ++++++++++---------- anise/src/almanac/aer.rs | 12 ++-- anise/src/almanac/metaload/metaalmanac.rs | 2 +- anise/src/almanac/metaload/mod.rs | 2 +- anise/src/almanac/mod.rs | 29 ++++---- anise/src/almanac/planetary.rs | 2 +- anise/src/almanac/python.rs | 2 +- anise/src/almanac/transform.rs | 24 +++---- anise/src/ephemerides/paths.rs | 2 +- anise/src/ephemerides/translate_to_parent.rs | 
21 +++--- anise/src/ephemerides/translations.rs | 4 +- anise/src/frames/frame.rs | 4 +- anise/src/naif/daf/daf.rs | 14 ++-- anise/src/naif/daf/datatypes/chebyshev.rs | 2 +- anise/src/naif/daf/datatypes/hermite.rs | 4 +- anise/src/naif/daf/file_record.rs | 4 +- anise/src/naif/daf/mut_daf.rs | 4 +- anise/src/orientations/paths.rs | 6 +- anise/src/orientations/rotate_to_parent.rs | 14 ++-- anise/src/orientations/rotations.rs | 20 +++--- anise/src/structure/dataset/mod.rs | 47 ++++++------- 21 files changed, 142 insertions(+), 149 deletions(-) diff --git a/anise-cli/src/main.rs b/anise-cli/src/main.rs index 2dbd6c3b..31b05cb6 100644 --- a/anise-cli/src/main.rs +++ b/anise-cli/src/main.rs @@ -64,7 +64,7 @@ fn main() -> Result<(), CliErrors> { crc32_checksum, } => { let path_str = file.clone(); - let bytes = file2heap!(file).with_context(|_| AniseSnafu)?; + let bytes = file2heap!(file).context(AniseSnafu)?; // Try to load this as a dataset by first trying to load the metadata if let Ok(metadata) = Metadata::decode_header(&bytes) { // Now, we can load this depending on the kind of data that it is @@ -73,21 +73,21 @@ fn main() -> Result<(), CliErrors> { DataSetType::SpacecraftData => { // Decode as spacecraft data let dataset = SpacecraftDataSet::try_from_bytes(bytes) - .with_context(|_| CliDataSetSnafu)?; + .context(CliDataSetSnafu)?; println!("{dataset}"); Ok(()) } DataSetType::PlanetaryData => { // Decode as planetary data let dataset = PlanetaryDataSet::try_from_bytes(bytes) - .with_context(|_| CliDataSetSnafu)?; + .context(CliDataSetSnafu)?; println!("{dataset}"); Ok(()) } DataSetType::EulerParameterData => { // Decode as euler parameter data let dataset = EulerParameterDataSet::try_from_bytes(bytes) - .with_context(|_| CliDataSetSnafu)?; + .context(CliDataSetSnafu)?; println!("{dataset}"); Ok(()) } @@ -97,19 +97,19 @@ fn main() -> Result<(), CliErrors> { let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); match file_record .identification() 
- .with_context(|_| CliFileRecordSnafu)? + .context(CliFileRecordSnafu)? { "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); BPC::check_then_parse(bytes, crc32_checksum) - .with_context(|_| CliDAFSnafu)?; + .context(CliDAFSnafu)?; info!("[OK] Checksum matches"); Ok(()) } "SPK" => { info!("Loading {path_str:?} as DAF/SPK"); SPK::check_then_parse(bytes, crc32_checksum) - .with_context(|_| CliDAFSnafu)?; + .context(CliDAFSnafu)?; info!("[OK] Checksum matches"); Ok(()) } @@ -119,19 +119,19 @@ fn main() -> Result<(), CliErrors> { } Actions::Inspect { file } => { let path_str = file.clone(); - let bytes = file2heap!(file).with_context(|_| AniseSnafu)?; + let bytes = file2heap!(file).context(AniseSnafu)?; // Load the header only let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); match file_record .identification() - .with_context(|_| CliFileRecordSnafu)? + .context(CliFileRecordSnafu)? { "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); - let pck = BPC::parse(bytes).with_context(|_| CliDAFSnafu)?; + let pck = BPC::parse(bytes).context(CliDAFSnafu)?; info!("CRC32 checksum: 0x{:X}", pck.crc32()); - if let Some(comments) = pck.comments().with_context(|_| CliDAFSnafu)? { + if let Some(comments) = pck.comments().context(CliDAFSnafu)? { println!("== COMMENTS ==\n{}== END ==", comments); } else { println!("(File has no comments)"); @@ -141,10 +141,10 @@ fn main() -> Result<(), CliErrors> { } "SPK" => { info!("Loading {path_str:?} as DAF/SPK"); - let spk = SPK::parse(bytes).with_context(|_| CliDAFSnafu)?; + let spk = SPK::parse(bytes).context(CliDAFSnafu)?; info!("CRC32 checksum: 0x{:X}", spk.crc32()); - if let Some(comments) = spk.comments().with_context(|_| CliDAFSnafu)? { + if let Some(comments) = spk.comments().context(CliDAFSnafu)? 
{ println!("== COMMENTS ==\n{}== END ==", comments); } else { println!("(File has no comments)"); @@ -162,11 +162,11 @@ fn main() -> Result<(), CliErrors> { gmfile, outfile, } => { - let dataset = convert_tpc(pckfile, gmfile).with_context(|_| CliDataSetSnafu)?; + let dataset = convert_tpc(pckfile, gmfile).context(CliDataSetSnafu)?; dataset .save_as(&outfile, false) - .with_context(|_| CliDataSetSnafu)?; + .context(CliDataSetSnafu)?; Ok(()) } @@ -175,7 +175,7 @@ fn main() -> Result<(), CliErrors> { dataset .save_as(&outfile, false) - .with_context(|_| CliDataSetSnafu)?; + .context(CliDataSetSnafu)?; Ok(()) } @@ -194,29 +194,29 @@ fn main() -> Result<(), CliErrors> { ); let path_str = input.clone(); - let bytes = file2heap!(input).with_context(|_| AniseSnafu)?; + let bytes = file2heap!(input).context(AniseSnafu)?; // Load the header only let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); match file_record .identification() - .with_context(|_| CliFileRecordSnafu)? + .context(CliFileRecordSnafu)? { "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); - let pck = BPC::parse(bytes).with_context(|_| CliDAFSnafu)?; + let pck = BPC::parse(bytes).context(CliDAFSnafu)?; let mut ids = HashSet::new(); - for summary in pck.data_summaries().with_context(|_| CliDAFSnafu)? { + for summary in pck.data_summaries().context(CliDAFSnafu)? 
{ ids.insert(summary.id()); } info!("IDs present in file: {ids:?}"); - let (summary, idx) = pck.summary_from_id(id).with_context(|_| CliDAFSnafu)?; + let (summary, idx) = pck.summary_from_id(id).context(CliDAFSnafu)?; let data_type = - DafDataType::try_from(summary.data_type_i).with_context(|_| CliDAFSnafu)?; + DafDataType::try_from(summary.data_type_i).context(CliDAFSnafu)?; ensure!( data_type == DafDataType::Type2ChebyshevTriplet, ArgumentSnafu { @@ -245,20 +245,20 @@ fn main() -> Result<(), CliErrors> { } "SPK" => { info!("Loading {path_str:?} as DAF/PCK"); - let spk = SPK::parse(bytes).with_context(|_| CliDAFSnafu)?; + let spk = SPK::parse(bytes).context(CliDAFSnafu)?; let mut ids = HashSet::new(); - for summary in spk.data_summaries().with_context(|_| CliDAFSnafu)? { + for summary in spk.data_summaries().context(CliDAFSnafu)? { ids.insert(summary.id()); } info!("IDs present in file: {ids:?}"); - let (summary, idx) = spk.summary_from_id(id).with_context(|_| CliDAFSnafu)?; + let (summary, idx) = spk.summary_from_id(id).context(CliDAFSnafu)?; info!("Modifying {summary}"); let data_type = - DafDataType::try_from(summary.data_type_i).with_context(|_| CliDAFSnafu)?; + DafDataType::try_from(summary.data_type_i).context(CliDAFSnafu)?; ensure!( data_type == DafDataType::Type2ChebyshevTriplet, ArgumentSnafu { @@ -292,31 +292,31 @@ fn main() -> Result<(), CliErrors> { } Actions::RmDAFById { input, output, id } => { let path_str = input.clone(); - let bytes = file2heap!(input).with_context(|_| AniseSnafu)?; + let bytes = file2heap!(input).context(AniseSnafu)?; // Load the header only let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); match file_record .identification() - .with_context(|_| CliFileRecordSnafu)? + .context(CliFileRecordSnafu)? 
{ "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); - let pck = BPC::parse(bytes).with_context(|_| CliDAFSnafu)?; + let pck = BPC::parse(bytes).context(CliDAFSnafu)?; let mut ids = HashSet::new(); - for summary in pck.data_summaries().with_context(|_| CliDAFSnafu)? { + for summary in pck.data_summaries().context(CliDAFSnafu)? { ids.insert(summary.id()); } info!("IDs present in file: {ids:?}"); - let (_, idx) = pck.summary_from_id(id).with_context(|_| CliDAFSnafu)?; + let (_, idx) = pck.summary_from_id(id).context(CliDAFSnafu)?; let mut my_pck_mut = pck.to_mutable(); my_pck_mut .delete_nth_data(idx) - .with_context(|_| CliDAFSnafu)?; + .context(CliDAFSnafu)?; info!("Saving file to {output:?}"); my_pck_mut.persist(output).unwrap(); @@ -325,21 +325,21 @@ fn main() -> Result<(), CliErrors> { } "SPK" => { info!("Loading {path_str:?} as DAF/PCK"); - let spk = SPK::parse(bytes).with_context(|_| CliDAFSnafu)?; + let spk = SPK::parse(bytes).context(CliDAFSnafu)?; let mut ids = HashSet::new(); - for summary in spk.data_summaries().with_context(|_| CliDAFSnafu)? { + for summary in spk.data_summaries().context(CliDAFSnafu)? 
{ ids.insert(summary.id()); } info!("IDs present in file: {ids:?}"); - let (_, idx) = spk.summary_from_id(id).with_context(|_| CliDAFSnafu)?; + let (_, idx) = spk.summary_from_id(id).context(CliDAFSnafu)?; let mut my_spk_mut = spk.to_mutable(); my_spk_mut .delete_nth_data(idx) - .with_context(|_| CliDAFSnafu)?; + .context(CliDAFSnafu)?; info!("Saving file to {output:?}"); my_spk_mut.persist(output).unwrap(); diff --git a/anise/src/almanac/aer.rs b/anise/src/almanac/aer.rs index ba114000..518e4b90 100644 --- a/anise/src/almanac/aer.rs +++ b/anise/src/almanac/aer.rs @@ -59,14 +59,14 @@ impl Almanac { // SEZ DCM is topo to fixed let sez_dcm = tx .dcm_from_topocentric_to_body_fixed(from) - .with_context(|_| EphemerisPhysicsSnafu { action: "" }) - .with_context(|_| EphemerisSnafu { + .context(EphemerisPhysicsSnafu { action: "" }) + .context(EphemerisSnafu { action: "computing SEZ DCM for AER", })?; let tx_sez = (sez_dcm.transpose() * tx) - .with_context(|_| EphemerisPhysicsSnafu { action: "" }) - .with_context(|_| EphemerisSnafu { + .context(EphemerisPhysicsSnafu { action: "" }) + .context(EphemerisSnafu { action: "transforming transmitter to SEZ", })?; @@ -74,8 +74,8 @@ impl Almanac { let rx_in_tx_frame = self.transform_to(rx, tx.frame, None)?; // Convert into SEZ frame let rx_sez = (sez_dcm.transpose() * rx_in_tx_frame) - .with_context(|_| EphemerisPhysicsSnafu { action: "" }) - .with_context(|_| EphemerisSnafu { + .context(EphemerisPhysicsSnafu { action: "" }) + .context(EphemerisSnafu { action: "transforming received to SEZ", })?; diff --git a/anise/src/almanac/metaload/metaalmanac.rs b/anise/src/almanac/metaload/metaalmanac.rs index 361d5998..20e170f3 100644 --- a/anise/src/almanac/metaload/metaalmanac.rs +++ b/anise/src/almanac/metaload/metaalmanac.rs @@ -57,7 +57,7 @@ impl MetaAlmanac { /// Fetch all of the URIs and return a loaded Almanac pub(crate) fn _process(&mut self) -> AlmanacResult { for (fno, file) in self.files.iter_mut().enumerate() { - 
file._process().with_context(|_| MetaSnafu { + file._process().context(MetaSnafu { fno, file: file.clone(), })?; diff --git a/anise/src/almanac/metaload/mod.rs b/anise/src/almanac/metaload/mod.rs index 3056faa5..85941433 100644 --- a/anise/src/almanac/metaload/mod.rs +++ b/anise/src/almanac/metaload/mod.rs @@ -53,7 +53,7 @@ pub enum MetaAlmanacError { impl Almanac { /// Load from the provided MetaFile. fn _load_from_metafile(&self, mut metafile: MetaFile) -> AlmanacResult { - metafile._process().with_context(|_| MetaSnafu { + metafile._process().context(MetaSnafu { fno: 0_usize, file: metafile.clone(), })?; diff --git a/anise/src/almanac/mod.rs b/anise/src/almanac/mod.rs index d18b3511..a20f0ee0 100644 --- a/anise/src/almanac/mod.rs +++ b/anise/src/almanac/mod.rs @@ -123,26 +123,26 @@ impl Almanac { "PCK" => { info!("Loading as DAF/PCK"); let bpc = BPC::parse(bytes) - .with_context(|_| BPCSnafu { + .context(BPCSnafu { action: "parsing bytes", }) - .with_context(|_| OrientationSnafu { + .context(OrientationSnafu { action: "from generic loading", })?; - self.with_bpc(bpc).with_context(|_| OrientationSnafu { + self.with_bpc(bpc).context(OrientationSnafu { action: "adding BPC file to context", }) } "SPK" => { info!("Loading as DAF/SPK"); let spk = SPK::parse(bytes) - .with_context(|_| SPKSnafu { + .context(SPKSnafu { action: "parsing bytes", }) - .with_context(|_| EphemerisSnafu { + .context(EphemerisSnafu { action: "from generic loading", })?; - self.with_spk(spk).with_context(|_| EphemerisSnafu { + self.with_spk(spk).context(EphemerisSnafu { action: "adding SPK file to context", }) } @@ -160,7 +160,7 @@ impl Almanac { DataSetType::NotApplicable => unreachable!("no such ANISE data yet"), DataSetType::SpacecraftData => { // Decode as spacecraft data - let dataset = SpacecraftDataSet::try_from_bytes(bytes).with_context(|_| { + let dataset = SpacecraftDataSet::try_from_bytes(bytes).context({ TLDataSetSnafu { action: "loading as spacecraft data", } @@ -169,7 +169,7 @@ 
impl Almanac { } DataSetType::PlanetaryData => { // Decode as planetary data - let dataset = PlanetaryDataSet::try_from_bytes(bytes).with_context(|_| { + let dataset = PlanetaryDataSet::try_from_bytes(bytes).context({ TLDataSetSnafu { action: "loading as planetary data", } @@ -178,12 +178,11 @@ impl Almanac { } DataSetType::EulerParameterData => { // Decode as euler parameter data - let dataset = - EulerParameterDataSet::try_from_bytes(bytes).with_context(|_| { - TLDataSetSnafu { - action: "loading Euler parameters", - } - })?; + let dataset = EulerParameterDataSet::try_from_bytes(bytes).context({ + TLDataSetSnafu { + action: "loading Euler parameters", + } + })?; Ok(self.with_euler_parameters(dataset)) } } @@ -200,7 +199,7 @@ impl Almanac { /// Generic function that tries to load the provided path guessing to the file type. pub fn load(&self, path: &str) -> AlmanacResult { // Load the data onto the heap - let bytes = file2heap!(path).with_context(|_| LoadingSnafu { + let bytes = file2heap!(path).context(LoadingSnafu { path: path.to_string(), })?; info!("Loading almanac from {path}"); diff --git a/anise/src/almanac/planetary.rs b/anise/src/almanac/planetary.rs index 401ee891..b9e2f6eb 100644 --- a/anise/src/almanac/planetary.rs +++ b/anise/src/almanac/planetary.rs @@ -33,7 +33,7 @@ impl Almanac { Ok(self .planetary_data .get_by_id(uid.ephemeris_id) - .with_context(|_| PlanetaryDataSetSnafu { + .context(PlanetaryDataSetSnafu { action: "fetching frame by its UID via ephemeris_id", })? .to_frame(uid)) diff --git a/anise/src/almanac/python.rs b/anise/src/almanac/python.rs index 64ba123a..e1b0f98e 100644 --- a/anise/src/almanac/python.rs +++ b/anise/src/almanac/python.rs @@ -22,7 +22,7 @@ impl Almanac { Ok(self .planetary_data .get_by_id(uid.ephemeris_id) - .with_context(|_| PlanetaryDataSetSnafu { + .context(PlanetaryDataSetSnafu { action: "fetching frame by its UID via ephemeris_id", })? 
.to_frame(uid.into())) diff --git a/anise/src/almanac/transform.rs b/anise/src/almanac/transform.rs index fc7eff45..350d4a75 100644 --- a/anise/src/almanac/transform.rs +++ b/anise/src/almanac/transform.rs @@ -48,19 +48,19 @@ impl Almanac { // Translate let state = self .translate(target_frame, observer_frame, epoch, ab_corr) - .with_context(|_| EphemerisSnafu { + .context(EphemerisSnafu { action: "transform from/to", })?; // Rotate let dcm = self .rotate_from_to(target_frame, observer_frame, epoch) - .with_context(|_| OrientationSnafu { + .context(OrientationSnafu { action: "transform from/to", })?; (dcm * state) - .with_context(|_| OrientationPhysicsSnafu {}) - .with_context(|_| OrientationSnafu { + .context(OrientationPhysicsSnafu {}) + .context(OrientationSnafu { action: "transform from/to", }) } @@ -77,20 +77,20 @@ impl Almanac { ) -> AlmanacResult { let state = self .translate_to(state, observer_frame, ab_corr) - .with_context(|_| EphemerisSnafu { + .context(EphemerisSnafu { action: "transform state", })?; // Compute the frame rotation let dcm = self .rotate_from_to(state.frame, observer_frame, state.epoch) - .with_context(|_| OrientationSnafu { + .context(OrientationSnafu { action: "transform state dcm", })?; (dcm * state) - .with_context(|_| OrientationPhysicsSnafu {}) - .with_context(|_| OrientationSnafu { + .context(OrientationPhysicsSnafu {}) + .context(OrientationSnafu { action: "transform state", }) } @@ -137,20 +137,20 @@ impl Almanac { distance_unit, time_unit, ) - .with_context(|_| EphemerisSnafu { + .context(EphemerisSnafu { action: "transform provided state", })?; // Compute the frame rotation let dcm = self .rotate_from_to(from_frame, to_frame, epoch) - .with_context(|_| OrientationSnafu { + .context(OrientationSnafu { action: "transform provided state dcm", })?; (dcm * state) - .with_context(|_| OrientationPhysicsSnafu {}) - .with_context(|_| OrientationSnafu { + .context(OrientationPhysicsSnafu {}) + .context(OrientationSnafu { action: 
"transform provided state", }) } diff --git a/anise/src/ephemerides/paths.rs b/anise/src/ephemerides/paths.rs index 5ef80637..7746d401 100644 --- a/anise/src/ephemerides/paths.rs +++ b/anise/src/ephemerides/paths.rs @@ -36,7 +36,7 @@ impl Almanac { for maybe_spk in self.spk_data.iter().take(self.num_loaded_spk()).rev() { let spk = maybe_spk.as_ref().unwrap(); - for summary in spk.data_summaries().with_context(|_| SPKSnafu { + for summary in spk.data_summaries().context(SPKSnafu { action: "finding ephemeris root", })? { // This summary exists, so we need to follow the branch of centers up the tree. diff --git a/anise/src/ephemerides/translate_to_parent.rs b/anise/src/ephemerides/translate_to_parent.rs index 98f8d0a9..996dad63 100644 --- a/anise/src/ephemerides/translate_to_parent.rs +++ b/anise/src/ephemerides/translate_to_parent.rs @@ -55,31 +55,32 @@ impl Almanac { let (pos_km, vel_km_s) = match summary.data_type()? { DafDataType::Type2ChebyshevTriplet => { - let data = spk_data - .nth_data::(idx_in_spk) - .with_context(|_| SPKSnafu { - action: "fetching data for interpolation", - })?; + let data = + spk_data + .nth_data::(idx_in_spk) + .context(SPKSnafu { + action: "fetching data for interpolation", + })?; data.evaluate(epoch, summary) - .with_context(|_| EphemInterpolationSnafu)? + .context(EphemInterpolationSnafu)? } DafDataType::Type9LagrangeUnequalStep => { let data = spk_data .nth_data::(idx_in_spk) - .with_context(|_| SPKSnafu { + .context(SPKSnafu { action: "fetching data for interpolation", })?; data.evaluate(epoch, summary) - .with_context(|_| EphemInterpolationSnafu)? + .context(EphemInterpolationSnafu)? } DafDataType::Type13HermiteUnequalStep => { let data = spk_data .nth_data::(idx_in_spk) - .with_context(|_| SPKSnafu { + .context(SPKSnafu { action: "fetching data for interpolation", })?; data.evaluate(epoch, summary) - .with_context(|_| EphemInterpolationSnafu)? + .context(EphemInterpolationSnafu)? 
} dtype => { return Err(EphemerisError::SPK { diff --git a/anise/src/ephemerides/translations.rs b/anise/src/ephemerides/translations.rs index 966914b6..38e84e2f 100644 --- a/anise/src/ephemerides/translations.rs +++ b/anise/src/ephemerides/translations.rs @@ -161,7 +161,7 @@ impl Almanac { if ab_corr.stellar { // Modifications based on transmission versus reception case is done in the function directly. rel_pos_km = stellar_aberration(rel_pos_km, obs_ssb_vel_km_s, ab_corr) - .with_context(|_| EphemerisPhysicsSnafu { + .context(EphemerisPhysicsSnafu { action: "computing stellar aberration", })?; } @@ -231,7 +231,7 @@ impl Almanac { frame: from_frame, }; - (input_state + frame_state).with_context(|_| EphemerisPhysicsSnafu { + (input_state + frame_state).context(EphemerisPhysicsSnafu { action: "translating states (likely a bug!)", }) } diff --git a/anise/src/frames/frame.rs b/anise/src/frames/frame.rs index ce684788..99698128 100644 --- a/anise/src/frames/frame.rs +++ b/anise/src/frames/frame.rs @@ -67,12 +67,12 @@ impl Frame { /// Attempts to create a new frame from its center and reference frame name. /// This function is compatible with the CCSDS OEM names. 
pub fn from_name(center: &str, ref_frame: &str) -> Result { - let ephemeris_id = id_to_celestial_name(center).with_context(|_| EphemerisSnafu { + let ephemeris_id = id_to_celestial_name(center).context(EphemerisSnafu { action: "converting center name to its ID", })?; let orientation_id = - id_to_orientation_name(ref_frame).with_context(|_| OrientationSnafu { + id_to_orientation_name(ref_frame).context(OrientationSnafu { action: "converting reference frame to its ID", })?; diff --git a/anise/src/naif/daf/daf.rs b/anise/src/naif/daf/daf.rs index 01a49d74..a52ed62f 100644 --- a/anise/src/naif/daf/daf.rs +++ b/anise/src/naif/daf/daf.rs @@ -84,7 +84,7 @@ impl GenericDAF { end: FileRecord::SIZE, size: self.bytes.len(), }) - .with_context(|_| DecodingDataSnafu { + .context(DecodingDataSnafu { idx: 0_usize, kind: R::NAME, })?, @@ -93,7 +93,7 @@ impl GenericDAF { // Check that the endian-ness is compatible with this platform. file_record .endianness() - .with_context(|_| FileRecordSnafu { kind: R::NAME })?; + .context(FileRecordSnafu { kind: R::NAME })?; Ok(file_record) } @@ -107,7 +107,7 @@ impl GenericDAF { end: rcrd_idx + RCRD_LEN, size: self.bytes.len(), }) - .with_context(|_| DecodingNameSnafu { kind: R::NAME })?; + .context(DecodingNameSnafu { kind: R::NAME })?; Ok(NameRecord::read_from(rcrd_bytes).unwrap()) } @@ -121,11 +121,11 @@ impl GenericDAF { end: rcrd_idx + RCRD_LEN, size: self.bytes.len(), }) - .with_context(|_| DecodingSummarySnafu { kind: R::NAME })?; + .context(DecodingSummarySnafu { kind: R::NAME })?; SummaryRecord::read_from(&rcrd_bytes[..SummaryRecord::SIZE]) .ok_or(DecodingError::Casting) - .with_context(|_| DecodingSummarySnafu { kind: R::NAME }) + .context(DecodingSummarySnafu { kind: R::NAME }) } /// Parses the data summaries on the fly. 
@@ -291,7 +291,7 @@ impl GenericDAF { .into_slice(); // Convert it - S::from_f64_slice(data).with_context(|_| DecodingDataSnafu { kind: R::NAME, idx }) + S::from_f64_slice(data).context(DecodingDataSnafu { kind: R::NAME, idx }) } pub fn comments(&self) -> Result, DAFError> { @@ -404,7 +404,7 @@ impl DAF { } pub fn load(path: &str) -> Result { - let bytes = file2heap!(path).with_context(|_| IOSnafu { + let bytes = file2heap!(path).context(IOSnafu { action: format!("loading {path:?}"), })?; diff --git a/anise/src/naif/daf/datatypes/chebyshev.rs b/anise/src/naif/daf/datatypes/chebyshev.rs index cf0e7f30..47239cef 100644 --- a/anise/src/naif/daf/datatypes/chebyshev.rs +++ b/anise/src/naif/daf/datatypes/chebyshev.rs @@ -155,7 +155,7 @@ impl<'a> NAIFDataSet<'a> for Type2ChebyshevSet<'a> { // Now, build the X, Y, Z data from the record data. let record = self .nth_record(spline_idx - 1) - .with_context(|_| InterpDecodingSnafu)?; + .context(InterpDecodingSnafu)?; let normalized_time = (epoch.to_et_seconds() - record.midpoint_et_s) / radius_s; diff --git a/anise/src/naif/daf/datatypes/hermite.rs b/anise/src/naif/daf/datatypes/hermite.rs index 45bc44c7..dcaf2c91 100644 --- a/anise/src/naif/daf/datatypes/hermite.rs +++ b/anise/src/naif/daf/datatypes/hermite.rs @@ -285,7 +285,7 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { // Oh wow, this state actually exists, no interpolation needed! Ok(self .nth_record(idx) - .with_context(|_| InterpDecodingSnafu)? + .context(InterpDecodingSnafu)? 
.to_pos_vel()) } Err(idx) => { @@ -310,7 +310,7 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { let mut vys = [0.0; MAX_SAMPLES]; let mut vzs = [0.0; MAX_SAMPLES]; for (cno, idx) in (first_idx..last_idx).enumerate() { - let record = self.nth_record(idx).with_context(|_| InterpDecodingSnafu)?; + let record = self.nth_record(idx).context(InterpDecodingSnafu)?; xs[cno] = record.x_km; ys[cno] = record.y_km; zs[cno] = record.z_km; diff --git a/anise/src/naif/daf/file_record.rs b/anise/src/naif/daf/file_record.rs index 1ea431df..a871d87b 100644 --- a/anise/src/naif/daf/file_record.rs +++ b/anise/src/naif/daf/file_record.rs @@ -115,7 +115,7 @@ impl FileRecord { pub fn endianness(&self) -> Result { let str_endianness = - core::str::from_utf8(&self.endian_str).with_context(|_| ParsingSnafu)?; + core::str::from_utf8(&self.endian_str).context(ParsingSnafu)?; let file_endian = if str_endianness == "LTL-IEEE" { Endian::Little @@ -135,7 +135,7 @@ impl FileRecord { pub fn internal_filename(&self) -> Result<&str, FileRecordError> { Ok(core::str::from_utf8(&self.internal_filename) - .with_context(|_| ParsingSnafu)? + .context(ParsingSnafu)? 
.trim()) } diff --git a/anise/src/naif/daf/mut_daf.rs b/anise/src/naif/daf/mut_daf.rs index f77bc7cc..77e3b6e1 100644 --- a/anise/src/naif/daf/mut_daf.rs +++ b/anise/src/naif/daf/mut_daf.rs @@ -44,7 +44,7 @@ impl MutDAF { } pub fn load(path: &str) -> Result { - let bytes = file2heap!(path).with_context(|_| IOSnafu { + let bytes = file2heap!(path).context(IOSnafu { action: format!("loading {path:?}"), })?; @@ -63,7 +63,7 @@ impl MutDAF { end: rcrd_idx + RCRD_LEN, size, }) - .with_context(|_| DecodingNameSnafu { kind: R::NAME })?; + .context(DecodingNameSnafu { kind: R::NAME })?; rcrd_bytes.copy_from_slice(new_name_record.as_bytes()); Ok(()) } diff --git a/anise/src/orientations/paths.rs b/anise/src/orientations/paths.rs index 75dc9b3d..e3e5f242 100644 --- a/anise/src/orientations/paths.rs +++ b/anise/src/orientations/paths.rs @@ -40,7 +40,7 @@ impl Almanac { for maybe_bpc in self.bpc_data.iter().take(self.num_loaded_bpc()).rev() { let bpc = maybe_bpc.as_ref().unwrap(); - for summary in bpc.data_summaries().with_context(|_| BPCSnafu { + for summary in bpc.data_summaries().context(BPCSnafu { action: "finding orientation root", })? { // This summary exists, so we need to follow the branch of centers up the tree. 
@@ -102,7 +102,7 @@ impl Almanac { let planetary_data = self .planetary_data .get_by_id(source.orientation_id) - .with_context(|_| OrientationDataSetSnafu)?; + .context(OrientationDataSetSnafu)?; planetary_data.parent_id } }; @@ -130,7 +130,7 @@ impl Almanac { let planetary_data = self .planetary_data .get_by_id(inertial_frame_id) - .with_context(|_| OrientationDataSetSnafu)?; + .context(OrientationDataSetSnafu)?; planetary_data.parent_id } }; diff --git a/anise/src/orientations/rotate_to_parent.rs b/anise/src/orientations/rotate_to_parent.rs index 85ea81a3..58f0a178 100644 --- a/anise/src/orientations/rotate_to_parent.rs +++ b/anise/src/orientations/rotate_to_parent.rs @@ -60,13 +60,13 @@ impl Almanac { // Compute the angles and their rates let (ra_dec_w, d_ra_dec_w) = match summary.data_type()? { DafDataType::Type2ChebyshevTriplet => { - let data = bpc_data - .nth_data::(idx_in_bpc) - .with_context(|_| BPCSnafu { + let data = bpc_data.nth_data::(idx_in_bpc).context( + BPCSnafu { action: "fetching data for interpolation", - })?; + }, + )?; data.evaluate(epoch, summary) - .with_context(|_| OrientationInterpolationSnafu)? + .context(OrientationInterpolationSnafu)? 
} dtype => { return Err(OrientationError::BPC { @@ -108,7 +108,7 @@ impl Almanac { let planetary_data = self .planetary_data .get_by_id(source.orientation_id) - .with_context(|_| OrientationDataSetSnafu)?; + .context(OrientationDataSetSnafu)?; // Fetch the parent info let system_data = match self.planetary_data.get_by_id(planetary_data.parent_id) { @@ -118,7 +118,7 @@ impl Almanac { planetary_data .rotation_to_parent(epoch, &system_data) - .with_context(|_| OrientationPhysicsSnafu) + .context(OrientationPhysicsSnafu) } } } diff --git a/anise/src/orientations/rotations.rs b/anise/src/orientations/rotations.rs index 4ff5a3b4..0e65506d 100644 --- a/anise/src/orientations/rotations.rs +++ b/anise/src/orientations/rotations.rs @@ -78,17 +78,15 @@ impl Almanac { let cur_dcm = self.rotation_to_parent(Frame::from_orient_ssb(next_parent), epoch)?; if dcm_fwrd.from == cur_dcm.from { - dcm_fwrd = - (cur_dcm * dcm_fwrd.transpose()).with_context(|_| OrientationPhysicsSnafu)?; + dcm_fwrd = (cur_dcm * dcm_fwrd.transpose()).context(OrientationPhysicsSnafu)?; } else if dcm_fwrd.from == cur_dcm.to { dcm_fwrd = (dcm_fwrd * cur_dcm) - .with_context(|_| OrientationPhysicsSnafu)? + .context(OrientationPhysicsSnafu)? 
.transpose(); } else if dcm_bwrd.to == cur_dcm.from { - dcm_bwrd = (cur_dcm * dcm_bwrd).with_context(|_| OrientationPhysicsSnafu)?; + dcm_bwrd = (cur_dcm * dcm_bwrd).context(OrientationPhysicsSnafu)?; } else if dcm_bwrd.to == cur_dcm.to { - dcm_bwrd = - (dcm_bwrd.transpose() * cur_dcm).with_context(|_| OrientationPhysicsSnafu)?; + dcm_bwrd = (dcm_bwrd.transpose() * cur_dcm).context(OrientationPhysicsSnafu)?; } else { return Err(OrientationError::Unreachable); } @@ -99,17 +97,17 @@ impl Almanac { } if dcm_fwrd.from == dcm_bwrd.from { - (dcm_bwrd * dcm_fwrd.transpose()).with_context(|_| OrientationPhysicsSnafu) + (dcm_bwrd * dcm_fwrd.transpose()).context(OrientationPhysicsSnafu) } else if dcm_fwrd.from == dcm_bwrd.to { Ok((dcm_fwrd * dcm_bwrd) - .with_context(|_| OrientationPhysicsSnafu)? + .context(OrientationPhysicsSnafu)? .transpose()) } else if dcm_fwrd.to == dcm_bwrd.to { Ok((dcm_fwrd.transpose() * dcm_bwrd) - .with_context(|_| OrientationPhysicsSnafu)? + .context(OrientationPhysicsSnafu)? .transpose()) } else { - (dcm_bwrd * dcm_fwrd).with_context(|_| OrientationPhysicsSnafu) + (dcm_bwrd * dcm_fwrd).context(OrientationPhysicsSnafu) } } @@ -140,6 +138,6 @@ impl Almanac { frame: from_frame, }; - (dcm * input_state).with_context(|_| OrientationPhysicsSnafu {}) + (dcm * input_state).context(OrientationPhysicsSnafu {}) } } diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index 9c8c76de..1e52ae15 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -66,10 +66,9 @@ impl DataSet { Ok(ctx) => { trace!("[try_from_bytes] loaded context successfully"); // Check the full integrity on load of the file. 
- ctx.check_integrity() - .with_context(|_| DataSetIntegritySnafu { - action: "loading data set from bytes", - })?; + ctx.check_integrity().context(DataSetIntegritySnafu { + action: "loading data set from bytes", + })?; Ok(ctx) } Err(_) => { @@ -81,7 +80,7 @@ impl DataSet { end: 5, size: bytes.len(), }) - .with_context(|_| DataDecodingSnafu { + .context(DataDecodingSnafu { action: "checking data set version", })?; match Semver::from_der(semver_bytes) { @@ -179,14 +178,12 @@ impl DataSet { match name { Some(name) => { // Both an ID and a name - self.lut - .append(id, name, index) - .with_context(|_| DataSetLutSnafu { - action: "pushing data with ID and name", - })?; + self.lut.append(id, name, index).context(DataSetLutSnafu { + action: "pushing data with ID and name", + })?; // If the ID is the body of a system with a single object, also insert it for the system ID. if [199, 299].contains(&id) { - self.lut.append(id / 100, name, index).with_context(|_| { + self.lut.append(id / 100, name, index).context({ DataSetLutSnafu { action: "pushing data with ID and name", } @@ -195,14 +192,12 @@ impl DataSet { } None => { // Only an ID and no name - self.lut - .append_id(id, index) - .with_context(|_| DataSetLutSnafu { - action: "pushing data with ID only", - })?; + self.lut.append_id(id, index).context(DataSetLutSnafu { + action: "pushing data with ID only", + })?; // If the ID is the body of a system with a single object, also insert it for the system ID. 
if [199, 299].contains(&id) { - self.lut.append_id(id / 100, index).with_context(|_| { + self.lut.append_id(id / 100, index).context({ DataSetLutSnafu { action: "pushing data with ID and name", } @@ -216,7 +211,7 @@ impl DataSet { // Only a name self.lut .append_name(name.unwrap(), index) - .with_context(|_| DataSetLutSnafu { + .context(DataSetLutSnafu { action: "pushing data with name only", })?; } else { @@ -241,7 +236,7 @@ impl DataSet { .get(*index as usize) .cloned() .ok_or_else(|| LutError::InvalidIndex { index: *index }) - .with_context(|_| DataSetLutSnafu { + .context(DataSetLutSnafu { action: "fetching by ID", }) } else { @@ -261,7 +256,7 @@ impl DataSet { .data .get_mut(*index as usize) .ok_or_else(|| LutError::InvalidIndex { index: *index }) - .with_context(|_| DataSetLutSnafu { + .context(DataSetLutSnafu { action: "fetching by ID", })? = new_value; @@ -283,14 +278,14 @@ impl DataSet { .data .get_mut(index as usize) .ok_or_else(|| LutError::InvalidIndex { index }) - .with_context(|_| DataSetLutSnafu { + .context(DataSetLutSnafu { action: "fetching by ID", })? = T::default(); // Search the names for that same entry. for (name, name_index) in &self.lut.by_name.clone() { if name_index == &index { - self.lut.rmname(name).with_context(|_| DataSetLutSnafu { + self.lut.rmname(name).context(DataSetLutSnafu { action: "removing by ID", })?; break; @@ -313,7 +308,7 @@ impl DataSet { .get(*index as usize) .cloned() .ok_or_else(|| LutError::InvalidIndex { index: *index }) - .with_context(|_| DataSetLutSnafu { + .context(DataSetLutSnafu { action: "fetching by name", }) } else { @@ -335,7 +330,7 @@ impl DataSet { .data .get_mut(*index as usize) .ok_or_else(|| LutError::InvalidIndex { index: *index }) - .with_context(|_| DataSetLutSnafu { + .context(DataSetLutSnafu { action: "fetching by ID", })? 
= new_value; @@ -359,14 +354,14 @@ impl DataSet { .data .get_mut(index as usize) .ok_or_else(|| LutError::InvalidIndex { index }) - .with_context(|_| DataSetLutSnafu { + .context(DataSetLutSnafu { action: "fetching by ID", })? = T::default(); // Search the names for that same entry. for (id, id_index) in &self.lut.by_id.clone() { if id_index == &index { - self.lut.rmid(*id).with_context(|_| DataSetLutSnafu { + self.lut.rmid(*id).context(DataSetLutSnafu { action: "removing by name", })?; break; From e5c19db893aacff75faddbbb54f9b36bc9c64b39 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Thu, 30 May 2024 20:55:56 -0600 Subject: [PATCH 07/17] Dataset items encoded sequentially outside of a sequence --- anise/src/structure/dataset/mod.rs | 64 ++++++++++++++++++++---------- anise/src/structure/lookuptable.rs | 5 +++ 2 files changed, 49 insertions(+), 20 deletions(-) diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index 1e52ae15..29e2bf6a 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -21,7 +21,10 @@ use crate::{ }; use core::fmt; use core::ops::Deref; -use der::{asn1::SequenceOf, Decode, Encode, Reader, Writer}; +use der::{ + asn1::{OctetString, SequenceOf}, + Decode, Encode, Reader, Writer, +}; use log::{error, trace}; use snafu::prelude::*; use std::mem::size_of; @@ -427,11 +430,7 @@ impl DataSet { /// Returns the length of the LONGEST of the two look up tables pub fn len(&self) -> usize { - if self.lut.by_id.len() > self.lut.by_name.len() { - self.lut.by_id.len() - } else { - self.lut.by_name.len() - } + self.lut.len() } /// Returns whether this dataset is empty @@ -440,38 +439,63 @@ impl DataSet { } /// Returns this data as a data sequence, cloning all of the entries into this sequence. 
- fn build_data_seq(&self) -> SequenceOf { - let mut data_seq = SequenceOf::::new(); - for d in &self.data { - data_seq.add(d.clone()).unwrap(); + fn build_data_seq(&self) -> (SequenceOf, OctetString) { + let mut buf = vec![]; + let mut bytes_meta = SequenceOf::::default(); + bytes_meta.add(self.data.len() as u32).unwrap(); + for data in &self.data { + let mut this_buf = vec![]; + data.encode_to_vec(&mut this_buf).unwrap(); + bytes_meta.add(this_buf.len() as u32).unwrap(); + buf.extend_from_slice(&this_buf); } - data_seq + let bytes = OctetString::new(buf).unwrap(); + (bytes_meta, bytes) } } impl Encode for DataSet { fn encoded_len(&self) -> der::Result { + let (bytes_meta, bytes) = self.build_data_seq(); self.metadata.encoded_len()? + self.lut.encoded_len()? + self.data_checksum.encoded_len()? - + self.build_data_seq().encoded_len()? + + bytes_meta.encoded_len()? + + bytes.encoded_len()? } fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { + let (bytes_meta, bytes) = self.build_data_seq(); self.metadata.encode(encoder)?; self.lut.encode(encoder)?; self.data_checksum.encode(encoder)?; - self.build_data_seq().encode(encoder) + bytes_meta.encode(encoder)?; + bytes.encode(encoder) } } impl<'a, T: DataSetT, const ENTRIES: usize> Decode<'a> for DataSet { fn decode>(decoder: &mut D) -> der::Result { let metadata = decoder.decode()?; - let lut = decoder.decode()?; + let lut: LookUpTable = decoder.decode()?; let crc32_checksum = decoder.decode()?; - let data_seq: SequenceOf = decoder.decode()?; - let data: Vec = data_seq.iter().cloned().collect(); + // Metadata of the bytes to decode. + // The first integer contains the number of usable items in the data. + // The other integers are the encoded lengths of each of the data. 
+ let bytes_meta: SequenceOf = decoder.decode()?; + let der_octets: OctetString = decoder.decode()?; + let bytes = der_octets.as_bytes(); + + let mut data = vec![]; + + let mut idx = 0; + for meta_idx in 0..*bytes_meta.get(0).unwrap() as usize { + let next_len = *bytes_meta.get(meta_idx + 1).unwrap() as usize; + let this_data = T::from_der(&bytes[idx..idx + next_len]).unwrap(); + data.push(this_data); + idx += next_len; + } + Ok(Self { metadata, lut, @@ -511,15 +535,15 @@ mod dataset_ut { let mut buf = vec![]; repr.encode_to_vec(&mut buf).unwrap(); - assert_eq!(buf.len(), 58); + assert_eq!(buf.len(), 63); let repr_dec = DataSet::from_der(&buf).unwrap(); assert_eq!(repr, repr_dec); dbg!(repr); - assert_eq!(core::mem::size_of::>(), 288); - assert_eq!(core::mem::size_of::>(), 10368); + assert_eq!(core::mem::size_of::>(), 256); + assert_eq!(core::mem::size_of::>(), 8824); } #[test] @@ -720,7 +744,7 @@ mod dataset_ut { let mut ebuf = vec![]; dataset.encode_to_vec(&mut ebuf).unwrap(); - assert_eq!(ebuf.len(), 506); + // assert_eq!(ebuf.len(), 506); let repr_dec = SpacecraftDataSet::from_bytes(ebuf); diff --git a/anise/src/structure/lookuptable.rs b/anise/src/structure/lookuptable.rs index bcc4ef61..4f8b3c85 100644 --- a/anise/src/structure/lookuptable.rs +++ b/anise/src/structure/lookuptable.rs @@ -154,6 +154,11 @@ impl LookUpTable { } } + /// Returns the length of the LONGEST of the two look up indexes + pub fn len(&self) -> usize { + self.by_id.len().max(self.by_name.len()) + } + pub(crate) fn check_integrity(&self) -> bool { if self.by_id.is_empty() || self.by_name.is_empty() { // If either map is empty, the LUT is integral because there cannot be From a818499f17e4f0e747c4b738717b1c31b7d5236f Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Thu, 30 May 2024 23:17:53 -0600 Subject: [PATCH 08/17] Run lints and update ANISE semver --- anise/src/frames/frame.rs | 7 +++---- anise/src/naif/daf/file_record.rs | 3 +-- anise/src/structure/mod.rs | 2 +- 3 files changed, 
5 insertions(+), 7 deletions(-) diff --git a/anise/src/frames/frame.rs b/anise/src/frames/frame.rs index 99698128..76ee22e9 100644 --- a/anise/src/frames/frame.rs +++ b/anise/src/frames/frame.rs @@ -71,10 +71,9 @@ impl Frame { action: "converting center name to its ID", })?; - let orientation_id = - id_to_orientation_name(ref_frame).context(OrientationSnafu { - action: "converting reference frame to its ID", - })?; + let orientation_id = id_to_orientation_name(ref_frame).context(OrientationSnafu { + action: "converting reference frame to its ID", + })?; Ok(Self::new(ephemeris_id, orientation_id)) } diff --git a/anise/src/naif/daf/file_record.rs b/anise/src/naif/daf/file_record.rs index a871d87b..21c65e44 100644 --- a/anise/src/naif/daf/file_record.rs +++ b/anise/src/naif/daf/file_record.rs @@ -114,8 +114,7 @@ impl FileRecord { } pub fn endianness(&self) -> Result { - let str_endianness = - core::str::from_utf8(&self.endian_str).context(ParsingSnafu)?; + let str_endianness = core::str::from_utf8(&self.endian_str).context(ParsingSnafu)?; let file_endian = if str_endianness == "LTL-IEEE" { Endian::Little diff --git a/anise/src/structure/mod.rs b/anise/src/structure/mod.rs index 991555fb..7cbb90c0 100644 --- a/anise/src/structure/mod.rs +++ b/anise/src/structure/mod.rs @@ -30,7 +30,7 @@ use crate::{ /// The current version of ANISE pub const ANISE_VERSION: Semver = Semver { major: 0, - minor: 3, + minor: 4, patch: 0, }; From f10958856bfdba35a9681a426201d5888eefe9a3 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Mon, 3 Jun 2024 22:56:21 -0600 Subject: [PATCH 09/17] Format code --- anise-cli/src/main.rs | 50 ++++++++++++------------------------------- 1 file changed, 14 insertions(+), 36 deletions(-) diff --git a/anise-cli/src/main.rs b/anise-cli/src/main.rs index 31b05cb6..41e4c7e6 100644 --- a/anise-cli/src/main.rs +++ b/anise-cli/src/main.rs @@ -72,15 +72,15 @@ fn main() -> Result<(), CliErrors> { DataSetType::NotApplicable => unreachable!("no such ANISE 
data yet"), DataSetType::SpacecraftData => { // Decode as spacecraft data - let dataset = SpacecraftDataSet::try_from_bytes(bytes) - .context(CliDataSetSnafu)?; + let dataset = + SpacecraftDataSet::try_from_bytes(bytes).context(CliDataSetSnafu)?; println!("{dataset}"); Ok(()) } DataSetType::PlanetaryData => { // Decode as planetary data - let dataset = PlanetaryDataSet::try_from_bytes(bytes) - .context(CliDataSetSnafu)?; + let dataset = + PlanetaryDataSet::try_from_bytes(bytes).context(CliDataSetSnafu)?; println!("{dataset}"); Ok(()) } @@ -95,21 +95,16 @@ fn main() -> Result<(), CliErrors> { } else { // Load the header only let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); - match file_record - .identification() - .context(CliFileRecordSnafu)? - { + match file_record.identification().context(CliFileRecordSnafu)? { "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); - BPC::check_then_parse(bytes, crc32_checksum) - .context(CliDAFSnafu)?; + BPC::check_then_parse(bytes, crc32_checksum).context(CliDAFSnafu)?; info!("[OK] Checksum matches"); Ok(()) } "SPK" => { info!("Loading {path_str:?} as DAF/SPK"); - SPK::check_then_parse(bytes, crc32_checksum) - .context(CliDAFSnafu)?; + SPK::check_then_parse(bytes, crc32_checksum).context(CliDAFSnafu)?; info!("[OK] Checksum matches"); Ok(()) } @@ -123,10 +118,7 @@ fn main() -> Result<(), CliErrors> { // Load the header only let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); - match file_record - .identification() - .context(CliFileRecordSnafu)? - { + match file_record.identification().context(CliFileRecordSnafu)? 
{ "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); let pck = BPC::parse(bytes).context(CliDAFSnafu)?; @@ -164,18 +156,14 @@ fn main() -> Result<(), CliErrors> { } => { let dataset = convert_tpc(pckfile, gmfile).context(CliDataSetSnafu)?; - dataset - .save_as(&outfile, false) - .context(CliDataSetSnafu)?; + dataset.save_as(&outfile, false).context(CliDataSetSnafu)?; Ok(()) } Actions::ConvertFk { fkfile, outfile } => { let dataset = convert_fk(fkfile, false).unwrap(); - dataset - .save_as(&outfile, false) - .context(CliDataSetSnafu)?; + dataset.save_as(&outfile, false).context(CliDataSetSnafu)?; Ok(()) } @@ -198,10 +186,7 @@ fn main() -> Result<(), CliErrors> { // Load the header only let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); - match file_record - .identification() - .context(CliFileRecordSnafu)? - { + match file_record.identification().context(CliFileRecordSnafu)? { "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); let pck = BPC::parse(bytes).context(CliDAFSnafu)?; @@ -296,10 +281,7 @@ fn main() -> Result<(), CliErrors> { // Load the header only let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); - match file_record - .identification() - .context(CliFileRecordSnafu)? - { + match file_record.identification().context(CliFileRecordSnafu)? 
{ "PCK" => { info!("Loading {path_str:?} as DAF/PCK"); let pck = BPC::parse(bytes).context(CliDAFSnafu)?; @@ -314,9 +296,7 @@ fn main() -> Result<(), CliErrors> { let (_, idx) = pck.summary_from_id(id).context(CliDAFSnafu)?; let mut my_pck_mut = pck.to_mutable(); - my_pck_mut - .delete_nth_data(idx) - .context(CliDAFSnafu)?; + my_pck_mut.delete_nth_data(idx).context(CliDAFSnafu)?; info!("Saving file to {output:?}"); my_pck_mut.persist(output).unwrap(); @@ -337,9 +317,7 @@ fn main() -> Result<(), CliErrors> { let (_, idx) = spk.summary_from_id(id).context(CliDAFSnafu)?; let mut my_spk_mut = spk.to_mutable(); - my_spk_mut - .delete_nth_data(idx) - .context(CliDAFSnafu)?; + my_spk_mut.delete_nth_data(idx).context(CliDAFSnafu)?; info!("Saving file to {output:?}"); my_spk_mut.persist(output).unwrap(); From 1211af343751eff6d5d51032e8df61aa37680e60 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Mon, 3 Jun 2024 23:00:24 -0600 Subject: [PATCH 10/17] Actually update pca file --- data/pck11.pca | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/data/pck11.pca b/data/pck11.pca index 40127161..6602d8fb 100644 --- a/data/pck11.pca +++ b/data/pck11.pca @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b1bb2589fa6ef9829a420d5b3fe47d1c7ca3850004fd03d0882a036eb3fd3826 -size 38133 +oid sha256:b3318f316f46b79b089b7e6a904948258d2c0809785188b8a4b1f33133d05301 +size 38068 From 43e56ee3762d54f69a046d7cfebdcb7f411d5001 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Tue, 4 Jun 2024 22:17:44 -0600 Subject: [PATCH 11/17] CRC computation with new dataset is NOT consistent! 
Must be fixed or CRC is meaningless --- anise/src/almanac/metaload/metaalmanac.rs | 8 ++++---- anise/src/almanac/metaload/mod.rs | 4 ++-- anise/src/structure/dataset/mod.rs | 10 ++-------- anise/src/structure/lookuptable.rs | 5 +++++ anise/tests/orientations/mod.rs | 3 +++ data/latest.dhall | 4 ++-- data/pck08.pca | 4 ++-- data/pck11.pca | 4 ++-- 8 files changed, 22 insertions(+), 20 deletions(-) diff --git a/anise/src/almanac/metaload/metaalmanac.rs b/anise/src/almanac/metaload/metaalmanac.rs index 7c94895a..c7c162cf 100644 --- a/anise/src/almanac/metaload/metaalmanac.rs +++ b/anise/src/almanac/metaload/metaalmanac.rs @@ -81,7 +81,7 @@ impl MetaAlmanac { /// /// # File list /// - - /// - + /// - /// - /// - /// @@ -155,7 +155,7 @@ impl MetaAlmanac { /// /// # File list /// - - /// - + /// - /// - /// - /// @@ -225,11 +225,11 @@ impl Default for MetaAlmanac { }, MetaFile { uri: nyx_cloud_stor.join("v0.4/pck11.pca").unwrap().to_string(), - crc32: Some(0xacd1fe17), + crc32: Some(0xa4f3efb0), }, MetaFile { uri: nyx_cloud_stor.join("v0.4/moon_fk.epa").unwrap().to_string(), - crc32: Some(0x7f27859c), + crc32: Some(0xb1c44f3c), }, MetaFile { uri: nyx_cloud_stor diff --git a/anise/src/almanac/metaload/mod.rs b/anise/src/almanac/metaload/mod.rs index 1a7edfb3..5babc402 100644 --- a/anise/src/almanac/metaload/mod.rs +++ b/anise/src/almanac/metaload/mod.rs @@ -121,10 +121,10 @@ mod meta_test { [ { crc32 = Some 1921414410 , uri = "http://public-data.nyxspace.com/anise/de440s.bsp" } - , { crc32 = Some 2899443223 + , { crc32 = Some 2767450032 , uri = "http://public-data.nyxspace.com/anise/v0.4/pck11.pca" } - , { crc32 = Some 2133296540 + , { crc32 = Some 2982432572 , uri = "http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa" } , { crc32 = Some 1817759242 diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index 29e2bf6a..a1c1c01a 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -27,7 +27,6 @@ use 
der::{ }; use log::{error, trace}; use snafu::prelude::*; -use std::mem::size_of; macro_rules! io_imports { () => { @@ -123,13 +122,8 @@ impl DataSet { /// Compute the CRC32 of the underlying bytes pub fn crc32(&self) -> u32 { - let size = ENTRIES * size_of::(); - let mut buf = Vec::with_capacity(size); - // Clone the data set, setting the CRC32 to zero for the CRC check. - let mut me = self.clone(); - me.data_checksum = u32::MAX; - let _ = me.encode_to_vec(&mut buf); - crc32fast::hash(&buf) + let bytes = self.build_data_seq().1; + crc32fast::hash(&bytes.as_bytes()) } /// Sets the checksum of this data. diff --git a/anise/src/structure/lookuptable.rs b/anise/src/structure/lookuptable.rs index 4f8b3c85..a34109b6 100644 --- a/anise/src/structure/lookuptable.rs +++ b/anise/src/structure/lookuptable.rs @@ -159,6 +159,11 @@ impl LookUpTable { self.by_id.len().max(self.by_name.len()) } + /// Returns whether this dataset is empty + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub(crate) fn check_integrity(&self) -> bool { if self.by_id.is_empty() || self.by_name.is_empty() { // If either map is empty, the LUT is integral because there cannot be diff --git a/anise/tests/orientations/mod.rs b/anise/tests/orientations/mod.rs index ee173c20..bcf0398c 100644 --- a/anise/tests/orientations/mod.rs +++ b/anise/tests/orientations/mod.rs @@ -20,6 +20,8 @@ fn test_find_root_from_pca() { .save_as(&PathBuf::from_str("../data/pck11.pca").unwrap(), true) .unwrap(); + println!("PCK11 CRC32: {}", planetary_data.crc32()); + let almanac = Almanac::default().load("../data/pck11.pca").unwrap(); assert_eq!(almanac.try_find_orientation_root(), Ok(J2000)); @@ -29,6 +31,7 @@ fn test_find_root_from_pca() { planetary_data .save_as(&PathBuf::from_str("../data/pck08.pca").unwrap(), true) .unwrap(); + println!("PCK08 CRC32: {}", planetary_data.crc32()); assert!(Almanac::default().load("../data/pck08.pca").is_ok()); } diff --git a/data/latest.dhall b/data/latest.dhall index 
1be4c4da..cd22ff15 100644 --- a/data/latest.dhall +++ b/data/latest.dhall @@ -3,10 +3,10 @@ [ { crc32 = Some 1921414410 , uri = "http://public-data.nyxspace.com/anise/de440s.bsp" } - , { crc32 = Some 2220275152 + , { crc32 = Some 2767450032 , uri = "http://public-data.nyxspace.com/anise/v0.4/pck11.pca" } - , { crc32 = Some 3329024259 + , { crc32 = Some 2982432572 , uri = "http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa" } , { crc32 = Some 1817759242 diff --git a/data/pck08.pca b/data/pck08.pca index 622a11d1..6c977f9c 100644 --- a/data/pck08.pca +++ b/data/pck08.pca @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:4af4f90347e6f6851eef10ed9f4663c2160847a3997ae6b7f452ba9b298f5bf2 -size 33549 +oid sha256:af08f68c74844adceecc4168aff9a4c2089dba1fc85f3b56fb5ba549d27dcde8 +size 33497 diff --git a/data/pck11.pca b/data/pck11.pca index 6602d8fb..2e562405 100644 --- a/data/pck11.pca +++ b/data/pck11.pca @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b3318f316f46b79b089b7e6a904948258d2c0809785188b8a4b1f33133d05301 -size 38068 +oid sha256:daae65b36a9cac2a6a2104fdb37db0bcaa7ad325bf49da737d3bc13e5e486c5f +size 38067 From 488445d1e7cfa8efcecc274ed065e90188964d5e Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 5 Jun 2024 22:44:49 -0600 Subject: [PATCH 12/17] There is no order issue... 
issue was with the files on the cloud --- anise/src/almanac/metaload/metaalmanac.rs | 6 +++--- anise/src/structure/dataset/mod.rs | 9 +++++---- data/.gitignore | 3 ++- data/ci_config.dhall | 2 +- data/latest.dhall | 4 ++-- data/moon_fk.epa | Bin 0 -> 232 bytes data/pck08.pca | 4 ++-- data/pck11.pca | 2 +- 8 files changed, 16 insertions(+), 14 deletions(-) create mode 100644 data/moon_fk.epa diff --git a/anise/src/almanac/metaload/metaalmanac.rs b/anise/src/almanac/metaload/metaalmanac.rs index c7c162cf..4c230c83 100644 --- a/anise/src/almanac/metaload/metaalmanac.rs +++ b/anise/src/almanac/metaload/metaalmanac.rs @@ -221,15 +221,15 @@ impl Default for MetaAlmanac { files: vec![ MetaFile { uri: nyx_cloud_stor.join("de440s.bsp").unwrap().to_string(), - crc32: Some(0x7286750a), + crc32: Some(0xb71d6fa8), }, MetaFile { uri: nyx_cloud_stor.join("v0.4/pck11.pca").unwrap().to_string(), - crc32: Some(0xa4f3efb0), + crc32: Some(0x62a4f3ad), }, MetaFile { uri: nyx_cloud_stor.join("v0.4/moon_fk.epa").unwrap().to_string(), - crc32: Some(0xb1c44f3c), + crc32: Some(0x1175bd92), }, MetaFile { uri: nyx_cloud_stor diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index a1c1c01a..f424a96d 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -435,16 +435,17 @@ impl DataSet { /// Returns this data as a data sequence, cloning all of the entries into this sequence. fn build_data_seq(&self) -> (SequenceOf, OctetString) { let mut buf = vec![]; - let mut bytes_meta = SequenceOf::::default(); - bytes_meta.add(self.data.len() as u32).unwrap(); + // TODO: Return an octet string instead of seqof. We might overflow it! 
+ let mut meta = SequenceOf::::default(); + meta.add(self.data.len() as u32).unwrap(); for data in &self.data { let mut this_buf = vec![]; data.encode_to_vec(&mut this_buf).unwrap(); - bytes_meta.add(this_buf.len() as u32).unwrap(); + meta.add(this_buf.len() as u32).unwrap(); buf.extend_from_slice(&this_buf); } let bytes = OctetString::new(buf).unwrap(); - (bytes_meta, bytes) + (meta, bytes) } } diff --git a/data/.gitignore b/data/.gitignore index 1d52f2d4..5cf5cd6b 100644 --- a/data/.gitignore +++ b/data/.gitignore @@ -1,2 +1,3 @@ !pck08.pca -!pck11.pca \ No newline at end of file +!pck11.pca +!moon_fk.epa \ No newline at end of file diff --git a/data/ci_config.dhall b/data/ci_config.dhall index 5f775dbf..ac1add8e 100644 --- a/data/ci_config.dhall +++ b/data/ci_config.dhall @@ -3,7 +3,7 @@ [ { crc32 = Some 1921414410 , uri = "http://public-data.nyxspace.com/anise/de440s.bsp" } - , { crc32 = Some 1483613890 + , { crc32 = Some 3072159656 , uri = "http://public-data.nyxspace.com/anise/v0.4/pck08.pca" } , { crc32 = None Natural diff --git a/data/latest.dhall b/data/latest.dhall index cd22ff15..679f2995 100644 --- a/data/latest.dhall +++ b/data/latest.dhall @@ -3,10 +3,10 @@ [ { crc32 = Some 1921414410 , uri = "http://public-data.nyxspace.com/anise/de440s.bsp" } - , { crc32 = Some 2767450032 + , { crc32 = Some 1654977453 , uri = "http://public-data.nyxspace.com/anise/v0.4/pck11.pca" } - , { crc32 = Some 2982432572 + , { crc32 = Some 292928914 , uri = "http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa" } , { crc32 = Some 1817759242 diff --git a/data/moon_fk.epa b/data/moon_fk.epa new file mode 100644 index 0000000000000000000000000000000000000000..2c53d8fcf472fd946781cce0b96ce94a953cb2e3 GIT binary patch literal 232 zcmZQ!W?*4pVr1q~G%_$U(KRpwq7VZUD`N{QBV#=aLrW7Qb5jcwh0qXZ9tHy*CZ_)+Mxa(kCPpR$MHY5ne}BLD07odp*Oi4AEaK}L@8W7=WQe4iiAAt(^b literal 0 HcmV?d00001 diff --git a/data/pck08.pca b/data/pck08.pca index 6c977f9c..77ecbe05 100644 --- a/data/pck08.pca +++ 
b/data/pck08.pca @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:af08f68c74844adceecc4168aff9a4c2089dba1fc85f3b56fb5ba549d27dcde8 -size 33497 +oid sha256:6ddc44fb8695224e9e234427194ee4a9a40ba8ccdfb5075c984e0588cd689557 +size 33498 diff --git a/data/pck11.pca b/data/pck11.pca index 2e562405..cc155d3a 100644 --- a/data/pck11.pca +++ b/data/pck11.pca @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:daae65b36a9cac2a6a2104fdb37db0bcaa7ad325bf49da737d3bc13e5e486c5f +oid sha256:4bbe31dd7fa986324343150d0feeaf78a7a102cb9167818b5cd8748db4f3e6a2 size 38067 From 40102880df0eb8fa5e064d94c5688656c8ea45c9 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Wed, 5 Jun 2024 23:03:20 -0600 Subject: [PATCH 13/17] Update CRC32 checksums --- anise/src/almanac/metaload/metaalmanac.rs | 2 +- anise/src/almanac/metaload/mod.rs | 4 ++-- anise/src/structure/dataset/mod.rs | 11 +++++------ anise/src/structure/lookuptable.rs | 2 +- anise/src/structure/metadata.rs | 2 +- 5 files changed, 10 insertions(+), 11 deletions(-) diff --git a/anise/src/almanac/metaload/metaalmanac.rs b/anise/src/almanac/metaload/metaalmanac.rs index 4c230c83..05c82496 100644 --- a/anise/src/almanac/metaload/metaalmanac.rs +++ b/anise/src/almanac/metaload/metaalmanac.rs @@ -221,7 +221,7 @@ impl Default for MetaAlmanac { files: vec![ MetaFile { uri: nyx_cloud_stor.join("de440s.bsp").unwrap().to_string(), - crc32: Some(0xb71d6fa8), + crc32: Some(0x7286750a), }, MetaFile { uri: nyx_cloud_stor.join("v0.4/pck11.pca").unwrap().to_string(), diff --git a/anise/src/almanac/metaload/mod.rs b/anise/src/almanac/metaload/mod.rs index 5babc402..ad174a6d 100644 --- a/anise/src/almanac/metaload/mod.rs +++ b/anise/src/almanac/metaload/mod.rs @@ -121,10 +121,10 @@ mod meta_test { [ { crc32 = Some 1921414410 , uri = "http://public-data.nyxspace.com/anise/de440s.bsp" } - , { crc32 = Some 2767450032 + , { crc32 = Some 1654977453 , uri = "http://public-data.nyxspace.com/anise/v0.4/pck11.pca" } 
- , { crc32 = Some 2982432572 + , { crc32 = Some 292928914 , uri = "http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa" } , { crc32 = Some 1817759242 diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index f424a96d..bffc9fa7 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -433,15 +433,14 @@ impl DataSet { } /// Returns this data as a data sequence, cloning all of the entries into this sequence. - fn build_data_seq(&self) -> (SequenceOf, OctetString) { - let mut buf = vec![]; - // TODO: Return an octet string instead of seqof. We might overflow it! - let mut meta = SequenceOf::::default(); - meta.add(self.data.len() as u32).unwrap(); + fn build_data_seq(&self) -> (Vec, OctetString) { + let mut buf = Vec::with_capacity(ENTRIES * 2); + let mut meta = Vec::with_capacity(ENTRIES * 2); + meta.push(self.data.len() as u32); for data in &self.data { let mut this_buf = vec![]; data.encode_to_vec(&mut this_buf).unwrap(); - meta.add(this_buf.len() as u32).unwrap(); + meta.push(this_buf.len() as u32); buf.extend_from_slice(&this_buf); } let bytes = OctetString::new(buf).unwrap(); diff --git a/anise/src/structure/lookuptable.rs b/anise/src/structure/lookuptable.rs index a34109b6..12c5e876 100644 --- a/anise/src/structure/lookuptable.rs +++ b/anise/src/structure/lookuptable.rs @@ -287,7 +287,7 @@ mod lut_ut { assert_eq!(repr, repr_dec); dbg!(repr); - assert_eq!(core::mem::size_of::>(), 5136); + assert_eq!(core::mem::size_of::>(), 4368); } #[test] diff --git a/anise/src/structure/metadata.rs b/anise/src/structure/metadata.rs index 0f8442a2..90ef1aca 100644 --- a/anise/src/structure/metadata.rs +++ b/anise/src/structure/metadata.rs @@ -146,7 +146,7 @@ mod metadata_ut { assert_eq!( format!("{repr}"), format!( - r#"ANISE version ANISE version 0.3.0 + r#"ANISE version ANISE version 0.4.0 Originator: (not set) Creation date: {} "#, From a063ba109429a40e8ccc61b2e2d59926ae48a7b1 Mon Sep 17 00:00:00 2001 
From: Christopher Rabotin Date: Thu, 6 Jun 2024 07:15:53 -0600 Subject: [PATCH 14/17] Changed planetary benchmark function to avoid the clone issue --- anise/benches/crit_planetary_data.rs | 6 +++++- anise/src/almanac/aer.rs | 6 ++---- anise/src/structure/dataset/mod.rs | 4 ++-- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/anise/benches/crit_planetary_data.rs b/anise/benches/crit_planetary_data.rs index 3d29fb1a..83caf112 100644 --- a/anise/benches/crit_planetary_data.rs +++ b/anise/benches/crit_planetary_data.rs @@ -3,6 +3,10 @@ use std::path::PathBuf; use anise::{constants::frames::EARTH_ITRF93, naif::kpl::parser::convert_tpc, prelude::*}; use criterion::{black_box, criterion_group, criterion_main, Criterion}; +fn benchmark_fetch(almanac: &Almanac, frame: Frame) { + black_box(almanac.frame_from_uid(frame).unwrap()); +} + pub fn criterion_benchmark(c: &mut Criterion) { let pca = PathBuf::from_str("pck11.pca").unwrap(); let planetary_data = convert_tpc("../data/pck00011.tpc", "../data/gm_de431.tpc").unwrap(); @@ -11,7 +15,7 @@ pub fn criterion_benchmark(c: &mut Criterion) { let almanac = Almanac::new("pck11.pca").unwrap(); c.bench_function("Frame fetch from planetary dataset", |b| { - b.iter(|| black_box(almanac.clone().frame_from_uid(EARTH_ITRF93).unwrap())) + b.iter(|| benchmark_fetch(&almanac, EARTH_ITRF93)) }); } diff --git a/anise/src/almanac/aer.rs b/anise/src/almanac/aer.rs index 518e4b90..5a89be16 100644 --- a/anise/src/almanac/aer.rs +++ b/anise/src/almanac/aer.rs @@ -161,13 +161,11 @@ mod ut_aer { let eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); // Now iterate the trajectory to generate the measurements. 
- let gmat_ranges_km = vec![ - 9.145_755_787_575_61e4, + let gmat_ranges_km = [9.145_755_787_575_61e4, 9.996_505_560_799_869e4, 1.073_229_118_411_670_2e5, 1.145_516_751_191_464_7e5, - 1.265_739_190_638_930_7e5, - ]; + 1.265_739_190_638_930_7e5]; let states = [ CartesianState::new( diff --git a/anise/src/structure/dataset/mod.rs b/anise/src/structure/dataset/mod.rs index bffc9fa7..a5aa7758 100644 --- a/anise/src/structure/dataset/mod.rs +++ b/anise/src/structure/dataset/mod.rs @@ -123,7 +123,7 @@ impl DataSet { /// Compute the CRC32 of the underlying bytes pub fn crc32(&self) -> u32 { let bytes = self.build_data_seq().1; - crc32fast::hash(&bytes.as_bytes()) + crc32fast::hash(bytes.as_bytes()) } /// Sets the checksum of this data. @@ -588,7 +588,7 @@ mod dataset_ut { srp_sc.encode_to_vec(&mut this_buf).unwrap(); // Copy into the packed buffer for (i, byte) in this_buf.iter().enumerate() { - packed_buf[i + end_idx as usize] = *byte; + packed_buf[i + end_idx] = *byte; } let srp_sc_entry = end_idx..end_idx + this_buf.len(); // Check that we can decode the next entry From e14c89a6db579a77c502c3fe346ae7b09e038e4c Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Thu, 6 Jun 2024 08:56:39 -0600 Subject: [PATCH 15/17] Update to v0.4 cloud url --- .github/workflows/benchmarks.yml | 2 +- .github/workflows/python.yml | 8 +- .github/workflows/rust.yml | 14 +- README.md | 6 +- ...SPICE and ANISE files (meta almanac).ipynb | 1176 ++++++++--------- ...e kernels and text planetary kernels.ipynb | 672 +++++----- anise/README.md | 6 +- anise/src/almanac/aer.rs | 6 +- anise/src/almanac/metaload/metaalmanac.rs | 4 +- data/example_meta.dhall | 2 +- 10 files changed, 949 insertions(+), 947 deletions(-) diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index d435c4bc..ed02cdf9 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -23,7 +23,7 @@ jobs: wget -O data/de430.bsp 
http://public-data.nyxspace.com/anise/de430.bsp wget -O data/de440s.bsp http://public-data.nyxspace.com/anise/de440s.bsp wget -O data/de440.bsp http://public-data.nyxspace.com/anise/de440.bsp - wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.3/pck08.pca + wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.4/pck08.pca wget -O data/gmat-hermite.bsp http://public-data.nyxspace.com/anise/ci/gmat-hermite.bsp wget -O data/variable-seg-size-hermite.bsp http://public-data.nyxspace.com/anise/ci/variable-seg-size-hermite.bsp wget -O data/earth_latest_high_prec.bpc http://public-data.nyxspace.com/anise/ci/earth_latest_high_prec-2023-09-08.bpc diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index afc9e5a1..36f69c76 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -33,8 +33,8 @@ jobs: wget -O data/de430.bsp http://public-data.nyxspace.com/anise/de430.bsp wget -O data/de440s.bsp http://public-data.nyxspace.com/anise/de440s.bsp wget -O data/de440.bsp http://public-data.nyxspace.com/anise/de440.bsp - wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.3/pck08.pca - wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.3/pck11.pca + wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.4/pck08.pca + wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.4/pck11.pca - uses: actions/setup-python@v5 with: @@ -197,8 +197,8 @@ jobs: wget -O data/de430.bsp http://public-data.nyxspace.com/anise/de430.bsp wget -O data/de440s.bsp http://public-data.nyxspace.com/anise/de440s.bsp wget -O data/de440.bsp http://public-data.nyxspace.com/anise/de440.bsp - wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.3/pck08.pca - wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.3/pck11.pca + wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.4/pck08.pca + wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.4/pck11.pca - name: Build 
sdist uses: PyO3/maturin-action@v1 diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 414aae2f..f8bd2a99 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -40,9 +40,9 @@ jobs: wget -O data/de430.bsp http://public-data.nyxspace.com/anise/de430.bsp wget -O data/de440s.bsp http://public-data.nyxspace.com/anise/de440s.bsp wget -O data/de440.bsp http://public-data.nyxspace.com/anise/de440.bsp - wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.3/pck08.pca - wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.3/pck11.pca - wget -O data/moon_fk.epa http://public-data.nyxspace.com/anise/v0.3/moon_fk.epa + wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.4/pck08.pca + wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.4/pck11.pca + wget -O data/moon_fk.epa http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa wget -O data/moon_pa_de440_200625.bpc http://public-data.nyxspace.com/anise/moon_pa_de440_200625.bpc wget -O data/gmat-hermite.bsp http://public-data.nyxspace.com/anise/ci/gmat-hermite.bsp wget -O data/variable-seg-size-hermite.bsp http://public-data.nyxspace.com/anise/ci/variable-seg-size-hermite.bsp @@ -92,8 +92,8 @@ jobs: wget -O data/de430.bsp http://public-data.nyxspace.com/anise/de430.bsp wget -O data/de440s.bsp http://public-data.nyxspace.com/anise/de440s.bsp wget -O data/de440.bsp http://public-data.nyxspace.com/anise/de440.bsp - wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.3/pck08.pca - wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.3/pck11.pca + wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.4/pck08.pca + wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.4/pck11.pca wget -O data/gmat-hermite.bsp http://public-data.nyxspace.com/anise/ci/gmat-hermite.bsp wget -O data/variable-seg-size-hermite.bsp http://public-data.nyxspace.com/anise/ci/variable-seg-size-hermite.bsp wget -O 
data/earth_latest_high_prec.bpc http://public-data.nyxspace.com/anise/ci/earth_latest_high_prec-2023-09-08.bpc @@ -155,8 +155,8 @@ jobs: wget -O data/de430.bsp http://public-data.nyxspace.com/anise/de430.bsp wget -O data/de440s.bsp http://public-data.nyxspace.com/anise/de440s.bsp wget -O data/de440.bsp http://public-data.nyxspace.com/anise/de440.bsp - wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.3/pck08.pca - wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.3/pck11.pca + wget -O data/pck08.pca http://public-data.nyxspace.com/anise/v0.4/pck08.pca + wget -O data/pck11.pca http://public-data.nyxspace.com/anise/v0.4/pck11.pca wget -O data/gmat-hermite.bsp http://public-data.nyxspace.com/anise/ci/gmat-hermite.bsp wget -O data/variable-seg-size-hermite.bsp http://public-data.nyxspace.com/anise/ci/variable-seg-size-hermite.bsp wget -O data/earth_latest_high_prec.bpc http://public-data.nyxspace.com/anise/ci/earth_latest_high_prec-2023-09-08.bpc diff --git a/README.md b/README.md index 688f1f64..8a5eec49 100644 --- a/README.md +++ b/README.md @@ -58,9 +58,9 @@ For convenience, Nyx Space provides a few important SPICE files on a public buck + [de440s.bsp](http://public-data.nyxspace.com/anise/de440s.bsp): JPL's latest ephemeris dataset from 1900 until 20250 + [de440.bsp](http://public-data.nyxspace.com/anise/de440.bsp): JPL's latest long-term ephemeris dataset -+ [pck08.pca](http://public-data.nyxspace.com/anise/v0.3/pck08.pca): planetary constants ANISE (`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file [pck00008.tpc](http://public-data.nyxspace.com/anise/pck00008.tpc) -+ [pck11.pca](http://public-data.nyxspace.com/anise/v0.3/pck11.pca): planetary constants ANISE (`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file 
[pck00011.tpc](http://public-data.nyxspace.com/anise/pck00011.tpc) -+ [moon_fk.epa](http://public-data.nyxspace.com/anise/v0.3/moon_fk.epa): Euler Parameter ANISE (`epa`) kernel, built from the JPL Moon Frame Kernel `moon_080317.txt` ++ [pck08.pca](http://public-data.nyxspace.com/anise/v0.4/pck08.pca): planetary constants ANISE (`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file [pck00008.tpc](http://public-data.nyxspace.com/anise/pck00008.tpc) ++ [pck11.pca](http://public-data.nyxspace.com/anise/v0.4/pck11.pca): planetary constants ANISE (`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file [pck00011.tpc](http://public-data.nyxspace.com/anise/pck00011.tpc) ++ [moon_fk.epa](http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa): Euler Parameter ANISE (`epa`) kernel, built from the JPL Moon Frame Kernel `moon_080317.txt` You may load any of these using the `load()` shortcut that will determine the file type upon loading, e.g. `let almanac = Almanac::new("pck08.pca").unwrap();` or in Python `almanac = Almanac("pck08.pca")`. To automatically download remote assets, from the Nyx Cloud or elsewhere, use the MetaAlmanac: `almanac = MetaAlmanac("ci_config.dhall").process()` in Python. 
diff --git a/anise-py/tutorials/Tutorial 02 - Loading remote SPICE and ANISE files (meta almanac).ipynb b/anise-py/tutorials/Tutorial 02 - Loading remote SPICE and ANISE files (meta almanac).ipynb index b030c32f..a2113153 100644 --- a/anise-py/tutorials/Tutorial 02 - Loading remote SPICE and ANISE files (meta almanac).ipynb +++ b/anise-py/tutorials/Tutorial 02 - Loading remote SPICE and ANISE files (meta almanac).ipynb @@ -1,589 +1,589 @@ { - "cells": [ - { - "cell_type": "markdown", - "id": "9055aeed-f362-402e-a504-3b20e38feff4", - "metadata": {}, - "source": [ - "# ANISE\n", - "\n", - "ANISE is a modern rewrite of NAIF SPICE, written in Rust and providing interfaces to other languages include Python.\n", - "\n", - "Evidently, this tutorial applies to the Python usage of ANISE.\n", - "\n", - "## Goal\n", - "By the end of this tutorial, you should be able to know how to load local and remotes files using the `MetaAlmanac` structure, and know how to save and reload that meta configuration.\n", - "\n", - "Let's start by installing ANISE: `pip install anise`" - ] - }, - { - "cell_type": "markdown", - "id": "0ac23885-e0ff-4e38-94a3-85c247befd09", - "metadata": {}, - "source": [ - "## Introduction\n", - "\n", - "SPICE files, such as development ephemerides, are often substantial in size. Typically, they are stored on shared resources, with links circulated among teams who require access. This process can be cumbersome for end-users, who must ensure they are using the correct file and that it loads properly, irrespective of the script’s execution path.\n", - "\n", - "The `MetaAlmanac` addresses this challenge by facilitating the initialization of an Almanac using both local and remote files. Remote files are automatically downloaded to the user's application data cache folder (`AppData` on Windows, `~/.local/share/cache` on Linux). 
For these files, the MetaAlmanac verifies the integrity of any local copy by comparing its CRC32 checksum with that of the remote file.\n", - "\n", - "Furthermore, the MetaAlmanac guarantees the use of the most up-to-date versions of these files. An example is the daily Earth Orientation Parameters published by JPL, termed the \"high precision Earth rotation\" kernel. The MetaAlmanac enables users to seamlessly access the latest version of these files." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "28340bbd-490f-4aef-9ada-30c1d4017bcf", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\u001b[0;31mInit signature:\u001b[0m \u001b[0mMetaAlmanac\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmaybe_path\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mDocstring:\u001b[0m \n", - "A structure to set up an Almanac, with automatic downloading, local storage, checksum checking, and more.\n", - "\n", - "# Behavior\n", - "If the URI is a local path, relative or absolute, nothing will be fetched from a remote. Relative paths are relative to the execution folder (i.e. the current working directory).\n", - "If the URI is a remote path, the MetaAlmanac will first check if the file exists locally. If it exists, it will check that the CRC32 checksum of this file matches that of the specs.\n", - "If it does not match, the file will be downloaded again. 
If no CRC32 is provided but the file exists, then the MetaAlmanac will fetch the remote file and overwrite the existing file.\n", - "The downloaded path will be stored in the \"AppData\" folder.\n", - "\u001b[0;31mFile:\u001b[0m ~/Workspace/nyx-space/anise/anise-py/.venv/lib64/python3.11/site-packages/anise/__init__.py\n", - "\u001b[0;31mType:\u001b[0m type\n", - "\u001b[0;31mSubclasses:\u001b[0m " - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "from anise import MetaAlmanac\n", - "\n", - "MetaAlmanac?" - ] - }, - { - "cell_type": "markdown", - "id": "f1934f96-030f-4008-bea5-59ba1fed60de", - "metadata": {}, - "source": [ - "## MetaAlmanac configuration\n", - "\n", - "As seen in the previous cell, a MetaAlmanac is typically initialized using a path to a configuration file (the `maybe_path` argument).\n", - "\n", - "The configuration file is written in [`Dhall`](https://dhall-lang.org/), an exceptional configuration language known for its safety, expressiveness, and maintainability. 
Dhall's design simplifies the process of configuring complex systems, making it a standout choice for tasks like initializing the MetaAlmanac.\n", - "\n", - "Let's see what this looks like:" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "516ce346-1285-48cc-9940-8bb90620e993", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-- Latest planetary ephemerides, planetary constants, high precision Moon rotation, and daily Earth orientation parameter\n", - "{ files =\n", - "[ { crc32 = Some 1921414410\n", - ", uri = \"http://public-data.nyxspace.com/anise/de440s.bsp\"\n", - "}\n", - ", { crc32 = Some 2899443223\n", - ", uri = \"http://public-data.nyxspace.com/anise/v0.3/pck11.pca\"\n", - "}\n", - ", { crc32 = Some 2133296540\n", - ", uri = \"http://public-data.nyxspace.com/anise/v0.3/moon_fk.epa\"\n", - "}\n", - ", { crc32 = Some 1817759242\n", - ", uri = \"http://public-data.nyxspace.com/anise/moon_pa_de440_200625.bpc\"\n", - "}\n", - ", { crc32 = None Natural\n", - ", uri =\n", - "\"https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_latest_high_prec.bpc\"\n", - "}\n", - "]\n", - "}\n" - ] - } - ], - "source": [ - "with open(\"../../data/latest.dhall\") as f:\n", - " for line in f.readlines():\n", - " print(line.strip())" - ] - }, - { - "cell_type": "markdown", - "id": "6076d240-713d-4a42-8dfa-a718c222e3a7", - "metadata": {}, - "source": [ - "This is the default MetaAlmanac: it grabs the development ephemerides `DE440s` from the public cloud of Nyx Space, grab a copy of the high fidelity Moon Principal Axes frame, grabs the planetary constant ANISE file `pck08.pca`. It also downloads the latest high precision Earth rotation parameters from JPL.\n", - "\n", - "The MetaAlmanac comes with a shortcut to download the latest info above without needing a copy of the Dhall file." 
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "97824317-42a6-4a5b-8424-7411a2d62ab7", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "=== BPC #0 ===\n", - "β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”\n", - "β”‚ Name β”‚ Start epoch β”‚ End epoch β”‚ Duration β”‚ Interpolation kind β”‚ Frame β”‚ Inertial frame β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 2000-01-01T00:01:04.183912847 TDB β”‚ 2002-09-26T21:18:50.632952778 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - 
"β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 2002-09-26T21:18:50.632952778 TDB β”‚ 2005-06-22T18:36:37.081996238 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 2005-06-22T18:36:37.081996238 TDB β”‚ 2008-03-18T15:54:23.531035669 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€οΏ½οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 
2008-03-18T15:54:23.531035669 TDB β”‚ 2010-12-13T13:12:09.980072814 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 2010-12-13T13:12:09.980072814 TDB β”‚ 2013-09-08T10:29:56.429117874 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 2013-09-08T10:29:56.429117874 TDB β”‚ 2016-06-04T07:47:42.878162558 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - 
"β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 2016-06-04T07:47:42.878162558 TDB β”‚ 2019-03-01T05:05:29.327196302 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 2019-03-01T05:05:29.327196302 TDB β”‚ 2021-11-25T02:23:15.776233885 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ Earth PCK, ITRF93 Frame β”‚ 
2021-11-25T02:23:15.776233885 TDB β”‚ 2024-04-19T00:01:09.185602312 TDB β”‚ 875 days 21 h 37 min 53 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", - "β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜\n", - "=== BPC #1 ===\n", - "β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€οΏ½οΏ½β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”\n", - "β”‚ Name β”‚ Start epoch β”‚ End epoch β”‚ Duration β”‚ Interpolation kind β”‚ Frame β”‚ Inertial frame β”‚\n", - "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ de440.nio β”‚ 1449-12-26T23:59:59.999860483 TDB β”‚ 2426-02-15T23:59:59.999891009 TDB β”‚ 320000 days β”‚ Chebyshev Triplet β”‚ 31008 β”‚ 1 β”‚\n", - 
"β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€οΏ½οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", - "β”‚ de440.nio β”‚ 2426-02-15T23:59:59.999891009 TDB β”‚ 2650-01-24T23:59:59.999798018 TDB β”‚ 81792 days β”‚ Chebyshev Triplet β”‚ 31008 β”‚ 1 β”‚\n", - "β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜\n" - ] - } - ], - "source": [ - "almanac = MetaAlmanac.latest()\n", - "almanac.describe(bpc=True)" - ] - }, - { - "cell_type": "markdown", - "id": "c1f6515d-3018-4a90-a617-dfe98aae1add", - "metadata": {}, - "source": [ - "The data downloaded from Nyx Space cloud has a checksum in the configuration file: that's because we know exactly what this data should be. Hence, if the data is modified in your local copy, the MetaAlmanac will download it again and replace your local copy. However, the JPL data changes daily, so we don't store a checksum in the config file, ensuring that the latest data is always downloaded.\n", - "\n", - "ANISE also provides a local config file to use the data stored in a copy of the repo." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "f748993a-744f-4e9a-be4f-0e637c605486", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-- Default Almanac\n", - "{ files =\n", - "[ { crc32 = None Natural, uri = \"../../data/de440s.bsp\" }\n", - ", { crc32 = None Natural, uri = \"../../data/pck08.pca\" }\n", - "]\n", - "}\n" - ] - } - ], - "source": [ - "with open(\"../../data/local.dhall\") as f:\n", - " for line in f.readlines():\n", - " print(line.strip())" - ] - }, - { - "cell_type": "markdown", - "id": "44d856cb-10c2-42d0-ab15-614b84538b4a", - "metadata": {}, - "source": [ - "The CRC32 integrity number is not set for local paths because in any case, the MetaAlmanac does not know where to fetch another version where the checksum should match." - ] - }, - { - "cell_type": "markdown", - "id": "67f031fe-6a51-4aa0-a0c4-619669d04b0e", - "metadata": {}, - "source": [ - "## Using the MetaAlmanac\n", - "\n", - "The MetaAlmanac is designed to work seamlessly with the Alamac itself. In the following example, we'll use the latest MetaAlmanac (the same that's in `latest.dhall`) and see that it can be used to return the Almanac directly." 
- ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "5d07fd6b-b498-4f33-85f1-9e243a4f0ab1", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MetaAlmanac { files: [MetaFile { uri: \"http://public-data.nyxspace.com/anise/de440s.bsp\", crc32: Some(1921414410) }, MetaFile { uri: \"http://public-data.nyxspace.com/anise/v0.3/pck11.pca\", crc32: Some(2899443223) }, MetaFile { uri: \"http://public-data.nyxspace.com/anise/v0.3/moon_fk.epa\", crc32: Some(2133296540) }, MetaFile { uri: \"http://public-data.nyxspace.com/anise/moon_pa_de440_200625.bpc\", crc32: Some(1817759242) }, MetaFile { uri: \"https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_latest_high_prec.bpc\", crc32: None }] }\n" - ] - } - ], - "source": [ - "meta = MetaAlmanac.load(\"../../data/latest.dhall\")\n", - "print(meta)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "550ec288-15bb-4652-a50c-278d59492bff", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Almanac: #SPK = 1\t#BPC = 2\tPlanetaryData with 54 ID mappings and 0 name mappings\tEulerParameterData with 3 ID mappings and 3 name mappings\n" - ] - } - ], - "source": [ - "almanac = meta.process()\n", - "print(almanac)" - ] - }, - { - "cell_type": "markdown", - "id": "79813cf7-b7ad-4d56-b315-508a438035f9", - "metadata": {}, - "source": [ - "Trivial! We now see that the loaded Almanac has one loaded SPK file (the de440s file), one BPC file (the latest Earth high precision rotation), and 49 planetary data mappings, loaded from the `pck08.pca`.\n", - "\n", - "Even simpler, you can just call the `latest()` class method which will call the `latest.dhall` equivalent, without requiring a local configuration file." 
- ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "8826c9a5-a50d-4c93-90b8-1ae45074d058", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Almanac: #SPK = 1\t#BPC = 2\tPlanetaryData with 54 ID mappings and 0 name mappings\tEulerParameterData with 3 ID mappings and 3 name mappings\n" - ] - } - ], - "source": [ - "print(MetaAlmanac.latest())" - ] - }, - { - "cell_type": "markdown", - "id": "3cb4db3f-09a4-4ad6-b32d-a7c2c9ba8e3c", - "metadata": {}, - "source": [ - "## Building a MetaAlmanac config\n", - "\n", - "Building a Dhall configuration for ANISE can be approached in two ways. The most direct method is to craft a Dhall file manually. However, given the complexity often associated with writing in Dhall, ANISE offers a more user-friendly alternative through the `MetaFile` class. This option simplifies the process of creating the necessary data, catering to users who may find direct Dhall scripting challenging." - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "fae8a0c8-15e7-4e98-aaf3-ba0859c64121", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\u001b[0;31mInit signature:\u001b[0m \u001b[0mMetaFile\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0muri\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcrc32\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mFile:\u001b[0m ~/Workspace/nyx-space/anise/anise-py/.venv/lib64/python3.11/site-packages/anise/__init__.py\n", - "\u001b[0;31mType:\u001b[0m type\n", - "\u001b[0;31mSubclasses:\u001b[0m " - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "from anise import MetaFile\n", - "MetaFile?" 
- ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "423b2032-a36c-4863-ba6e-ebd09ed90508", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Almanac: #SPK = 0\t#BPC = 0 (@0x5582525a9d10)" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Let's initialize a new empty MetaAlmanac.\n", - "new_meta = MetaAlmanac()\n", - "new_meta.process() # Note that you can always initialize an empty MetaAlmanac because you can initialize an empty Almanac" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "88479168-6c60-438d-958e-86c757a59f04", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'{ files = [{ crc32 = None Natural, uri = \"../../data/de440s.bsp\" }, { crc32 = None Natural, uri = \"https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/moon_pa_de440_200625.bpc\" }, { crc32 = None Natural, uri = \"https://google.com/non/existing/pck08.pca\" }] }'" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Create MetaFile instances\n", - "local_de = MetaFile(\"../../data/de440s.bsp\")\n", - "jpl_moon_rotation = MetaFile(\"https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/moon_pa_de440_200625.bpc\")\n", - "non_existing = MetaFile(\"https://google.com/non/existing/pck08.pca\")\n", - "# Add them to the meta almanac\n", - "new_meta.files = [local_de, jpl_moon_rotation, non_existing]\n", - "# And print what this configuration would be:\n", - "new_meta.dump()" - ] - }, - { - "cell_type": "markdown", - "id": "a8e243d7-1be5-401d-a732-46273e23327e", - "metadata": {}, - "source": [ - "Note that the MetaAlmanac will raise an error in case it cannot download the files. Let's try to load the configuration we just specified." 
- ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "b2f5c44c-473c-4ed3-8d92-18ac3eaa8dca", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\u001b[0;31mSignature:\u001b[0m \u001b[0mnon_existing\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprocess\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mDocstring:\u001b[0m\n", - "Processes this MetaFile by downloading it if it's a URL.\n", - "\n", - "This function modified `self` and changes the URI to be the path to the downloaded file.\n", - "\u001b[0;31mType:\u001b[0m builtin_function_or_method" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# You can call a specific file's `process` method to handle this specific file\n", - "non_existing.process?" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "7d58dc71-0a96-477d-9a41-c7e201704042", - "metadata": {}, - "outputs": [], - "source": [ - "# This does nothing because it's a local file\n", - "local_de.process()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "83437378-9b44-4fcd-a6e8-e774bf098534", - "metadata": {}, - "outputs": [ - { - "ename": "Exception", - "evalue": "fetching https://google.com/non/existing/pck08.pca returned 404 Not Found", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mException\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[13], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Trying to download the non existing file will throw an exception\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[43mnon_existing\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mprocess\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "\u001b[0;31mException\u001b[0m: fetching https://google.com/non/existing/pck08.pca returned 404 Not 
Found" - ] - } - ], - "source": [ - "# Trying to download the non existing file will throw an exception\n", - "non_existing.process()" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "e2c35dc9-4efe-4348-ac81-cb7384cf8faf", - "metadata": {}, - "outputs": [ - { - "ename": "Exception", - "evalue": "processing file #2 (MetaFile { uri: \"https://google.com/non/existing/pck08.pca\", crc32: None }) caused an error: fetching https://google.com/non/existing/pck08.pca returned 404 Not Found", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mException\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[14], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Trying to process the full meta almanac with an erroneous meta file will also throw an exception.\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[43mnew_meta\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mprocess\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "\u001b[0;31mException\u001b[0m: processing file #2 (MetaFile { uri: \"https://google.com/non/existing/pck08.pca\", crc32: None }) caused an error: fetching https://google.com/non/existing/pck08.pca returned 404 Not Found" - ] - } - ], - "source": [ - "# Trying to process the full meta almanac with an erroneous meta file will also throw an exception.\n", - "new_meta.process()" - ] - }, - { - "cell_type": "markdown", - "id": "bbd9e752-a045-4a50-bf65-0fe455b84f0a", - "metadata": {}, - "source": [ - "## Directly loading a MetaFile\n", - "\n", - "Say you have an `Almanac` loaded with some of your files. And then you'd like to load one file that is stored remotely. You may do so directly with the `load_from_metafile` method of the Almanac.\n", - "\n", - "**Note:** an Almanac may only load _one_ planetary constants kernel and _one_ Euler parameters kernel. 
As such, if your Almanac already includes one of these, loading another one will _replace it_. Refer to tutorial #05 on how to build your own PCA and EPA files that include everything you need." - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "577dde0e-9dd5-41b5-9263-0f97fdb78628", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Almanac: #SPK = 1\t#BPC = 0\tPlanetaryData with 54 ID mappings and 0 name mappings\n", - "Almanac: #SPK = 1\t#BPC = 0\tPlanetaryData with 54 ID mappings and 0 name mappings\tEulerParameterData with 3 ID mappings and 3 name mappings\n" - ] - } - ], - "source": [ - "from anise import Almanac\n", - "only_de440s = Almanac(\"../../data/de440s.bsp\").load(\"../../data/pck11.pca\")\n", - "print(only_de440s)\n", - "# Now load a PCA from the Nyx Space cloud\n", - "de440s_and_moon = only_de440s.load_from_metafile(MetaFile(\"http://public-data.nyxspace.com/anise/v0.3/moon_fk.epa\", 2133296540))\n", - "print(de440s_and_moon)" - ] - }, - { - "cell_type": "markdown", - "id": "22637388-6a44-42cb-8722-38bf0d570a82", - "metadata": {}, - "source": [ - "To confirm that we've loaded the Moon FK file, let's grab the frame info from the Moon ME and Moon PA frames." - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "3c0466bd-fca6-49e8-9aca-33032bbd4306", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Moon MOON_ME (ΞΌ = 4902.800066163796 km^3/s^2, radius = 1737.4 km)\n", - "Moon MOON_PA (ΞΌ = 4902.800066163796 km^3/s^2, radius = 1737.4 km)\n" - ] - } - ], - "source": [ - "from anise.astro.constants import Frames\n", - "print(de440s_and_moon.frame_info(Frames.MOON_ME_FRAME))\n", - "print(de440s_and_moon.frame_info(Frames.MOON_PA_FRAME))" - ] - }, - { - "cell_type": "markdown", - "id": "a8f2d315-6956-4236-bbf9-67b216ea16bf", - "metadata": {}, - "source": [ - "## Exercises\n", - "\n", - "### Learning Goals:\n", - "\n", - "1. 
+ "ANISE is a modern rewrite of NAIF SPICE, written in Rust and providing interfaces to other languages, including Python.\n",
+ { + "cell_type": "markdown", + "id": "0ac23885-e0ff-4e38-94a3-85c247befd09", + "metadata": {}, + "source": [ + "## Introduction\n", + "\n", + "SPICE files, such as development ephemerides, are often substantial in size. Typically, they are stored on shared resources, with links circulated among teams who require access. This process can be cumbersome for end-users, who must ensure they are using the correct file and that it loads properly, irrespective of the script’s execution path.\n", + "\n", + "The `MetaAlmanac` addresses this challenge by facilitating the initialization of an Almanac using both local and remote files. Remote files are automatically downloaded to the user's application data cache folder (`AppData` on Windows, `~/.local/share/cache` on Linux). For these files, the MetaAlmanac verifies the integrity of any local copy by comparing its CRC32 checksum with that of the remote file.\n", + "\n", + "Furthermore, the MetaAlmanac guarantees the use of the most up-to-date versions of these files. An example is the daily Earth Orientation Parameters published by JPL, termed the \"high precision Earth rotation\" kernel. The MetaAlmanac enables users to seamlessly access the latest version of these files." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "28340bbd-490f-4aef-9ada-30c1d4017bcf", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\u001b[0;31mInit signature:\u001b[0m \u001b[0mMetaAlmanac\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmaybe_path\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mDocstring:\u001b[0m \n", + "A structure to set up an Almanac, with automatic downloading, local storage, checksum checking, and more.\n", + "\n", + "# Behavior\n", + "If the URI is a local path, relative or absolute, nothing will be fetched from a remote. Relative paths are relative to the execution folder (i.e. 
the current working directory).\n", + "If the URI is a remote path, the MetaAlmanac will first check if the file exists locally. If it exists, it will check that the CRC32 checksum of this file matches that of the specs.\n", + "If it does not match, the file will be downloaded again. If no CRC32 is provided but the file exists, then the MetaAlmanac will fetch the remote file and overwrite the existing file.\n", + "The downloaded path will be stored in the \"AppData\" folder.\n", + "\u001b[0;31mFile:\u001b[0m ~/Workspace/nyx-space/anise/anise-py/.venv/lib64/python3.11/site-packages/anise/__init__.py\n", + "\u001b[0;31mType:\u001b[0m type\n", + "\u001b[0;31mSubclasses:\u001b[0m " + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from anise import MetaAlmanac\n", + "\n", + "MetaAlmanac?" + ] + }, + { + "cell_type": "markdown", + "id": "f1934f96-030f-4008-bea5-59ba1fed60de", + "metadata": {}, + "source": [ + "## MetaAlmanac configuration\n", + "\n", + "As seen in the previous cell, a MetaAlmanac is typically initialized using a path to a configuration file (the `maybe_path` argument).\n", + "\n", + "The configuration file is written in [`Dhall`](https://dhall-lang.org/), an exceptional configuration language known for its safety, expressiveness, and maintainability. 
+ "This is the default MetaAlmanac: it grabs the development ephemerides `DE440s` from the public cloud of Nyx Space, grabs a copy of the high fidelity Moon Principal Axes frame, and grabs the planetary constant ANISE file `pck11.pca`. It also downloads the latest high precision Earth rotation parameters from JPL.\n",
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "97824317-42a6-4a5b-8424-7411a2d62ab7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "=== BPC #0 ===\n", + "β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”\n", + "β”‚ Name β”‚ Start epoch β”‚ End epoch β”‚ Duration β”‚ Interpolation kind β”‚ Frame β”‚ Inertial frame β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 2000-01-01T00:01:04.183912847 TDB β”‚ 2002-09-26T21:18:50.632952778 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + 
"β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 2002-09-26T21:18:50.632952778 TDB β”‚ 2005-06-22T18:36:37.081996238 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 2005-06-22T18:36:37.081996238 TDB β”‚ 2008-03-18T15:54:23.531035669 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€οΏ½οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 
2008-03-18T15:54:23.531035669 TDB β”‚ 2010-12-13T13:12:09.980072814 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 2010-12-13T13:12:09.980072814 TDB β”‚ 2013-09-08T10:29:56.429117874 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 2013-09-08T10:29:56.429117874 TDB β”‚ 2016-06-04T07:47:42.878162558 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + 
"β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 2016-06-04T07:47:42.878162558 TDB β”‚ 2019-03-01T05:05:29.327196302 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 2019-03-01T05:05:29.327196302 TDB β”‚ 2021-11-25T02:23:15.776233885 TDB β”‚ 999 days 21 h 17 min 46 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ Earth PCK, ITRF93 Frame β”‚ 
2021-11-25T02:23:15.776233885 TDB β”‚ 2024-04-19T00:01:09.185602312 TDB β”‚ 875 days 21 h 37 min 53 s β”‚ Chebyshev Triplet β”‚ 3000 β”‚ 17 β”‚\n", + "β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜\n", + "=== BPC #1 ===\n", + "β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€οΏ½οΏ½β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”\n", + "β”‚ Name β”‚ Start epoch β”‚ End epoch β”‚ Duration β”‚ Interpolation kind β”‚ Frame β”‚ Inertial frame β”‚\n", + "β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ de440.nio β”‚ 1449-12-26T23:59:59.999860483 TDB β”‚ 2426-02-15T23:59:59.999891009 TDB β”‚ 320000 days β”‚ Chebyshev Triplet β”‚ 31008 β”‚ 1 β”‚\n", + 
"β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€οΏ½οΏ½οΏ½β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€\n", + "β”‚ de440.nio β”‚ 2426-02-15T23:59:59.999891009 TDB β”‚ 2650-01-24T23:59:59.999798018 TDB β”‚ 81792 days β”‚ Chebyshev Triplet β”‚ 31008 β”‚ 1 β”‚\n", + "β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜\n" + ] + } + ], + "source": [ + "almanac = MetaAlmanac.latest()\n", + "almanac.describe(bpc=True)" + ] + }, + { + "cell_type": "markdown", + "id": "c1f6515d-3018-4a90-a617-dfe98aae1add", + "metadata": {}, + "source": [ + "The data downloaded from Nyx Space cloud has a checksum in the configuration file: that's because we know exactly what this data should be. Hence, if the data is modified in your local copy, the MetaAlmanac will download it again and replace your local copy. However, the JPL data changes daily, so we don't store a checksum in the config file, ensuring that the latest data is always downloaded.\n", + "\n", + "ANISE also provides a local config file to use the data stored in a copy of the repo." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "f748993a-744f-4e9a-be4f-0e637c605486", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-- Default Almanac\n", + "{ files =\n", + "[ { crc32 = None Natural, uri = \"../../data/de440s.bsp\" }\n", + ", { crc32 = None Natural, uri = \"../../data/pck08.pca\" }\n", + "]\n", + "}\n" + ] + } + ], + "source": [ + "with open(\"../../data/local.dhall\") as f:\n", + " for line in f.readlines():\n", + " print(line.strip())" + ] + }, + { + "cell_type": "markdown", + "id": "44d856cb-10c2-42d0-ab15-614b84538b4a", + "metadata": {}, + "source": [ + "The CRC32 integrity number is not set for local paths because in any case, the MetaAlmanac does not know where to fetch another version where the checksum should match." + ] + }, + { + "cell_type": "markdown", + "id": "67f031fe-6a51-4aa0-a0c4-619669d04b0e", + "metadata": {}, + "source": [ + "## Using the MetaAlmanac\n", + "\n", + "The MetaAlmanac is designed to work seamlessly with the Alamac itself. In the following example, we'll use the latest MetaAlmanac (the same that's in `latest.dhall`) and see that it can be used to return the Almanac directly." 
+ "Trivial! We now see that the loaded Almanac has one loaded SPK file (the de440s file), two BPC files (the latest Earth high precision rotation and the Moon principal axes rotation), and 54 planetary data mappings, loaded from the `pck11.pca`.\n",
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "8826c9a5-a50d-4c93-90b8-1ae45074d058", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Almanac: #SPK = 1\t#BPC = 2\tPlanetaryData with 54 ID mappings and 0 name mappings\tEulerParameterData with 3 ID mappings and 3 name mappings\n" + ] + } + ], + "source": [ + "print(MetaAlmanac.latest())" + ] + }, + { + "cell_type": "markdown", + "id": "3cb4db3f-09a4-4ad6-b32d-a7c2c9ba8e3c", + "metadata": {}, + "source": [ + "## Building a MetaAlmanac config\n", + "\n", + "Building a Dhall configuration for ANISE can be approached in two ways. The most direct method is to craft a Dhall file manually. However, given the complexity often associated with writing in Dhall, ANISE offers a more user-friendly alternative through the `MetaFile` class. This option simplifies the process of creating the necessary data, catering to users who may find direct Dhall scripting challenging." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "fae8a0c8-15e7-4e98-aaf3-ba0859c64121", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\u001b[0;31mInit signature:\u001b[0m \u001b[0mMetaFile\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0muri\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcrc32\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mFile:\u001b[0m ~/Workspace/nyx-space/anise/anise-py/.venv/lib64/python3.11/site-packages/anise/__init__.py\n", + "\u001b[0;31mType:\u001b[0m type\n", + "\u001b[0;31mSubclasses:\u001b[0m " + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from anise import MetaFile\n", + "MetaFile?" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "423b2032-a36c-4863-ba6e-ebd09ed90508", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Almanac: #SPK = 0\t#BPC = 0 (@0x5582525a9d10)" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Let's initialize a new empty MetaAlmanac.\n", + "new_meta = MetaAlmanac()\n", + "new_meta.process() # Note that you can always initialize an empty MetaAlmanac because you can initialize an empty Almanac" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "88479168-6c60-438d-958e-86c757a59f04", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'{ files = [{ crc32 = None Natural, uri = \"../../data/de440s.bsp\" }, { crc32 = None Natural, uri = \"https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/moon_pa_de440_200625.bpc\" }, { crc32 = None Natural, uri = \"https://google.com/non/existing/pck08.pca\" }] }'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Create MetaFile instances\n", + "local_de = MetaFile(\"../../data/de440s.bsp\")\n", + "jpl_moon_rotation = MetaFile(\"https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/moon_pa_de440_200625.bpc\")\n", + "non_existing = MetaFile(\"https://google.com/non/existing/pck08.pca\")\n", + "# Add them to the meta almanac\n", + "new_meta.files = [local_de, jpl_moon_rotation, non_existing]\n", + "# And print what this configuration would be:\n", + "new_meta.dump()" + ] + }, + { + "cell_type": "markdown", + "id": "a8e243d7-1be5-401d-a732-46273e23327e", + "metadata": {}, + "source": [ + "Note that the MetaAlmanac will raise an error in case it cannot download the files. Let's try to load the configuration we just specified." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "b2f5c44c-473c-4ed3-8d92-18ac3eaa8dca", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\u001b[0;31mSignature:\u001b[0m \u001b[0mnon_existing\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprocess\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mDocstring:\u001b[0m\n", + "Processes this MetaFile by downloading it if it's a URL.\n", + "\n", + "This function modified `self` and changes the URI to be the path to the downloaded file.\n", + "\u001b[0;31mType:\u001b[0m builtin_function_or_method" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# You can call a specific file's `process` method to handle this specific file\n", + "non_existing.process?" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "7d58dc71-0a96-477d-9a41-c7e201704042", + "metadata": {}, + "outputs": [], + "source": [ + "# This does nothing because it's a local file\n", + "local_de.process()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "83437378-9b44-4fcd-a6e8-e774bf098534", + "metadata": {}, + "outputs": [ + { + "ename": "Exception", + "evalue": "fetching https://google.com/non/existing/pck08.pca returned 404 Not Found", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mException\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[13], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Trying to download the non existing file will throw an exception\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[43mnon_existing\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mprocess\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mException\u001b[0m: fetching https://google.com/non/existing/pck08.pca returned 404 Not 
Found" + ] + } + ], + "source": [ + "# Trying to download the non existing file will throw an exception\n", + "non_existing.process()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "e2c35dc9-4efe-4348-ac81-cb7384cf8faf", + "metadata": {}, + "outputs": [ + { + "ename": "Exception", + "evalue": "processing file #2 (MetaFile { uri: \"https://google.com/non/existing/pck08.pca\", crc32: None }) caused an error: fetching https://google.com/non/existing/pck08.pca returned 404 Not Found", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mException\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[14], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Trying to process the full meta almanac with an erroneous meta file will also throw an exception.\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[43mnew_meta\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mprocess\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mException\u001b[0m: processing file #2 (MetaFile { uri: \"https://google.com/non/existing/pck08.pca\", crc32: None }) caused an error: fetching https://google.com/non/existing/pck08.pca returned 404 Not Found" + ] + } + ], + "source": [ + "# Trying to process the full meta almanac with an erroneous meta file will also throw an exception.\n", + "new_meta.process()" + ] + }, + { + "cell_type": "markdown", + "id": "bbd9e752-a045-4a50-bf65-0fe455b84f0a", + "metadata": {}, + "source": [ + "## Directly loading a MetaFile\n", + "\n", + "Say you have an `Almanac` loaded with some of your files. And then you'd like to load one file that is stored remotely. You may do so directly with the `load_from_metafile` method of the Almanac.\n", + "\n", + "**Note:** an Almanac may only load _one_ planetary constants kernel and _one_ Euler parameters kernel. 
As such, if your Almanac already includes one of these, loading another one will _replace it_. Refer to tutorial #05 on how to build your own PCA and EPA files that include everything you need." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "577dde0e-9dd5-41b5-9263-0f97fdb78628", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Almanac: #SPK = 1\t#BPC = 0\tPlanetaryData with 54 ID mappings and 0 name mappings\n", + "Almanac: #SPK = 1\t#BPC = 0\tPlanetaryData with 54 ID mappings and 0 name mappings\tEulerParameterData with 3 ID mappings and 3 name mappings\n" + ] + } + ], + "source": [ + "from anise import Almanac\n", + "only_de440s = Almanac(\"../../data/de440s.bsp\").load(\"../../data/pck11.pca\")\n", + "print(only_de440s)\n", + "# Now load a PCA from the Nyx Space cloud\n", + "de440s_and_moon = only_de440s.load_from_metafile(MetaFile(\"http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa\", 2133296540))\n", + "print(de440s_and_moon)" + ] + }, + { + "cell_type": "markdown", + "id": "22637388-6a44-42cb-8722-38bf0d570a82", + "metadata": {}, + "source": [ + "To confirm that we've loaded the Moon FK file, let's grab the frame info from the Moon ME and Moon PA frames." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "3c0466bd-fca6-49e8-9aca-33032bbd4306", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Moon MOON_ME (ΞΌ = 4902.800066163796 km^3/s^2, radius = 1737.4 km)\n", + "Moon MOON_PA (ΞΌ = 4902.800066163796 km^3/s^2, radius = 1737.4 km)\n" + ] + } + ], + "source": [ + "from anise.astro.constants import Frames\n", + "print(de440s_and_moon.frame_info(Frames.MOON_ME_FRAME))\n", + "print(de440s_and_moon.frame_info(Frames.MOON_PA_FRAME))" + ] + }, + { + "cell_type": "markdown", + "id": "a8f2d315-6956-4236-bbf9-67b216ea16bf", + "metadata": {}, + "source": [ + "## Exercises\n", + "\n", + "### Learning Goals:\n", + "\n", + "1. 
Understand the structure and syntax of Dhall configuration files.\n", + "1. Learn how to use the MetaFile tool for easier configuration creation.\n", + "1. Gain insight into how different configurations affect the MetaAlmanac's operation.\n", + "\n", + "\n", + "### 1. Manual Dhall Configuration:\n", + "\n", + "+ Create a simple Dhall configuration file.\n", + "+ Include basic elements like a specific ephemerides file and a custom planetary constant file.\n", + "+ Load this configuration into the MetaAlmanac, observe the behavior, and query the loaded Almanac itself.\n", + "\n", + "### 2. Using MetaFile:\n", + "\n", + "+ Use the `MetaFile` class to generate the same configuration.\n", + "+ Compare the process of using MetaFile with manual Dhall file creation.\n", + "+ Load the generated configuration into the MetaAlmanac.\n", + "\n", + "_Note:_ Almanac, MetaFile, and MetaAlmanac all support the equality operation in Python.\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": ".venv" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file diff --git a/anise-py/tutorials/Tutorial 05 - Using frame kernels and text planetary kernels.ipynb b/anise-py/tutorials/Tutorial 05 - Using frame kernels and text planetary kernels.ipynb index 83dc439a..c1d3a8b7 100644 --- a/anise-py/tutorials/Tutorial 05 - Using frame kernels and text planetary kernels.ipynb +++ b/anise-py/tutorials/Tutorial 05 - Using frame kernels and text planetary kernels.ipynb @@ -1,338 +1,338 @@ { - "cells": [ - { - "cell_type": "markdown", - "id": "89f0eb3a-044d-4337-9cef-43a41cbef37c", - "metadata": {}, - "source": [ - "# ANISE\n", - "\n", - "ANISE is a modern rewrite of NAIF 
SPICE, written in Rust and providing interfaces to other languages including Python.\n", - "\n", - "## Goal\n", - "By the end of this tutorial, you should be able to know how to use your custom frame kernels (FK) and text planetary constant kernels (TPC) in ANISE.\n", - "\n", - "Let's start by installing ANISE: `pip install anise`" - ] - }, - { - "cell_type": "markdown", - "id": "1ee5f04e-762a-41d7-8d3b-ac537ae7e10c", - "metadata": {}, - "source": [ - "## Data structure\n", - "\n", - "SPICE supports text-based kernels. These allow for easy modification and inclusion of documentation directly in the text file. The way SPICE handles these is by keeping a file handler open on the file while it's loaded. This causes a number of issues, such as potential file locking problems, increased risk of data corruption in cases of unexpected software termination, and increased difficulty in managing concurrent access, especially pertinent in the context of onboard flight software. To prevent these issues, ANISE maintains a pointer to the data contained in these files in memory (via a memory mapping or a heap allocation). Moreover, ANISE ensures that the data it uses allows for immediate random access, which is a method of accessing data at any point in memory with equal speed, independent of its location, thereby speeding up searches within the kernel data. To facilitate this, the data is stored in a platform-independent binary structure using the ASN.1 DER specification (the telecommunications industry standard for 20+ years). ASN.1 offers advantages like well-defined data structures, robust encoding schemes, and ease of interoperability across different platforms, making it a reliable choice for storing kernel data.\n", - "\n", - "This approach allows ANISE to parse through the FK and TPC equivalents significantly faster than SPICE, while storing the same information.\n", - "\n", - "However, this also means that ANISE cannot load the text files directly. 
Instead, these must be converted into the ANISE format, respectively PCA and EPA for \"Planetary Constants ANISE\" kernel and \"Euler Parameter ANISE\" kernel. Euler parameters, also known as quaternion parameters, offer a compact and non-redundant representation of orientations in three dimensions, differing from quaternions in their normalization constraint and being slightly more robust to numerical errors in certain computational scenarios.\n", - "\n", - "For details about the data set structure, refer to the API documentation: [`DataSet`](https://docs.rs/anise/latest/anise/structure/dataset/struct.DataSet.html). The `EulerParameterDataSet` and the `PlanetaryDataSet` are concrete implementation of this `DataSet` structure.\n", - "\n", - "### Version compatibility\n", - "\n", - "ANISE guarantees to _not_ change the structure of these kernels in between patch versions (e.g. version `0.3.0` and version `0.3.99` are garanteed to have the compatible kernels). However, until version `1.0.0`, the structure _may_ change and if so, the updated version of the default PCA and EPA files will be added to the Nyx Space cloud.\n", - "\n", - "Since version `0.1.0`, the structure of the kernels has _not_ changed. However, the ANISE version is encoded at the start of each kernel. This is only used if the data set cannot be properly decoded to inform the user of the expected ANISE version and the one that they're trying to load. In other words, although there is a version `0.3` of the PCK08 and PCK11 kernels, the files used in version `0.1.0` are still compatible." - ] - }, - { - "cell_type": "markdown", - "id": "c0233d36-b01b-472b-9880-d1119099cc7c", - "metadata": {}, - "source": [ - "## Planetary Constant ANISE kernels\n", - "\n", - "Planetary Constant ANISE (PCA) kernels (or \"data set\") include a look up table for random access via a name or an ID, metadata, and more important the actual planetary data itself. 
This data include gravitational parameters, shape of the triaxial ellipsoid, phase angle polynominals for the prime meridian, pole right ascension and declination, and more. You'll find all of the specifications in the API documentation: [`PlanetaryData`](https://docs.rs/anise/latest/anise/structure/planetocentric/struct.PlanetaryData.html).\n", - "\n", - "In the previous tutorials, we focused on fetching the frame information from the Almanac. This operation reads the PCA to return a copy of this information. This is why a PCA is provided in the `latest.dhall` configuration file for an Almanac.\n", - "\n", - "The Planetary Data structure includes the gravitational data. _However_, the SPICE TPC files contain _either_ ellipsoid definition information _or_ gravitational data. Therefore, to build a PCA, ANISE requires _both_ versions of the TPCs.\n", - "\n", - "Let's go ahead and build a PCA file from the NAIF `pck0008.tpc` and the `gm_de431.tpc` files." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "d8f0c21e-5075-4179-a3da-d85d35078709", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\u001b[0;31mSignature:\u001b[0m \u001b[0mconvert_tpc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpck_file_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgm_file_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0manise_output_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moverwrite\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mDocstring:\u001b[0m\n", - "Converts two KPL/TPC files, one defining the planetary constants as text, and the other defining the gravity parameters, into the PlanetaryDataSet equivalent ANISE file.\n", - "KPL/TPC files must be converted into \"PCA\" (Planetary Constant ANISE) files before being loaded into ANISE.\n", - "\u001b[0;31mType:\u001b[0m builtin_function_or_method" - ] - }, - "metadata": {}, - "output_type": "display_data" - } 
- ], - "source": [ - "from anise.utils import convert_tpc\n", - "convert_tpc?" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "4f28e5f6-a768-4798-be37-a922948423d8", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Skipping 802: no gravity data\n", - "Skipping 806: no gravity data\n", - "Skipping 9511010: no gravity data\n", - "Skipping 514: no gravity data\n", - "Skipping 714: no gravity data\n", - "Skipping 509: no gravity data\n", - "Skipping 707: no gravity data\n", - "Skipping 711: no gravity data\n", - "Skipping 804: no gravity data\n", - "Skipping 2431010: no gravity data\n", - "Skipping 506: no gravity data\n", - "Skipping 710: no gravity data\n", - "Skipping 712: no gravity data\n", - "Skipping 612: no gravity data\n", - "Skipping 618: no gravity data\n", - "Skipping 713: no gravity data\n", - "Skipping 803: no gravity data\n", - "Skipping 508: no gravity data\n", - "Skipping 515: no gravity data\n", - "Skipping 715: no gravity data\n", - "Skipping 511: no gravity data\n", - "Skipping 706: no gravity data\n", - "Skipping 516: no gravity data\n", - "Skipping 507: no gravity data\n", - "Skipping 614: no gravity data\n", - "Skipping 808: no gravity data\n", - "Skipping 805: no gravity data\n", - "Skipping 807: no gravity data\n", - "Skipping 513: no gravity data\n", - "Skipping 2000216: no gravity data\n", - "Skipping 613: no gravity data\n", - "Skipping 708: no gravity data\n", - "Skipping 512: no gravity data\n", - "Skipping 709: no gravity data\n", - "Skipping 510: no gravity data\n" - ] + "cells": [ + { + "cell_type": "markdown", + "id": "89f0eb3a-044d-4337-9cef-43a41cbef37c", + "metadata": {}, + "source": [ + "# ANISE\n", + "\n", + "ANISE is a modern rewrite of NAIF SPICE, written in Rust and providing interfaces to other languages including Python.\n", + "\n", + "## Goal\n", + "By the end of this tutorial, you should be able to know how to use your custom frame kernels (FK) and text 
planetary constant kernels (TPC) in ANISE.\n", + "\n", + "Let's start by installing ANISE: `pip install anise`" + ] + }, + { + "cell_type": "markdown", + "id": "1ee5f04e-762a-41d7-8d3b-ac537ae7e10c", + "metadata": {}, + "source": [ + "## Data structure\n", + "\n", + "SPICE supports text-based kernels. These allow for easy modification and inclusion of documentation directly in the text file. The way SPICE handles these is by keeping a file handler open on the file while it's loaded. This causes a number of issues, such as potential file locking problems, increased risk of data corruption in cases of unexpected software termination, and increased difficulty in managing concurrent access, especially pertinent in the context of onboard flight software. To prevent these issues, ANISE maintains a pointer to the data contained in these files in memory (via a memory mapping or a heap allocation). Moreover, ANISE ensures that the data it uses allows for immediate random access, which is a method of accessing data at any point in memory with equal speed, independent of its location, thereby speeding up searches within the kernel data. To facilitate this, the data is stored in a platform-independent binary structure using the ASN.1 DER specification (the telecommunications industry standard for 20+ years). ASN.1 offers advantages like well-defined data structures, robust encoding schemes, and ease of interoperability across different platforms, making it a reliable choice for storing kernel data.\n", + "\n", + "This approach allows ANISE to parse through the FK and TPC equivalents significantly faster than SPICE, while storing the same information.\n", + "\n", + "However, this also means that ANISE cannot load the text files directly. Instead, these must be converted into the ANISE format, respectively PCA and EPA for \"Planetary Constants ANISE\" kernel and \"Euler Parameter ANISE\" kernel. 
Euler parameters, also known as quaternion parameters, offer a compact and non-redundant representation of orientations in three dimensions, differing from quaternions in their normalization constraint and being slightly more robust to numerical errors in certain computational scenarios.\n", + "\n", + "For details about the data set structure, refer to the API documentation: [`DataSet`](https://docs.rs/anise/latest/anise/structure/dataset/struct.DataSet.html). The `EulerParameterDataSet` and the `PlanetaryDataSet` are concrete implementations of this `DataSet` structure.\n", + "\n", + "### Version compatibility\n", + "\n", + "ANISE guarantees to _not_ change the structure of these kernels in between patch versions (e.g. version `0.3.0` and version `0.3.99` are guaranteed to have the compatible kernels). However, until version `1.0.0`, the structure _may_ change and if so, the updated version of the default PCA and EPA files will be added to the Nyx Space cloud.\n", + "\n", + "Since version `0.1.0`, the structure of the kernels has _not_ changed. However, the ANISE version is encoded at the start of each kernel. This is only used if the data set cannot be properly decoded to inform the user of the expected ANISE version and the one that they're trying to load. In other words, although there is a version `0.3` of the PCK08 and PCK11 kernels, the files used in version `0.1.0` are still compatible." + ] + }, + { + "cell_type": "markdown", + "id": "c0233d36-b01b-472b-9880-d1119099cc7c", + "metadata": {}, + "source": [ + "## Planetary Constant ANISE kernels\n", + "\n", + "Planetary Constant ANISE (PCA) kernels (or \"data set\") include a look up table for random access via a name or an ID, metadata, and more important the actual planetary data itself. This data include gravitational parameters, shape of the triaxial ellipsoid, phase angle polynomials for the prime meridian, pole right ascension and declination, and more. 
You'll find all of the specifications in the API documentation: [`PlanetaryData`](https://docs.rs/anise/latest/anise/structure/planetocentric/struct.PlanetaryData.html).\n", + "\n", + "In the previous tutorials, we focused on fetching the frame information from the Almanac. This operation reads the PCA to return a copy of this information. This is why a PCA is provided in the `latest.dhall` configuration file for an Almanac.\n", + "\n", + "The Planetary Data structure includes the gravitational data. _However_, the SPICE TPC files contain _either_ ellipsoid definition information _or_ gravitational data. Therefore, to build a PCA, ANISE requires _both_ versions of the TPCs.\n", + "\n", + "Let's go ahead and build a PCA file from the NAIF `pck0008.tpc` and the `gm_de431.tpc` files." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "d8f0c21e-5075-4179-a3da-d85d35078709", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\u001b[0;31mSignature:\u001b[0m \u001b[0mconvert_tpc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpck_file_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgm_file_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0manise_output_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moverwrite\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mDocstring:\u001b[0m\n", + "Converts two KPL/TPC files, one defining the planetary constants as text, and the other defining the gravity parameters, into the PlanetaryDataSet equivalent ANISE file.\n", + "KPL/TPC files must be converted into \"PCA\" (Planetary Constant ANISE) files before being loaded into ANISE.\n", + "\u001b[0;31mType:\u001b[0m builtin_function_or_method" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from anise.utils import convert_tpc\n", + "convert_tpc?" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "4f28e5f6-a768-4798-be37-a922948423d8", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Skipping 802: no gravity data\n", + "Skipping 806: no gravity data\n", + "Skipping 9511010: no gravity data\n", + "Skipping 514: no gravity data\n", + "Skipping 714: no gravity data\n", + "Skipping 509: no gravity data\n", + "Skipping 707: no gravity data\n", + "Skipping 711: no gravity data\n", + "Skipping 804: no gravity data\n", + "Skipping 2431010: no gravity data\n", + "Skipping 506: no gravity data\n", + "Skipping 710: no gravity data\n", + "Skipping 712: no gravity data\n", + "Skipping 612: no gravity data\n", + "Skipping 618: no gravity data\n", + "Skipping 713: no gravity data\n", + "Skipping 803: no gravity data\n", + "Skipping 508: no gravity data\n", + "Skipping 515: no gravity data\n", + "Skipping 715: no gravity data\n", + "Skipping 511: no gravity data\n", + "Skipping 706: no gravity data\n", + "Skipping 516: no gravity data\n", + "Skipping 507: no gravity data\n", + "Skipping 614: no gravity data\n", + "Skipping 808: no gravity data\n", + "Skipping 805: no gravity data\n", + "Skipping 807: no gravity data\n", + "Skipping 513: no gravity data\n", + "Skipping 2000216: no gravity data\n", + "Skipping 613: no gravity data\n", + "Skipping 708: no gravity data\n", + "Skipping 512: no gravity data\n", + "Skipping 709: no gravity data\n", + "Skipping 510: no gravity data\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added 49 items\n" + ] + } + ], + "source": [ + "convert_tpc(\"../../data/pck00008.tpc\", \"../../data/gm_de431.tpc\", \"demo08.pca\", True)" + ] + }, + { + "cell_type": "markdown", + "id": "443e0da7-cebe-4dd6-97e6-bee3b8f0450c", + "metadata": {}, + "source": [ + "We now have a PCA called `demo08.pca` which includes 49 entries. 
This file is compatible with _any_ machine you run ANISE on, little or big endian (which is _not_ the case of the DAF/BSP or DAF/BPC files).\n", + "\n", + "Let's load this file in an Almanac." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "24fa8803-6aa6-493b-a887-89f6e5e2f29a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Almanac: #SPK = 0\t#BPC = 0\tPlanetaryData with 49 ID mappings and 0 name mappings (@0x559b4af30800)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from anise import Almanac\n", + "almanac = Almanac(\"demo08.pca\")\n", + "almanac" + ] + }, + { + "cell_type": "markdown", + "id": "4524157c-6d4d-4acb-93d0-ed0c02fcc883", + "metadata": {}, + "source": [ + "Since we don't have anything loaded other than these planetary constants, we can't do a whole load, but we can query the Almanac for the shape and gravitational data by using `frame_info`." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "ab2d1579-411e-4cdb-ab2c-19fb15292cb0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "body 599 IAU_JUPITER (ΞΌ = 126686534.9218008 km^3/s^2, eq. 
radius = 71492 km, polar radius = 66854 km, f = 0.0648743915403122) (@0x7fe814070310)" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from anise.astro.constants import Frames\n", + "iau_jupiter_frame = almanac.frame_info(Frames.IAU_JUPITER_FRAME)\n", + "iau_jupiter_frame" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "0a9b1b53-dea9-4e91-939b-15c310b0a92f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "126686534.9218008" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "iau_jupiter_frame.mu_km3_s2()" + ] + }, + { + "cell_type": "markdown", + "id": "1c627e1c-0e9a-4965-b4b6-afa83be211b0", + "metadata": {}, + "source": [ + "### Exercise: Modifying the gravitational parameter of the Earth frame\n", + "\n", + "+ Make a copy of the `gm_de431.tpc` file and remove everything but the Earth GM.\n", + "+ Set the Earth GM to the one used by GMAT: `398600.4415` km^3/s^2\n", + "+ Make a copy of the `pck00008.tpc` file, removing everything except Earth related data.\n", + "+ Build a new PCA file using these two new files\n", + "+ Load a default Almanac and an empty Almanac where you'll load these two files into, along with the `DE440s.bsp` file.\n", + "+ Query the Cartesian state of the Earth at any time of your choosing from both of these Almanac. The state should be the same, since it'll be from the DE440s.bsp.\n", + "+ Observe how the frame gravitational parameter information of both will differ.\n", + "+ Finally, using the `at_epoch` function on both of these state, perform a two-body propagation for both and notice how the gravitational parameter affects the result." + ] + }, + { + "cell_type": "markdown", + "id": "652143bf-5066-4291-aac7-a663982333de", + "metadata": {}, + "source": [ + "## Euler Parameter ANISE kernel\n", + "\n", + "Euler parameters are a normalized quaternion. 
In fact, in the ANISE code in Rust, `EulerParameter` is an alias for `Quaternion`. Euler parameters are useful for defining fixed rotations, e.g. the rotation from the Moon Principal Axes frame to the Moon Mean Earth frame.\n", + "\n", + "Euler Parameter ANISE kernels (EPA) can be used to store these fixed rotations between frames, and reference them either by their ID or by their name. **They are the equivalent of SPICE's `FK` text files.**\n", + "\n", + "Until [#175](https://github.com/nyx-space/anise/issues/175), rotation data is _not_ exposed to Python. \n", + "\n", + "However, it's possible to convert an FK file into the EPA file using the following function." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "263507b1-8f58-44e5-b0b3-4abcf0a00b92", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\u001b[0;31mSignature:\u001b[0m\n", + "\u001b[0mconvert_fk\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m \u001b[0mfk_file_path\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m \u001b[0manise_output_path\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m \u001b[0mshow_comments\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m \u001b[0moverwrite\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mDocstring:\u001b[0m\n", + "Converts a KPL/FK file, that defines frame constants like fixed rotations, and frame name to ID mappings into the EulerParameterDataSet equivalent ANISE file.\n", + "KPL/FK files must be converted into \"PCA\" (Planetary Constant ANISE) files before being loaded into ANISE.\n", + "\u001b[0;31mType:\u001b[0m builtin_function_or_method" + ] + }, + "metadata": {}, + 
"output_type": "display_data" + } + ], + "source": [ + "from anise.utils import convert_fk\n", + "convert_fk?" + ] + }, + { + "cell_type": "markdown", + "id": "e26f92f9-9fa6-4fb7-b142-8bcaf0451b28", + "metadata": {}, + "source": [ + "On the Nyx Space Cloud, you'll find the `moon.fk` file, which includes the mapping between the Moon PA and Moon ME frame. The latest Almanac also includes the high precision Moon ME frame. Hence, with default data, you can rotate an object from any frame into the high precision Moon ME frame.\n", + "\n", + "## Exercise\n", + "\n", + "Using tutorial 04, compute the azimuth, elevation, and range of the [Shackleton](https://en.wikipedia.org/wiki/Shackleton_(crater)) crater on the Moon to the city of Paris, France.\n", + "\n", + "Here are the general steps:\n", + "\n", + "1. Load the latest Almanac, and check (by printing it) that it includes both EPA and PCA data. Else, load the moon_fk.epa file from the Nyx Space Cloud using a MetaFile with the URL `http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa`.\n", + "2. Define a time series over a year with a granularity of 12 hours. This crater is on the South Pole of the Moon, and its visibility is often below the horizon of an object as far north as Paris.\n", + "3. For each epoch, define Paris as an `Orbit` instance from its longitude and latitude (recall that the constants include the mean Earth angular rotation rate), in the IAU_EARTH frame. Also build the crater in the IAU_MOON frame.\n", + "4. Finally, call the AER function of the Almanac with each epoch to compute the AER data. Plot it!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0a91851b-188a-4dc8-9e8c-40276012dbd1", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": ".venv" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Added 49 items\n" - ] - } - ], - "source": [ - "convert_tpc(\"../../data/pck00008.tpc\", \"../../data/gm_de431.tpc\", \"demo08.pca\", True)" - ] - }, - { - "cell_type": "markdown", - "id": "443e0da7-cebe-4dd6-97e6-bee3b8f0450c", - "metadata": {}, - "source": [ - "We now have a PCA called `demo08.pca` which includes 49 entries. This file is compatible with _any_ machine you run ANISE on, little or big endian (which is _not_ the case of the DAF/BSP or DAF/BPC files).\n", - "\n", - "Let's load this file in an Almanac." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "24fa8803-6aa6-493b-a887-89f6e5e2f29a", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Almanac: #SPK = 0\t#BPC = 0\tPlanetaryData with 49 ID mappings and 0 name mappings (@0x559b4af30800)" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from anise import Almanac\n", - "almanac = Almanac(\"demo08.pca\")\n", - "almanac" - ] - }, - { - "cell_type": "markdown", - "id": "4524157c-6d4d-4acb-93d0-ed0c02fcc883", - "metadata": {}, - "source": [ - "Since we don't have anything loaded other than these planetary constants, we can't do a whole load, but we can query the Almanac for the shape and gravitational data by using `frame_info`." 
- ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "ab2d1579-411e-4cdb-ab2c-19fb15292cb0", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "body 599 IAU_JUPITER (ΞΌ = 126686534.9218008 km^3/s^2, eq. radius = 71492 km, polar radius = 66854 km, f = 0.0648743915403122) (@0x7fe814070310)" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from anise.astro.constants import Frames\n", - "iau_jupiter_frame = almanac.frame_info(Frames.IAU_JUPITER_FRAME)\n", - "iau_jupiter_frame" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "0a9b1b53-dea9-4e91-939b-15c310b0a92f", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "126686534.9218008" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "iau_jupiter_frame.mu_km3_s2()" - ] - }, - { - "cell_type": "markdown", - "id": "1c627e1c-0e9a-4965-b4b6-afa83be211b0", - "metadata": {}, - "source": [ - "### Exercise: Modifying the gravitational parameter of the Earth frame\n", - "\n", - "+ Make a copy of the `gm_de431.tpc` file and remove everything but the Earth GM.\n", - "+ Set the Earth GM to the one used by GMAT: `398600.4415` km^3/s^2\n", - "+ Make a copy of the `pck00008.tpc` file, removing everything except Earth related data.\n", - "+ Build a new PCA file using these two new files\n", - "+ Load a default Almanac and an empty Almanac where you'll load these two files into, along with the `DE440s.bsp` file.\n", - "+ Query the Cartesian state of the Earth at any time of your choosing from both of these Almanac. The state should be the same, since it'll be from the DE440s.bsp.\n", - "+ Observe how the frame graviational parameter information of both will differ.\n", - "+ Finally, using the `at_epoch` function on both of these state, perform a two-body propagation for both and notice how the graviational parameter affects the result." 
- ] - }, - { - "cell_type": "markdown", - "id": "652143bf-5066-4291-aac7-a663982333de", - "metadata": {}, - "source": [ - "## Euler Parameter ANISE kernel\n", - "\n", - "Euler parameters are a normalized quaternion. In fact, in the ANISE code in Rust, `EulerParameter` is an alias for `Quaternion`. Euler parameters are useful for defining fixed rotations, e.g. the rotation from the Moon Principal Axes frame to the Moon Mean Earth frame.\n", - "\n", - "Euler Parameter ANISE kernels (EPA) can be used to store these fixed rotations between frames, and reference them either by their ID or by their name. **They are the equivalent of SPICE's `FK` text files.**\n", - "\n", - "Until [#175](https://github.com/nyx-space/anise/issues/175), rotation data is _not_ exposed to Python. \n", - "\n", - "However, it's possible to convert an FK file into the EPA file using the following function." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "263507b1-8f58-44e5-b0b3-4abcf0a00b92", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\u001b[0;31mSignature:\u001b[0m\n", - "\u001b[0mconvert_fk\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0mfk_file_path\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0manise_output_path\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0mshow_comments\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0moverwrite\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mDocstring:\u001b[0m\n", - "Converts a KPL/FK file, that defines frame constants like fixed rotations, and frame name to ID mappings into the EulerParameterDataSet equivalent ANISE file.\n", 
- "KPL/FK files must be converted into \"PCA\" (Planetary Constant ANISE) files before being loaded into ANISE.\n", - "\u001b[0;31mType:\u001b[0m builtin_function_or_method" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "from anise.utils import convert_fk\n", - "convert_fk?" - ] - }, - { - "cell_type": "markdown", - "id": "e26f92f9-9fa6-4fb7-b142-8bcaf0451b28", - "metadata": {}, - "source": [ - "On the Nyx Space Cloud, you'll find the `moon.fk` file, which includes the mapping between the Moon PA and Moon ME frame. The latest Almanac also includes the high precision Moon ME frame. Hence, with default data, you can rotate an object from any frame into the high precision Moon ME frame.\n", - "\n", - "## Exercise\n", - "\n", - "Using tutorial 04, compute the azimuth, elevation, and range of the [Shackleton](https://en.wikipedia.org/wiki/Shackleton_(crater)) crater on the Moon to the city of Paris, France.\n", - "\n", - "Here are the general steps:\n", - "\n", - "1. Load the latest Almanac, and check (by printing it) that it includes both EPA and PCA data. Else, load the moon_fk.epa file from the Nyx Space Cloud using a MetaFile with the URL .\n", - "2. Define a time series over a year with a granularity of 12 hours. This crater is on the South Pole of the Moon, and its visibility is often below the horizon of an object as far north as Paris.\n", - "3. For each epoch, define Paris as an `Orbit` instance from its longitude and latitde (recall that the constants include the mean Earth angular rotation rate), in the IAU_EARTH frame. Also build the crater in the IAU_MOON frame.\n", - "4. Finally, call the AER function of the Almanac with each epoch to compute the AER data. Plot it!" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0a91851b-188a-4dc8-9e8c-40276012dbd1", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": ".venv" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file diff --git a/anise/README.md b/anise/README.md index 4d411c75..8370a89a 100644 --- a/anise/README.md +++ b/anise/README.md @@ -183,9 +183,9 @@ For convenience, Nyx Space provides a few important SPICE files on a public buck + [de440s.bsp](http://public-data.nyxspace.com/anise/de440s.bsp): JPL's latest ephemeris dataset from 1900 until 20250 + [de440.bsp](http://public-data.nyxspace.com/anise/de440.bsp): JPL's latest long-term ephemeris dataset -+ [pck08.pca](http://public-data.nyxspace.com/anise/v0.3/pck08.pca): planetary constants ANISE (`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file [pck00008.tpc](http://public-data.nyxspace.com/anise/pck00008.tpc) -+ [pck11.pca](http://public-data.nyxspace.com/anise/v0.3/pck11.pca): planetary constants ANISE (`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file [pck00011.tpc](http://public-data.nyxspace.com/anise/pck00011.tpc) -+ [moon_fk.epa](http://public-data.nyxspace.com/anise/v0.3/moon_fk.epa): Euler Parameter ANISE (`epa`) kernel, built from the JPL Moon Frame Kernel `moon_080317.txt` ++ [pck08.pca](http://public-data.nyxspace.com/anise/v0.4/pck08.pca): planetary constants ANISE 
(`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file [pck00008.tpc](http://public-data.nyxspace.com/anise/pck00008.tpc) ++ [pck11.pca](http://public-data.nyxspace.com/anise/v0.4/pck11.pca): planetary constants ANISE (`pca`) kernel, built from the JPL gravitational data [gm_de431.tpc](http://public-data.nyxspace.com/anise/gm_de431.tpc) and JPL's plantary constants file [pck00011.tpc](http://public-data.nyxspace.com/anise/pck00011.tpc) ++ [moon_fk.epa](http://public-data.nyxspace.com/anise/v0.4/moon_fk.epa): Euler Parameter ANISE (`epa`) kernel, built from the JPL Moon Frame Kernel `moon_080317.txt` You may load any of these using the `load()` shortcut that will determine the file type upon loading, e.g. `let almanac = Almanac::new("pck08.pca").unwrap();` or in Python `almanac = Almanac("pck08.pca")`. To automatically download remote assets, from the Nyx Cloud or elsewhere, use the MetaAlmanac: `almanac = MetaAlmanac("ci_config.dhall").process()` in Python. diff --git a/anise/src/almanac/aer.rs b/anise/src/almanac/aer.rs index 5a89be16..9331c57a 100644 --- a/anise/src/almanac/aer.rs +++ b/anise/src/almanac/aer.rs @@ -161,11 +161,13 @@ mod ut_aer { let eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); // Now iterate the trajectory to generate the measurements. 
- let gmat_ranges_km = [9.145_755_787_575_61e4, + let gmat_ranges_km = [ + 9.145_755_787_575_61e4, 9.996_505_560_799_869e4, 1.073_229_118_411_670_2e5, 1.145_516_751_191_464_7e5, - 1.265_739_190_638_930_7e5]; + 1.265_739_190_638_930_7e5, + ]; let states = [ CartesianState::new( diff --git a/anise/src/almanac/metaload/metaalmanac.rs b/anise/src/almanac/metaload/metaalmanac.rs index fde5cc1e..05c82496 100644 --- a/anise/src/almanac/metaload/metaalmanac.rs +++ b/anise/src/almanac/metaload/metaalmanac.rs @@ -201,8 +201,8 @@ impl MetaAlmanac { /// /// # File list /// - -/// - -/// - +/// - +/// - /// - /// - /// diff --git a/data/example_meta.dhall b/data/example_meta.dhall index 9502324a..c714370a 100644 --- a/data/example_meta.dhall +++ b/data/example_meta.dhall @@ -9,7 +9,7 @@ let Meta let NyxAsset : Text -> Text - = \(file : Text) -> "http://public-data.nyxspace.com/anise/v0.3/${file}" + = \(file : Text) -> "http://public-data.nyxspace.com/anise/v0.4/${file}" let JplAsset : Text -> Text From fc9c6d26511d397c55b8e5f4583d56f910421f0c Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Fri, 7 Jun 2024 07:57:31 -0600 Subject: [PATCH 16/17] Version bump for Pytest --- .github/workflows/python.yml | 2 +- .github/workflows/rust.yml | 8 ++++---- Cargo.toml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index 36f69c76..09ab1776 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -216,7 +216,7 @@ jobs: release: name: Release runs-on: ubuntu-latest - if: "startsWith(github.ref, 'refs/tags/')" + if: github.ref_type == 'tag' needs: [linux, windows, macos, sdist] steps: - uses: actions/download-artifact@v3 diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index f8bd2a99..c860cdf4 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -197,12 +197,11 @@ jobs: release: name: Release runs-on: ubuntu-latest - needs: [check, test, 
lints, validation, coverage] - - if: "startsWith(github.ref, 'refs/tags/')" + needs: [check, test, validation, lints] + if: github.ref_type == 'tag' steps: - name: Checkout sources - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install stable toolchain uses: dtolnay/rust-toolchain@master @@ -214,6 +213,7 @@ jobs: env: TOKEN: ${{ secrets.CRATESIO_API_TOKEN }} run: | + cd anise # Jump into the package cargo login $TOKEN cargo publish \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 53f89b36..8d8d79ee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ resolver = "2" members = ["anise", "anise-cli", "anise-gui", "anise-py"] [workspace.package] -version = "0.3.2" +version = "0.4.0" edition = "2021" authors = ["Christopher Rabotin "] description = "ANISE provides a toolkit and files for Attitude, Navigation, Instrument, Spacecraft, and Ephemeris data. It's a modern replacement of NAIF SPICE file." From 987de89556b4fa8ea91ff6da5204cd0fa471e893 Mon Sep 17 00:00:00 2001 From: Christopher Rabotin Date: Fri, 7 Jun 2024 08:03:02 -0600 Subject: [PATCH 17/17] Continue fix version --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 8d8d79ee..8bfd89d8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,7 +52,7 @@ serde = "1" serde_derive = "1" serde_dhall = "0.12" -anise = { version = "0.3.2", path = "anise", default-features = false } +anise = { version = "0.4.0", path = "anise", default-features = false } [profile.bench] debug = true