Skip to content

Commit

Permalink
Merge pull request #25 from magnusuMET/ci/typos
Browse files Browse the repository at this point in the history
Integrate support for 'typos'
  • Loading branch information
magnusuMET authored Oct 3, 2024
2 parents 76a9741 + 5e4294d commit 5faee12
Show file tree
Hide file tree
Showing 6 changed files with 38 additions and 19 deletions.
9 changes: 9 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,15 @@ env:
CARGO_TERM_COLOR: always

jobs:
typos:
name: Spell checking (typos)
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Check spelling
uses: crate-ci/typos@f12cee1d8f3c79282a98ecb41d235aef17dfa8fd # v1.25.0

lint:
name: lint
runs-on: ubuntu-latest
Expand Down
10 changes: 10 additions & 0 deletions .typos.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
[files]
extend-exclude = [
"hdf5-src/ext",
"hdf5-sys/src",
]

[default]
extend-ignore-identifiers-re = [
"Dout",
]
2 changes: 1 addition & 1 deletion hdf5/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ use crate::internal_prelude::*;

/// Silence errors emitted by `hdf5`
///
/// Safety: This version is not thread-safe and must be syncronised
/// Safety: This version is not thread-safe and must be synchronised
/// with other calls to `hdf5`
pub(crate) unsafe fn silence_errors_no_sync(silence: bool) {
// Cast function with different argument types. This is safe because H5Eprint2 is
Expand Down
4 changes: 2 additions & 2 deletions hdf5/src/hl/group.rs
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,7 @@ impl Group {
let vtable = unsafe { vtable.as_mut().expect("iter_visit: null op_data ptr") };
unsafe { name.as_ref().expect("iter_visit: null name ptr") };
let name = unsafe { std::ffi::CStr::from_ptr(name) };
let info = unsafe { info.as_ref().expect("iter_vist: null info ptr") };
let info = unsafe { info.as_ref().expect("iter_visit: null info ptr") };
let handle = Handle::try_borrow(id).expect("iter_visit: unable to create a handle");
let group = Group::from_handle(handle);
let ret =
Expand Down Expand Up @@ -371,7 +371,7 @@ impl Group {
return true; // ok, object is of another type, skipped
}
}
false // an error occured somewhere along the way
false // an error occurred somewhere along the way
})
}

Expand Down
8 changes: 4 additions & 4 deletions hdf5/src/hl/location.rs
Original file line number Diff line number Diff line change
Expand Up @@ -84,14 +84,14 @@ impl Location {
File::from_id(h5try!(H5Iget_file_id(self.id())))
}

/// Returns the commment attached to the named object, if any.
/// Returns the comment attached to the named object, if any.
pub fn comment(&self) -> Option<String> {
// TODO: should this return Result<Option<String>> or fail silently?
let comment = h5lock!(get_h5_str(|m, s| H5Oget_comment(self.id(), m, s)).ok());
comment.and_then(|c| if c.is_empty() { None } else { Some(c) })
}

/// Set or the commment attached to the named object.
/// Set the comment attached to the named object.
#[deprecated(note = "attributes are preferred to comments")]
pub fn set_comment(&self, comment: &str) -> Result<()> {
// TODO: &mut self?
Expand All @@ -100,7 +100,7 @@ impl Location {
h5call!(H5Oset_comment(self.id(), comment.as_ptr())).and(Ok(()))
}

/// Clear the commment attached to the named object.
/// Clear the comment attached to the named object.
#[deprecated(note = "attributes are preferred to comments")]
pub fn clear_comment(&self) -> Result<()> {
// TODO: &mut self?
Expand Down Expand Up @@ -167,7 +167,7 @@ pub enum LocationType {
impl From<H5O_type_t> for LocationType {
fn from(loc_type: H5O_type_t) -> Self {
// we're assuming here that if a C API call returns H5O_TYPE_UNKNOWN (-1), then
// an error has occured anyway and has been pushed on the error stack so we'll
// an error has occurred anyway and has been pushed on the error stack so we'll
// catch it, and the value of -1 will never reach this conversion function
match loc_type {
H5O_type_t::H5O_TYPE_DATASET => Self::Dataset,
Expand Down
24 changes: 12 additions & 12 deletions hdf5/tests/test_dataset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -205,18 +205,18 @@ fn test_byte_read_seek_impl(ds: &hdf5::Dataset, arr: &ArrayD<u8>, ndim: usize) -
pos += chunk_len;
}

// Seek to the begining and read again
// Seek to the beginning and read again
reader.seek(SeekFrom::Start(0)).expect("io::Seek failed");
let mut out_bytes = vec![0u8; arr.len()];
reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed");
assert_eq!(out_bytes.as_slice(), arr.as_slice().unwrap());

// Seek to a random position from start
let pos = rng.gen_range(0..arr.len() + 1) as u64;
let seeked_pos = reader.seek(SeekFrom::Start(pos)).expect("io::Seek failed") as usize;
let mut out_bytes = vec![0u8; arr.len() - seeked_pos];
let seek_pos = reader.seek(SeekFrom::Start(pos)).expect("io::Seek failed") as usize;
let mut out_bytes = vec![0u8; arr.len() - seek_pos];
reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed");
assert_eq!(out_bytes.as_slice(), arr.slice(s![seeked_pos..]).as_slice().unwrap());
assert_eq!(out_bytes.as_slice(), arr.slice(s![seek_pos..]).as_slice().unwrap());

// Seek from current position
let orig_pos = reader.seek(SeekFrom::Start(pos)).expect("io::Seek failed") as i64;
Expand All @@ -225,20 +225,20 @@ fn test_byte_read_seek_impl(ds: &hdf5::Dataset, arr: &ArrayD<u8>, ndim: usize) -
if (rel_pos + orig_pos) < 0 {
assert!(pos_res.is_err()) // We cannot seek before start
} else {
let seeked_pos = pos_res.unwrap() as usize;
assert_eq!(rel_pos + orig_pos, seeked_pos as i64);
let mut out_bytes = vec![0u8; arr.len() - seeked_pos];
let seek_pos = pos_res.unwrap() as usize;
assert_eq!(rel_pos + orig_pos, seek_pos as i64);
let mut out_bytes = vec![0u8; arr.len() - seek_pos];
reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed");
assert_eq!(out_bytes.as_slice(), arr.slice(s![seeked_pos..]).as_slice().unwrap());
assert_eq!(out_bytes.as_slice(), arr.slice(s![seek_pos..]).as_slice().unwrap());
}

// Seek to a random position from end
let pos = -(rng.gen_range(0..arr.len() + 1) as i64);
let seeked_pos = reader.seek(SeekFrom::End(pos)).expect("io::Seek failed") as usize;
assert_eq!(pos, seeked_pos as i64 - arr.len() as i64);
let mut out_bytes = vec![0u8; arr.len() - seeked_pos];
let seek_pos = reader.seek(SeekFrom::End(pos)).expect("io::Seek failed") as usize;
assert_eq!(pos, seek_pos as i64 - arr.len() as i64);
let mut out_bytes = vec![0u8; arr.len() - seek_pos];
reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed");
assert_eq!(out_bytes.as_slice(), arr.slice(s![seeked_pos..]).as_slice().unwrap());
assert_eq!(out_bytes.as_slice(), arr.slice(s![seek_pos..]).as_slice().unwrap());

// Seek before start
assert!(reader.seek(SeekFrom::End(-(arr.len() as i64) - 1)).is_err());
Expand Down

0 comments on commit 5faee12

Please sign in to comment.