chore(lint): apply rustfmt
slavik-pastushenko committed Nov 7, 2023
1 parent 7e1091c commit 0a2055c
Showing 5 changed files with 167 additions and 120 deletions.
6 changes: 6 additions & 0 deletions README.md
@@ -101,6 +101,12 @@ Run [clippy](https://github.com/rust-lang/rust-clippy):
cargo clippy --all-targets --all-features -- -D warnings
```

Run [rustfmt](https://github.com/rust-lang/rustfmt):

```bash
cargo fmt
```
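
In CI, a check-only run is often preferred so that formatting drift fails the build instead of being silently rewritten. A minimal sketch of that usage (this commit does not show the repository's actual CI configuration):

```bash
# Verify formatting across the workspace without modifying files;
# exits with a non-zero status if any file would be reformatted.
cargo fmt --all -- --check
```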

Generate documentation in HTML format:

```bash
78 changes: 37 additions & 41 deletions src/kdbush.rs
@@ -211,16 +211,20 @@ impl KDBush {
let m = k - left + 1;
let z = (n as f64).ln();
let s = 0.5 * ((2.0 * z) / 3.0).exp();
let sds = if (m as f64) - (n as f64) / 2.0 < 0.0 { -1.0 } else { 1.0 };
let sds = if (m as f64) - (n as f64) / 2.0 < 0.0 {
-1.0
} else {
1.0
};
let n_s = (n as f64) - s;
let sd = 0.5 * ((z * s * n_s) / (n as f64)).sqrt() * sds;
let new_left = KDBush::get_max(
left,
((k as f64) - ((m as f64) * s) / (n as f64) + sd).floor() as usize
((k as f64) - ((m as f64) * s) / (n as f64) + sd).floor() as usize,
);
let new_right = KDBush::get_min(
right,
((k as f64) + (((n - m) as f64) * s) / (n as f64) + sd).floor() as usize
((k as f64) + (((n - m) as f64) * s) / (n as f64) + sd).floor() as usize,
);

self.select(k, new_left, new_right, axis);
@@ -269,12 +273,20 @@ impl KDBush {

/// Return the maximum of two values
fn get_max(a: usize, b: usize) -> usize {
if a > b { a } else { b }
if a > b {
a
} else {
b
}
}

/// Return the minimum of two values
fn get_min(a: usize, b: usize) -> usize {
if a < b { a } else { b }
if a < b {
a
} else {
b
}
}

/// Swap the elements at two specified indices in the KD-tree data structures
@@ -402,27 +414,28 @@ mod tests {
];

pub const IDS: [usize; 100] = [
97, 74, 95, 30, 77, 38, 76, 27, 80, 55, 72, 90, 88, 48, 43, 46, 65, 39, 62, 93, 9, 96, 47, 8,
3, 12, 15, 14, 21, 41, 36, 40, 69, 56, 85, 78, 17, 71, 44, 19, 18, 13, 99, 24, 67, 33, 37, 49,
54, 57, 98, 45, 23, 31, 66, 68, 0, 32, 5, 51, 75, 73, 84, 35, 81, 22, 61, 89, 1, 11, 86, 52,
94, 16, 2, 6, 25, 92, 42, 20, 60, 58, 83, 79, 64, 10, 59, 53, 26, 87, 4, 63, 50, 7, 28, 82,
70, 29, 34, 91,
97, 74, 95, 30, 77, 38, 76, 27, 80, 55, 72, 90, 88, 48, 43, 46, 65, 39, 62, 93, 9, 96, 47,
8, 3, 12, 15, 14, 21, 41, 36, 40, 69, 56, 85, 78, 17, 71, 44, 19, 18, 13, 99, 24, 67, 33,
37, 49, 54, 57, 98, 45, 23, 31, 66, 68, 0, 32, 5, 51, 75, 73, 84, 35, 81, 22, 61, 89, 1,
11, 86, 52, 94, 16, 2, 6, 25, 92, 42, 20, 60, 58, 83, 79, 64, 10, 59, 53, 26, 87, 4, 63,
50, 7, 28, 82, 70, 29, 34, 91,
];

pub const COORDS: [f64; 200] = [
10.0, 20.0, 6.0, 22.0, 10.0, 10.0, 6.0, 27.0, 20.0, 42.0, 18.0, 28.0, 11.0, 23.0, 13.0, 25.0,
9.0, 40.0, 26.0, 4.0, 29.0, 50.0, 30.0, 38.0, 41.0, 11.0, 43.0, 12.0, 43.0, 3.0, 46.0, 12.0,
32.0, 14.0, 35.0, 15.0, 40.0, 31.0, 33.0, 18.0, 43.0, 15.0, 40.0, 34.0, 32.0, 38.0, 33.0, 34.0,
33.0, 54.0, 1.0, 61.0, 24.0, 56.0, 11.0, 91.0, 4.0, 98.0, 20.0, 81.0, 22.0, 93.0, 19.0, 81.0,
21.0, 67.0, 6.0, 76.0, 21.0, 72.0, 21.0, 73.0, 25.0, 57.0, 44.0, 64.0, 47.0, 66.0, 29.0, 69.0,
46.0, 61.0, 38.0, 74.0, 46.0, 78.0, 38.0, 84.0, 32.0, 88.0, 27.0, 91.0, 45.0, 94.0, 39.0, 94.0,
41.0, 92.0, 47.0, 21.0, 47.0, 29.0, 48.0, 34.0, 60.0, 25.0, 58.0, 22.0, 55.0, 6.0, 62.0, 32.0,
54.0, 1.0, 53.0, 28.0, 54.0, 3.0, 66.0, 14.0, 68.0, 3.0, 70.0, 5.0, 83.0, 6.0, 93.0, 14.0,
99.0, 2.0, 71.0, 15.0, 96.0, 18.0, 95.0, 20.0, 97.0, 21.0, 81.0, 23.0, 78.0, 30.0, 84.0, 30.0,
87.0, 28.0, 90.0, 31.0, 65.0, 35.0, 53.0, 54.0, 52.0, 38.0, 65.0, 48.0, 67.0, 53.0, 49.0, 60.0,
50.0, 68.0, 57.0, 70.0, 56.0, 77.0, 63.0, 86.0, 71.0, 90.0, 52.0, 83.0, 71.0, 82.0, 72.0, 81.0,
94.0, 51.0, 75.0, 53.0, 95.0, 39.0, 78.0, 53.0, 88.0, 62.0, 84.0, 72.0, 77.0, 73.0, 99.0, 76.0,
73.0, 81.0, 88.0, 87.0, 96.0, 98.0, 96.0, 82.0,
10.0, 20.0, 6.0, 22.0, 10.0, 10.0, 6.0, 27.0, 20.0, 42.0, 18.0, 28.0, 11.0, 23.0, 13.0,
25.0, 9.0, 40.0, 26.0, 4.0, 29.0, 50.0, 30.0, 38.0, 41.0, 11.0, 43.0, 12.0, 43.0, 3.0,
46.0, 12.0, 32.0, 14.0, 35.0, 15.0, 40.0, 31.0, 33.0, 18.0, 43.0, 15.0, 40.0, 34.0, 32.0,
38.0, 33.0, 34.0, 33.0, 54.0, 1.0, 61.0, 24.0, 56.0, 11.0, 91.0, 4.0, 98.0, 20.0, 81.0,
22.0, 93.0, 19.0, 81.0, 21.0, 67.0, 6.0, 76.0, 21.0, 72.0, 21.0, 73.0, 25.0, 57.0, 44.0,
64.0, 47.0, 66.0, 29.0, 69.0, 46.0, 61.0, 38.0, 74.0, 46.0, 78.0, 38.0, 84.0, 32.0, 88.0,
27.0, 91.0, 45.0, 94.0, 39.0, 94.0, 41.0, 92.0, 47.0, 21.0, 47.0, 29.0, 48.0, 34.0, 60.0,
25.0, 58.0, 22.0, 55.0, 6.0, 62.0, 32.0, 54.0, 1.0, 53.0, 28.0, 54.0, 3.0, 66.0, 14.0,
68.0, 3.0, 70.0, 5.0, 83.0, 6.0, 93.0, 14.0, 99.0, 2.0, 71.0, 15.0, 96.0, 18.0, 95.0, 20.0,
97.0, 21.0, 81.0, 23.0, 78.0, 30.0, 84.0, 30.0, 87.0, 28.0, 90.0, 31.0, 65.0, 35.0, 53.0,
54.0, 52.0, 38.0, 65.0, 48.0, 67.0, 53.0, 49.0, 60.0, 50.0, 68.0, 57.0, 70.0, 56.0, 77.0,
63.0, 86.0, 71.0, 90.0, 52.0, 83.0, 71.0, 82.0, 72.0, 81.0, 94.0, 51.0, 75.0, 53.0, 95.0,
39.0, 78.0, 53.0, 88.0, 62.0, 84.0, 72.0, 77.0, 73.0, 99.0, 76.0, 73.0, 81.0, 88.0, 87.0,
96.0, 98.0, 96.0, 82.0,
];

#[test]
@@ -457,24 +470,7 @@ mod tests {

let result = index.range(20.0, 30.0, 50.0, 70.0);
let expected_ids = vec![
60,
20,
45,
3,
17,
71,
44,
19,
18,
15,
69,
90,
62,
96,
47,
8,
77,
72
60, 20, 45, 3, 17, 71, 44, 19, 18, 15, 69, 90, 62, 96, 47, 8, 77, 72,
];

assert_eq!(result, expected_ids);
59 changes: 37 additions & 22 deletions src/lib.rs
@@ -2,9 +2,9 @@

mod kdbush;

use std::f64::{ consts::PI, INFINITY };
use serde::{ Deserialize, Serialize };
use kdbush::KDBush;
use serde::{Deserialize, Serialize};
use std::f64::{consts::PI, INFINITY};

/// An offset index used to access the zoom level value associated with a cluster in the data arrays
const OFFSET_ZOOM: usize = 2;
@@ -229,19 +229,22 @@ impl Supercluster {
}

let tree = &self.trees[self.limit_zoom(zoom)];
let ids = tree.range(lng_x(min_lng), lat_y(max_lat), lng_x(max_lng), lat_y(min_lat));
let ids = tree.range(
lng_x(min_lng),
lat_y(max_lat),
lng_x(max_lng),
lat_y(min_lat),
);
let mut clusters = Vec::new();

for id in ids {
let k = self.stride * id;

clusters.push(
if tree.data[k + OFFSET_NUM] > 1.0 {
get_cluster_json(&tree.data, k, &self.cluster_props)
} else {
self.points[tree.data[k + OFFSET_ID] as usize].clone()
}
);
clusters.push(if tree.data[k + OFFSET_NUM] > 1.0 {
get_cluster_json(&tree.data, k, &self.cluster_props)
} else {
self.points[tree.data[k + OFFSET_ID] as usize].clone()
});
}

clusters
@@ -273,9 +276,8 @@ impl Supercluster {
return Err(error_msg);
}

let r =
self.options.radius /
(self.options.extent * f64::powf(2.0, (origin_zoom as f64) - 1.0));
let r = self.options.radius
/ (self.options.extent * f64::powf(2.0, (origin_zoom as f64) - 1.0));

let x = data[origin_id * self.stride];
let y = data[origin_id * self.stride + 1];
@@ -406,7 +408,7 @@ impl Supercluster {
cluster_id: usize,
limit: usize,
offset: usize,
mut skipped: usize
mut skipped: usize,
) -> usize {
let cluster = self.get_children(cluster_id).unwrap();

@@ -422,7 +424,7 @@ impl Supercluster {
child.properties.cluster_id.unwrap(),
limit,
offset,
skipped
skipped,
);
// Exit the cluster
}
@@ -480,7 +482,7 @@ impl Supercluster {
x: f64,
y: f64,
z2: f64,
tile: &mut Tile
tile: &mut Tile,
) {
for i in ids {
let k = i * self.stride;
@@ -518,7 +520,7 @@ impl Supercluster {
r#type: "Point".to_string(),
coordinates: vec![
(self.options.extent * (px * z2 - x)).round(),
(self.options.extent * (py * z2 - y)).round()
(self.options.extent * (py * z2 - y)).round(),
],
}),
});
@@ -534,7 +536,8 @@ impl Supercluster {
/// # Returns
/// The effective zoom level considering the configured minimum and maximum zoom levels.
fn limit_zoom(&self, zoom: i32) -> usize {
zoom.max(self.options.min_zoom).min(self.options.max_zoom + 1) as usize
zoom.max(self.options.min_zoom)
.min(self.options.max_zoom + 1) as usize
}

/// Cluster points on a given zoom level using a KD-tree and returns updated data arrays.
@@ -805,8 +808,14 @@

assert_eq!(result.r#type, "Feature".to_string());
assert_eq!(result.id, Some(0));
assert_eq!(result.geometry.as_ref().unwrap().r#type, "Point".to_string());
assert_eq!(result.geometry.unwrap().coordinates, vec![-180.0, 85.05112877980659]);
assert_eq!(
result.geometry.as_ref().unwrap().r#type,
"Point".to_string()
);
assert_eq!(
result.geometry.unwrap().coordinates,
vec![-180.0, 85.05112877980659]
);

let properties = result.properties;

@@ -827,8 +836,14 @@

assert_eq!(result.id, Some(0));
assert_eq!(result.r#type, "Feature".to_string());
assert_eq!(result.geometry.as_ref().unwrap().r#type, "Point".to_string());
assert_eq!(result.geometry.unwrap().coordinates, vec![-180.0, 85.05112877980659]);
assert_eq!(
result.geometry.as_ref().unwrap().r#type,
"Point".to_string()
);
assert_eq!(
result.geometry.unwrap().coordinates,
vec![-180.0, 85.05112877980659]
);

let properties = result.properties;

9 changes: 4 additions & 5 deletions tests/common/mod.rs
@@ -1,5 +1,5 @@
use std::{ path::Path, fs };
use supercluster::{ Tile, Options, Feature };
use std::{fs, path::Path};
use supercluster::{Feature, Options, Tile};

#[allow(dead_code)]
pub fn get_options(radius: f64, extent: f64, min_points: i32, max_zoom: i32) -> Options {
@@ -32,9 +32,8 @@ pub fn load_tile_places() -> Tile {
#[allow(dead_code)]
pub fn load_tile_places_with_min_5() -> Tile {
let file_path = Path::new("./tests/common/places-tile-0-0-0-min-5.json");
let json_string = fs
::read_to_string(file_path)
.expect("places-tile-0-0-0-min-5.json was not found");
let json_string =
fs::read_to_string(file_path).expect("places-tile-0-0-0-min-5.json was not found");

serde_json::from_str(&json_string).expect("places-z0-0-0-min5.json was not parsed")
}