diff --git a/Cargo.toml b/Cargo.toml
index bff5643306..c1719ddc4a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -54,7 +54,7 @@ cargo-pgrx = { path = "cargo-pgrx" }
 pgrx-macros = { path = "./pgrx-macros", version = "=0.12.0-alpha.0" }
 pgrx-pg-sys = { path = "./pgrx-pg-sys", version = "=0.12.0-alpha.0" }
 pgrx-sql-entity-graph = { path = "./pgrx-sql-entity-graph", version = "=0.12.0-alpha.0" }
-pgrx-pg-config = { path = "./pgrx-pg-config/", version = "=0.12.0-alpha.0" }
+pgrx-pg-config = { path = "./pgrx-pg-config", version = "=0.12.0-alpha.0" }
 
 cargo_toml = "0.16" # used for building projects
 eyre = "0.6.9" # simplifies error-handling
diff --git a/cargo-pgrx/src/command/init.rs b/cargo-pgrx/src/command/init.rs
index fe8ab225b3..5ea8317432 100644
--- a/cargo-pgrx/src/command/init.rs
+++ b/cargo-pgrx/src/command/init.rs
@@ -22,7 +22,7 @@ use std::collections::HashMap;
 use std::fs::File;
 use std::io::{Read, Write};
 use std::num::NonZeroUsize;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio};
 use std::sync::OnceLock;
 
@@ -208,7 +208,7 @@ pub(crate) fn init_pgrx(pgrx: &Pgrx, init: &Init) -> eyre::Result<()> {
     output_configs.sort_by(|a, b| {
         a.major_version()
             .unwrap_or_else(|e| panic!("{e}: could not determine major version for: `{a:?}`"))
-            .cmp(&b.major_version().ok().expect("could not determine major version"))
+            .cmp(&b.major_version().expect("could not determine major version"))
     });
     for pg_config in output_configs.iter() {
         validate_pg_config(pg_config)?;
@@ -231,7 +231,7 @@ pub(crate) fn init_pgrx(pgrx: &Pgrx, init: &Init) -> eyre::Result<()> {
 #[tracing::instrument(level = "error", skip_all, fields(pg_version = %pg_config.version()?, pgrx_home))]
 fn download_postgres(
     pg_config: &PgConfig,
-    pgrx_home: &PathBuf,
+    pgrx_home: &Path,
     init: &Init,
 ) -> eyre::Result<PgConfig> {
     use crate::command::build_agent_for_url;
@@ -263,15 +263,10 @@ fn download_postgres(
     make_install_postgres(pg_config, &pgdir, init) // returns a new PgConfig object
 }
 
-fn untar(
-    bytes: &[u8],
-    pgrxdir: &PathBuf,
-    pg_config: &PgConfig,
-    init: &Init,
-) -> eyre::Result<PathBuf> {
+fn untar(bytes: &[u8], pgrxdir: &Path, pg_config: &PgConfig, init: &Init) -> eyre::Result<PathBuf> {
     let _token = init.jobserver.get().unwrap().acquire().unwrap();
 
-    let mut unpackdir = pgrxdir.clone();
+    let mut unpackdir = pgrxdir.to_path_buf();
     unpackdir.push(&format!("{}_unpack", pg_config.version()?));
     if unpackdir.exists() {
         // delete everything at this path if it already exists
@@ -289,7 +284,7 @@ fn untar(
     let mut tar_decoder = Archive::new(BzDecoder::new(bytes));
     tar_decoder.unpack(&unpackdir)?;
 
-    let mut pgdir = pgrxdir.clone();
+    let mut pgdir = pgrxdir.to_path_buf();
     pgdir.push(&pg_config.version()?);
     if pgdir.exists() {
         // delete everything at this path if it already exists
@@ -398,11 +393,11 @@ fn fixup_homebrew_for_icu(configure_cmd: &mut Command) {
     }
 }
 
-fn configure_postgres(pg_config: &PgConfig, pgdir: &PathBuf, init: &Init) -> eyre::Result<()> {
+fn configure_postgres(pg_config: &PgConfig, pgdir: &Path, init: &Init) -> eyre::Result<()> {
     let _token = init.jobserver.get().unwrap().acquire().unwrap();
 
     println!("{} Postgres v{}", " Configuring".bold().green(), pg_config.version()?);
-    let mut configure_path = pgdir.clone();
+    let mut configure_path = pgdir.to_path_buf();
     configure_path.push("configure");
     let mut command = std::process::Command::new(configure_path);
     // Some of these are redundant with `--enable-debug`.
@@ -454,19 +449,16 @@ fn configure_postgres(pg_config: &PgConfig, pgdir: &PathBuf, init: &Init) -> eyr
     if output.status.success() {
         Ok(())
     } else {
-        Err(std::io::Error::new(
-            std::io::ErrorKind::Other,
-            format!(
-                "{}\n{}{}",
-                command_str,
-                String::from_utf8(output.stdout).unwrap(),
-                String::from_utf8(output.stderr).unwrap()
-            ),
-        ))?
+        Err(std::io::Error::other(format!(
+            "{}\n{}{}",
+            command_str,
+            String::from_utf8(output.stdout).unwrap(),
+            String::from_utf8(output.stderr).unwrap()
+        )))?
     }
 }
 
-fn make_postgres(pg_config: &PgConfig, pgdir: &PathBuf, init: &Init) -> eyre::Result<()> {
+fn make_postgres(pg_config: &PgConfig, pgdir: &Path, init: &Init) -> eyre::Result<()> {
     println!("{} Postgres v{}", " Compiling".bold().green(), pg_config.version()?);
 
     let mut command = std::process::Command::new("make");
@@ -499,11 +491,7 @@ fn make_postgres(pg_config: &PgConfig, pgdir: &PathBuf, init: &Init) -> eyre::Re
     }
 }
 
-fn make_install_postgres(
-    version: &PgConfig,
-    pgdir: &PathBuf,
-    init: &Init,
-) -> eyre::Result<PgConfig> {
+fn make_install_postgres(version: &PgConfig, pgdir: &Path, init: &Init) -> eyre::Result<PgConfig> {
     println!(
         "{} Postgres v{} to {}",
         " Installing".bold().green(),
@@ -582,8 +570,8 @@ fn write_config(pg_configs: &Vec<PgConfig>, init: &Init) -> eyre::Result<()> {
     Ok(())
 }
 
-fn get_pg_installdir(pgdir: &PathBuf) -> PathBuf {
-    let mut dir = PathBuf::from(pgdir);
+fn get_pg_installdir(pgdir: &Path) -> PathBuf {
+    let mut dir = pgdir.to_path_buf();
     dir.push("pgrx-install");
     dir
 }
@@ -601,7 +589,7 @@ fn is_root_user() -> bool {
     false
 }
 
-pub(crate) fn initdb(bindir: &PathBuf, datadir: &PathBuf) -> eyre::Result<()> {
+pub(crate) fn initdb(bindir: &Path, datadir: &Path) -> eyre::Result<()> {
     println!(" {} data directory at {}", "Initializing".bold().green(), datadir.display());
     let mut command = std::process::Command::new(format!("{}/initdb", bindir.display()));
     command
diff --git a/cargo-pgrx/src/command/install.rs b/cargo-pgrx/src/command/install.rs
index 091bb15e9f..5db49ee13d 100644
--- a/cargo-pgrx/src/command/install.rs
+++ b/cargo-pgrx/src/command/install.rs
@@ -168,7 +168,7 @@ pub(crate) fn install_extension(
     );
     copy_file(
         &control_file,
-        &dest,
+        dest,
         "control file",
         true,
         &package_manifest_path,
@@ -207,13 +207,13 @@ pub(crate) fn install_extension(
     // process which will mash up all pointers in the .TEXT segment.
// this simulate linux's install(1) behavior if dest.exists() { - std::fs::remove_file(&dest) + fs::remove_file(&dest) .wrap_err_with(|| format!("unable to remove existing file {}", dest.display()))?; } copy_file( &shlibpath, - &dest, + dest, "shared library", false, &package_manifest_path, @@ -241,8 +241,8 @@ pub(crate) fn install_extension( } fn copy_file( - src: &PathBuf, - dest: &PathBuf, + src: &Path, + dest: PathBuf, msg: &str, do_filter: bool, package_manifest_path: impl AsRef, @@ -263,11 +263,11 @@ fn copy_file( })?, }; - println!("{} {} to {}", " Copying".bold().green(), msg, format_display_path(dest)?.cyan()); + println!("{} {} to {}", " Copying".bold().green(), msg, format_display_path(&dest)?.cyan()); if do_filter { // we want to filter the contents of the file we're to copy - let input = std::fs::read_to_string(src) + let input = fs::read_to_string(src) .wrap_err_with(|| format!("failed to read `{}`", src.display()))?; let mut input = filter_contents(package_manifest_path, input)?; @@ -275,16 +275,16 @@ fn copy_file( input = filter_out_fields_in_control(pg_config, input)?; } - std::fs::write(dest, input).wrap_err_with(|| { + fs::write(&dest, input).wrap_err_with(|| { format!("failed writing `{}` to `{}`", src.display(), dest.display()) })?; } else { - std::fs::copy(src, dest).wrap_err_with(|| { + fs::copy(src, &dest).wrap_err_with(|| { format!("failed copying `{}` to `{}`", src.display(), dest.display()) })?; } - output_tracking.push(dest.clone()); + output_tracking.push(dest); Ok(()) } @@ -347,10 +347,10 @@ pub(crate) fn build_extension( fn get_target_sql_file( manifest_path: impl AsRef, - extdir: &PathBuf, - base_directory: &PathBuf, + extdir: &Path, + base_directory: PathBuf, ) -> eyre::Result { - let mut dest = base_directory.clone(); + let mut dest = base_directory; dest.push(extdir); let (_, extname) = find_control_file(&manifest_path)?; @@ -368,12 +368,12 @@ fn copy_sql_files( profile: &CargoProfile, is_test: bool, features: &clap_cargo::Features, - extdir: &PathBuf, - base_directory: &PathBuf, + extdir: &Path, + base_directory: &Path, skip_build: bool, output_tracking: &mut Vec, ) -> eyre::Result<()> { - let dest = get_target_sql_file(&package_manifest_path, extdir, base_directory)?; + let dest = get_target_sql_file(&package_manifest_path, extdir, base_directory.to_path_buf())?; let (_, extname) = find_control_file(&package_manifest_path)?; crate::command::schema::generate_schema( @@ -392,26 +392,24 @@ fn copy_sql_files( )?; // now copy all the version upgrade files too - if let Ok(dir) = std::fs::read_dir("sql/") { - for sql in dir { - if let Ok(sql) = sql { - let filename = sql.file_name().into_string().unwrap(); - - if filename.starts_with(&format!("{extname}--")) && filename.ends_with(".sql") { - let mut dest = base_directory.clone(); - dest.push(extdir); - dest.push(filename); - - copy_file( - &sql.path(), - &dest, - "extension schema upgrade file", - true, - &package_manifest_path, - output_tracking, - pg_config, - )?; - } + if let Ok(dir) = fs::read_dir("sql/") { + for sql in dir.flatten() { + let filename = sql.file_name().into_string().unwrap(); + + if filename.starts_with(&format!("{extname}--")) && filename.ends_with(".sql") { + let mut dest = base_directory.to_path_buf(); + dest.push(extdir); + dest.push(filename); + + copy_file( + &sql.path(), + dest, + "extension schema upgrade file", + true, + &package_manifest_path, + output_tracking, + pg_config, + )?; } } } @@ -420,7 +418,7 @@ fn copy_sql_files( #[tracing::instrument(level = "error", skip_all)] 
pub(crate) fn find_library_file( - manifest: &cargo_toml::Manifest, + manifest: &Manifest, build_command_messages: &Vec, ) -> eyre::Result { let target_name = manifest.target_name()?; diff --git a/cargo-pgrx/src/command/new.rs b/cargo-pgrx/src/command/new.rs index 4aa5d5cf2a..ecbefc4c40 100644 --- a/cargo-pgrx/src/command/new.rs +++ b/cargo-pgrx/src/command/new.rs @@ -51,20 +51,18 @@ pub(crate) fn create_crate_template( name: &str, is_bgworker: bool, ) -> eyre::Result<()> { - create_directory_structure(&path)?; - create_control_file(&path, name)?; - create_cargo_toml(&path, name)?; - create_dotcargo_config_toml(&path, name)?; - create_lib_rs(&path, name, is_bgworker)?; - create_git_ignore(&path, name)?; - create_pgrx_embed_rs(&path)?; + create_directory_structure(path.clone())?; + create_control_file(path.clone(), name)?; + create_cargo_toml(path.clone(), name)?; + create_dotcargo_config_toml(path.clone(), name)?; + create_lib_rs(path.clone(), name, is_bgworker)?; + create_git_ignore(path.clone(), name)?; + create_pgrx_embed_rs(path)?; Ok(()) } -fn create_directory_structure(path: &PathBuf) -> Result<(), std::io::Error> { - let mut src_dir = path.clone(); - +fn create_directory_structure(mut src_dir: PathBuf) -> Result<(), std::io::Error> { src_dir.push("src"); std::fs::create_dir_all(&src_dir)?; @@ -85,9 +83,7 @@ fn create_directory_structure(path: &PathBuf) -> Result<(), std::io::Error> { Ok(()) } -fn create_control_file(path: &PathBuf, name: &str) -> Result<(), std::io::Error> { - let mut filename = path.clone(); - +fn create_control_file(mut filename: PathBuf, name: &str) -> Result<(), std::io::Error> { filename.push(format!("{name}.control")); let mut file = std::fs::File::create(filename)?; @@ -96,9 +92,7 @@ fn create_control_file(path: &PathBuf, name: &str) -> Result<(), std::io::Error> Ok(()) } -fn create_cargo_toml(path: &PathBuf, name: &str) -> Result<(), std::io::Error> { - let mut filename = path.clone(); - +fn create_cargo_toml(mut filename: PathBuf, name: &str) -> Result<(), std::io::Error> { filename.push("Cargo.toml"); let mut file = std::fs::File::create(filename)?; @@ -107,9 +101,7 @@ fn create_cargo_toml(path: &PathBuf, name: &str) -> Result<(), std::io::Error> { Ok(()) } -fn create_dotcargo_config_toml(path: &PathBuf, _name: &str) -> Result<(), std::io::Error> { - let mut filename = path.clone(); - +fn create_dotcargo_config_toml(mut filename: PathBuf, _name: &str) -> Result<(), std::io::Error> { filename.push(".cargo"); filename.push("config.toml"); let mut file = std::fs::File::create(filename)?; @@ -119,9 +111,11 @@ fn create_dotcargo_config_toml(path: &PathBuf, _name: &str) -> Result<(), std::i Ok(()) } -fn create_lib_rs(path: &PathBuf, name: &str, is_bgworker: bool) -> Result<(), std::io::Error> { - let mut filename = path.clone(); - +fn create_lib_rs( + mut filename: PathBuf, + name: &str, + is_bgworker: bool, +) -> Result<(), std::io::Error> { filename.push("src"); filename.push("lib.rs"); let mut file = std::fs::File::create(filename)?; @@ -137,9 +131,7 @@ fn create_lib_rs(path: &PathBuf, name: &str, is_bgworker: bool) -> Result<(), st Ok(()) } -fn create_git_ignore(path: &PathBuf, _name: &str) -> Result<(), std::io::Error> { - let mut filename = path.clone(); - +fn create_git_ignore(mut filename: PathBuf, _name: &str) -> Result<(), std::io::Error> { filename.push(".gitignore"); let mut file = std::fs::File::create(filename)?; @@ -148,8 +140,7 @@ fn create_git_ignore(path: &PathBuf, _name: &str) -> Result<(), std::io::Error> Ok(()) } -fn 
create_pgrx_embed_rs(path: &PathBuf) -> Result<(), std::io::Error> { - let mut filename = path.clone(); +fn create_pgrx_embed_rs(mut filename: PathBuf) -> Result<(), std::io::Error> { filename.push("src"); filename.push("bin"); filename.push(format!("pgrx_embed.rs")); diff --git a/cargo-pgrx/src/command/sudo_install.rs b/cargo-pgrx/src/command/sudo_install.rs index ed249a5acf..2e9302a644 100644 --- a/cargo-pgrx/src/command/sudo_install.rs +++ b/cargo-pgrx/src/command/sudo_install.rs @@ -70,10 +70,7 @@ impl CommandExecute for SudoInstall { for src in output_files { let src = src.canonicalize()?; let dest_abs = make_absolute(src.strip_prefix(&outdir)?); - let dest = match dest_abs.canonicalize() { - Ok(path) => path, - Err(_) => dest_abs, - }; + let dest = dest_abs.canonicalize().unwrap_or(dest_abs); // we're about to run `sudo` to copy some files, one at a time let mut command = Command::new("sudo"); // NB: If we ever support Windows... diff --git a/docs/src/extension/build/cross-compile.md b/docs/src/extension/build/cross-compile.md index a351a5b5cf..13e7e6f19b 100644 --- a/docs/src/extension/build/cross-compile.md +++ b/docs/src/extension/build/cross-compile.md @@ -87,7 +87,7 @@ Unfortunately, the cross-compilation process is quite distribution specific. We' Of the mainstream distributions (that is, excluding things like NixOS which apparently are designed to make this easy) the easiest path available is likely to be on Debian-family systems. This is for two reasons: -1. The cross compilation tools can be installed via an easy package like `crossbuild-essential-arm64` (when targetting `aarch64`) or `crossbuild-essential-amd64` (when targetting `x86_64`) +1. The cross compilation tools can be installed via an easy package like `crossbuild-essential-arm64` (when targeting `aarch64`) or `crossbuild-essential-amd64` (when targeting `x86_64`) 2. The cross compilation sysroot is the same as the normal sysroot -- they're both `/`. @@ -105,7 +105,7 @@ On the steps on Debian-family are as follows: - *`target=aarch64`*: `rustup target add aarch64-unknown-linux-gnu`. - *`target=x86_64`*: `rustup target add x86_64-unknown-linux-gnu`. -3. Install the `crossbuild-essential-` package for the architecture you are targetting +3. Install the `crossbuild-essential-` package for the architecture you are targeting - *`target=aarch64`*: `sudo apt install crossbuild-essential-arm64`. - *`target=x86_64`*: `sudo apt install crossbuild-essential-amd64`. @@ -126,7 +126,7 @@ On the steps on Debian-family are as follows: This will produce a `.so` in `./target//release/lib$yourext.so`, which you can use. -> *TODO: this seems like it is not quite complete -- we may need things like this (when targetting `aarch64` from `x86_64`)? Needs some slightly further investigation for _why_, though, since most of this should be auto-detected (notably the target and isystem paths...)* +> *TODO: this seems like it is not quite complete -- we may need things like this (when targeting `aarch64` from `x86_64`)? Needs some slightly further investigation for _why_, though, since most of this should be auto-detected (notably the target and isystem paths...)* > > ```sh > export BINDGEN_EXTRA_CLANG_ARGS_aarch64-unknown-linux-gnu="-target aarch64-unknown-linux-gnu -isystem /usr/aarch64-linux-gnu/include/ -ccc-gcc-name aarch64-linux-gnu-gcc" @@ -156,7 +156,7 @@ To cross compile, you need a toolchain. This is basically two parts: Pick well here, since getting a bad one may cause builds that succeed but fail at runtime. 
-An easy option for targetting `aarch64` (or several other architectures) from `x86_64` is to use one of the ones on (not an endorsement: they're something I've used for development, I don't know how well-made they are, and they honestly seem kind of idiosyncratic. IOW, I'd want to do a lot more research before putting them into production). +An easy option for targeting `aarch64` (or several other architectures) from `x86_64` is to use one of the ones on (not an endorsement: they're something I've used for development, I don't know how well-made they are, and they honestly seem kind of idiosyncratic. IOW, I'd want to do a lot more research before putting them into production). Sadly, I don't have a good option for an easily downloaded x86_64 toolchain that has tools built for aarch64. I've been using a manually built one, which isn't covered in this guide (TODO?). @@ -173,7 +173,7 @@ Anyway, once you have one of these you may need to put it somewhere specific -- ## Use the cross compilation toolchain -Continuing from above, I will assume (without loss of generality) that you're targetting aarch64, have a toolchain directory at `$toolchain_dir` and your sysroot is at `$sysroot_dir` -- try `$toolchain_dir/bin/aarch64-linux-gnu-gcc --print-sysroot`. +Continuing from above, I will assume (without loss of generality) that you're targeting aarch64, have a toolchain directory at `$toolchain_dir` and your sysroot is at `$sysroot_dir` -- try `$toolchain_dir/bin/aarch64-linux-gnu-gcc --print-sysroot`. Anyway, set diff --git a/pgrx-examples/aggregate/src/lib.rs b/pgrx-examples/aggregate/src/lib.rs index 5d0a4d09db..22af8c2b99 100644 --- a/pgrx-examples/aggregate/src/lib.rs +++ b/pgrx-examples/aggregate/src/lib.rs @@ -30,10 +30,10 @@ impl IntegerAvgState { mut current: ::State, arg: ::Args, ) -> ::State { - arg.map(|a| { - current.sum += a; + if let Some(arg) = arg { + current.sum += arg; current.n += 1; - }); + } current } diff --git a/pgrx-examples/bad_ideas/src/lib.rs b/pgrx-examples/bad_ideas/src/lib.rs index ae053188c5..0b9453a05b 100644 --- a/pgrx-examples/bad_ideas/src/lib.rs +++ b/pgrx-examples/bad_ideas/src/lib.rs @@ -59,10 +59,8 @@ fn exec<'a>( ) -> TableIterator<'static, (name!(status, Option), name!(stdout, String))> { let mut command = &mut Command::new(command); - for arg in args { - if let Some(arg) = arg { - command = command.arg(arg); - } + for arg in args.into_iter().flatten() { + command = command.arg(arg); } let output = command.output().expect("command failed"); diff --git a/pgrx-examples/custom_libname/Cargo.toml b/pgrx-examples/custom_libname/Cargo.toml index b24057d7d2..5e77564e82 100644 --- a/pgrx-examples/custom_libname/Cargo.toml +++ b/pgrx-examples/custom_libname/Cargo.toml @@ -32,7 +32,7 @@ pg16 = ["pgrx/pg16", "pgrx-tests/pg16" ] pg_test = [] [dependencies] -pgrx = { path = "../../pgrx/", default-features = false } +pgrx = { path = "../../pgrx", default-features = false } [dev-dependencies] pgrx-tests = { path = "../../pgrx-tests" } diff --git a/pgrx-examples/custom_types/src/hexint.rs b/pgrx-examples/custom_types/src/hexint.rs index 6dae3c282a..492548edf6 100644 --- a/pgrx-examples/custom_types/src/hexint.rs +++ b/pgrx-examples/custom_types/src/hexint.rs @@ -133,7 +133,7 @@ fn hexint_to_int(hexint: HexInt) -> Result> { #[pg_extern(immutable, parallel_safe)] fn hexint_to_numeric(hexint: HexInt) -> Result> { - Ok(hexint.value.try_into()?) 
+ Ok(hexint.value.into()) } #[pg_extern(immutable, parallel_safe)] diff --git a/pgrx-examples/strings/Cargo.toml b/pgrx-examples/strings/Cargo.toml index 6ff957b8b1..e5fcd671f7 100644 --- a/pgrx-examples/strings/Cargo.toml +++ b/pgrx-examples/strings/Cargo.toml @@ -31,7 +31,7 @@ pg16 = ["pgrx/pg16", "pgrx-tests/pg16" ] pg_test = [] [dependencies] -pgrx = { path = "../../pgrx/", default-features = false } +pgrx = { path = "../../pgrx", default-features = false } [dev-dependencies] pgrx-tests = { path = "../../pgrx-tests" } diff --git a/pgrx-examples/triggers/Cargo.toml b/pgrx-examples/triggers/Cargo.toml index 906ac269e4..81818a4987 100644 --- a/pgrx-examples/triggers/Cargo.toml +++ b/pgrx-examples/triggers/Cargo.toml @@ -31,7 +31,7 @@ pg16 = ["pgrx/pg16", "pgrx-tests/pg16" ] pg_test = [] [dependencies] -pgrx = { path = "../../pgrx/", default-features = false } +pgrx = { path = "../../pgrx", default-features = false } thiserror = "1.0" [dev-dependencies] diff --git a/pgrx-examples/versioned_custom_libname_so/Cargo.toml b/pgrx-examples/versioned_custom_libname_so/Cargo.toml index 2054735acc..319e0ea145 100644 --- a/pgrx-examples/versioned_custom_libname_so/Cargo.toml +++ b/pgrx-examples/versioned_custom_libname_so/Cargo.toml @@ -32,7 +32,7 @@ pg16 = ["pgrx/pg16", "pgrx-tests/pg16" ] pg_test = [] [dependencies] -pgrx = { path = "../../pgrx/", default-features = false } +pgrx = { path = "../../pgrx", default-features = false } [dev-dependencies] pgrx-tests = { path = "../../pgrx-tests" } diff --git a/pgrx-examples/versioned_so/Cargo.toml b/pgrx-examples/versioned_so/Cargo.toml index 367f7f6ce4..57f5e0f1a1 100644 --- a/pgrx-examples/versioned_so/Cargo.toml +++ b/pgrx-examples/versioned_so/Cargo.toml @@ -31,7 +31,7 @@ pg16 = ["pgrx/pg16", "pgrx-tests/pg16" ] pg_test = [] [dependencies] -pgrx = { path = "../../pgrx/", default-features = false } +pgrx = { path = "../../pgrx", default-features = false } [dev-dependencies] pgrx-tests = { path = "../../pgrx-tests" } diff --git a/pgrx-macros/src/lib.rs b/pgrx-macros/src/lib.rs index af14c2b2c8..2ad9114f47 100644 --- a/pgrx-macros/src/lib.rs +++ b/pgrx-macros/src/lib.rs @@ -197,15 +197,12 @@ pub fn pg_cast(attr: TokenStream, item: TokenStream) -> TokenStream { Ok(CodeEnrichment(pg_extern.as_cast(cast)).to_token_stream().into()) } - match wrapped(attr, item) { - Ok(tokens) => tokens, - Err(e) => { - let msg = e.to_string(); - TokenStream::from(quote! { - compile_error!(#msg); - }) - } - } + wrapped(attr, item).unwrap_or_else(|e| { + let msg = e.to_string(); + TokenStream::from(quote! { + compile_error!(#msg); + }) + }) } /// Declare a function as `#[pg_operator]` to indicate that it represents a Postgres operator @@ -291,15 +288,12 @@ pub fn pg_schema(_attr: TokenStream, input: TokenStream) -> TokenStream { Ok(pgrx_schema.to_token_stream().into()) } - match wrapped(input) { - Ok(tokens) => tokens, - Err(e) => { - let msg = e.to_string(); - TokenStream::from(quote! { - compile_error!(#msg); - }) - } - } + wrapped(input).unwrap_or_else(|e| { + let msg = e.to_string(); + TokenStream::from(quote! { + compile_error!(#msg); + }) + }) } /** @@ -434,15 +428,12 @@ pub fn extension_sql(input: TokenStream) -> TokenStream { Ok(ext_sql.to_token_stream().into()) } - match wrapped(input) { - Ok(tokens) => tokens, - Err(e) => { - let msg = e.to_string(); - TokenStream::from(quote! { - compile_error!(#msg); - }) - } - } + wrapped(input).unwrap_or_else(|e| { + let msg = e.to_string(); + TokenStream::from(quote! 
{ + compile_error!(#msg); + }) + }) } /** @@ -479,15 +470,12 @@ pub fn extension_sql_file(input: TokenStream) -> TokenStream { Ok(ext_sql.to_token_stream().into()) } - match wrapped(input) { - Ok(tokens) => tokens, - Err(e) => { - let msg = e.to_string(); - TokenStream::from(quote! { - compile_error!(#msg); - }) - } - } + wrapped(input).unwrap_or_else(|e| { + let msg = e.to_string(); + TokenStream::from(quote! { + compile_error!(#msg); + }) + }) } /// Associated macro for `#[pg_extern]` or `#[macro@pg_operator]`. Used to set the `SEARCH_PATH` option @@ -642,15 +630,12 @@ pub fn pg_extern(attr: TokenStream, item: TokenStream) -> TokenStream { Ok(pg_extern_item.to_token_stream().into()) } - match wrapped(attr, item) { - Ok(tokens) => tokens, - Err(e) => { - let msg = e.to_string(); - TokenStream::from(quote! { - compile_error!(#msg); - }) - } - } + wrapped(attr, item).unwrap_or_else(|e| { + let msg = e.to_string(); + TokenStream::from(quote! { + compile_error!(#msg); + }) + }) } /** @@ -683,14 +668,11 @@ fn impl_postgres_enum(ast: DeriveInput) -> syn::Result let enum_name = enum_ident.to_string(); // validate that we're only operating on an enum - let enum_data = match ast.data { - Data::Enum(e) => e, - _ => { - return Err(syn::Error::new( - ast.span(), - "#[derive(PostgresEnum)] can only be applied to enums", - )) - } + let Data::Enum(enum_data) = ast.data else { + return Err(syn::Error::new( + ast.span(), + "#[derive(PostgresEnum)] can only be applied to enums", + )); }; let mut from_datum = proc_macro2::TokenStream::new(); @@ -1195,15 +1177,12 @@ pub fn pg_aggregate(_attr: TokenStream, item: TokenStream) -> TokenStream { } let parsed_base = parse_macro_input!(item as syn::ItemImpl); - match wrapped(parsed_base) { - Ok(tokens) => tokens, - Err(e) => { - let msg = e.to_string(); - TokenStream::from(quote! { - compile_error!(#msg); - }) - } - } + wrapped(parsed_base).unwrap_or_else(|e| { + let msg = e.to_string(); + TokenStream::from(quote! { + compile_error!(#msg); + }) + }) } /** @@ -1254,13 +1233,10 @@ pub fn pg_trigger(attrs: TokenStream, input: TokenStream) -> TokenStream { Ok(trigger_tokens.into()) } - match wrapped(attrs, input) { - Ok(tokens) => tokens, - Err(e) => { - let msg = e.to_string(); - TokenStream::from(quote! { - compile_error!(#msg); - }) - } - } + wrapped(attrs, input).unwrap_or_else(|e| { + let msg = e.to_string(); + TokenStream::from(quote! { + compile_error!(#msg); + }) + }) } diff --git a/pgrx-pg-sys/build.rs b/pgrx-pg-sys/build.rs index dbe2c640ef..6b4e1611f4 100644 --- a/pgrx-pg-sys/build.rs +++ b/pgrx-pg-sys/build.rs @@ -17,7 +17,7 @@ use quote::{quote, ToTokens}; use std::cmp::Ordering; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::fs; -use std::path::{self, PathBuf}; // disambiguate path::Path and syn::Type::Path +use std::path::{self, Path, PathBuf}; // disambiguate path::Path and syn::Type::Path use std::process::{Command, Output}; use syn::{ForeignItem, Item, ItemConst}; @@ -346,13 +346,13 @@ impl BuildPaths { fn write_rs_file( code: proc_macro2::TokenStream, - file: &PathBuf, + file: &Path, header: proc_macro2::TokenStream, ) -> eyre::Result<()> { let mut contents = header; contents.extend(code); - std::fs::write(file, contents.to_string())?; + fs::write(file, contents.to_string())?; rust_fmt(file) } @@ -1067,7 +1067,7 @@ fn apply_pg_guard(items: &Vec) -> eyre::Result eyre::Result<()> { +fn rust_fmt(path: &Path) -> eyre::Result<()> { // We shouldn't hit this path in a case where we care about it, but... 
just // in case we probably should respect RUSTFMT. let rustfmt = env_tracked("RUSTFMT").unwrap_or_else(|| "rustfmt".into()); diff --git a/pgrx-pg-sys/src/submodules/htup.rs b/pgrx-pg-sys/src/submodules/htup.rs index c238688e3f..95b573c8c8 100644 --- a/pgrx-pg-sys/src/submodules/htup.rs +++ b/pgrx-pg-sys/src/submodules/htup.rs @@ -257,21 +257,17 @@ unsafe fn fetch_att(T: *mut std::os::raw::c_char, attbyval: bool, attlen: i16) - // NB: Compiler should solve this branch for us, and we write it like this to avoid // code duplication for the case where a Datum isn't 8 bytes wide - if SIZEOF_DATUM == 8 { - if attlen == std::mem::size_of::() { - return *T.cast::(); - } + if SIZEOF_DATUM == 8 && attlen == std::mem::size_of::() { + return *T.cast::(); } if attlen == std::mem::size_of::() { Datum::from(*T.cast::()) + } else if attlen == std::mem::size_of::() { + Datum::from(*T.cast::()) } else { - if attlen == std::mem::size_of::() { - Datum::from(*T.cast::()) - } else { - assert_eq!(attlen, 1); - Datum::from(*T.cast::()) - } + assert_eq!(attlen, 1); + Datum::from(*T.cast::()) } } else { Datum::from(T.cast::()) diff --git a/pgrx-pg-sys/src/submodules/panic.rs b/pgrx-pg-sys/src/submodules/panic.rs index f6339f5efe..7dd0f7d58f 100644 --- a/pgrx-pg-sys/src/submodules/panic.rs +++ b/pgrx-pg-sys/src/submodules/panic.rs @@ -43,19 +43,16 @@ where /// be an [`ErrorReport`], then that is specifically raised. Otherwise it's just a general /// [`ereport!`] as a [`PgLogLevel::ERROR`]. fn report(self) -> Self::Inner { - match self { - Ok(value) => value, - Err(e) => { - let any: Box<&dyn Any> = Box::new(&e); - if any.downcast_ref::().is_some() { - let any: Box = Box::new(e); - any.downcast::().unwrap().report(PgLogLevel::ERROR); - unreachable!(); - } else { - ereport!(ERROR, PgSqlErrorCode::ERRCODE_DATA_EXCEPTION, &format!("{e}")); - } + self.unwrap_or_else(|e| { + let any: Box<&dyn Any> = Box::new(&e); + if any.downcast_ref::().is_some() { + let any: Box = Box::new(e); + any.downcast::().unwrap().report(PgLogLevel::ERROR); + unreachable!(); + } else { + ereport!(ERROR, PgSqlErrorCode::ERRCODE_DATA_EXCEPTION, &format!("{e}")); } - } + }) } } diff --git a/pgrx-sql-entity-graph/src/to_sql/mod.rs b/pgrx-sql-entity-graph/src/to_sql/mod.rs index aeb8780e11..8acab3de8b 100644 --- a/pgrx-sql-entity-graph/src/to_sql/mod.rs +++ b/pgrx-sql-entity-graph/src/to_sql/mod.rs @@ -99,28 +99,22 @@ impl ToSqlConfig { continue; } - match nv.value { - ArgValue::Path(ref callback_path) => { - return Ok(Some(Self { - enabled: true, - callback: Some(callback_path.clone()), - content: None, - })); - } + return match nv.value { + ArgValue::Path(ref callback_path) => Ok(Some(Self { + enabled: true, + callback: Some(callback_path.clone()), + content: None, + })), ArgValue::Lit(Lit::Bool(ref b)) => { - return Ok(Some(Self { enabled: b.value, callback: None, content: None })); + Ok(Some(Self { enabled: b.value, callback: None, content: None })) } ArgValue::Lit(Lit::Str(ref s)) => { - return Ok(Some(Self { - enabled: true, - callback: None, - content: Some(s.clone()), - })); + Ok(Some(Self { enabled: true, callback: None, content: Some(s.clone()) })) } ArgValue::Lit(ref other) => { - return Err(syn::Error::new(other.span(), INVALID_ATTR_CONTENT)); + Err(syn::Error::new(other.span(), INVALID_ATTR_CONTENT)) } - } + }; } } diff --git a/pgrx-tests/src/framework.rs b/pgrx-tests/src/framework.rs index d25ea6ad79..786183ec3f 100644 --- a/pgrx-tests/src/framework.rs +++ b/pgrx-tests/src/framework.rs @@ -547,10 +547,8 @@ fn monitor_pg(mut command: 
Command, cmd_string: String, loglines: LogLines) -> S let mut is_started_yet = false; let mut lines = reader.lines(); while let Some(Ok(line)) = lines.next() { - let session_id = match get_named_capture(®ex, "session_id", &line) { - Some(sid) => sid, - None => "NONE".to_string(), - }; + let session_id = get_named_capture(®ex, "session_id", &line) + .unwrap_or_else(|| "NONE".to_string()); if line.contains("database system is ready to accept connections") { // Postgres says it's ready to go diff --git a/pgrx-tests/src/tests/default_arg_value_tests.rs b/pgrx-tests/src/tests/default_arg_value_tests.rs index a4e18e8e71..14ef1e35f2 100644 --- a/pgrx-tests/src/tests/default_arg_value_tests.rs +++ b/pgrx-tests/src/tests/default_arg_value_tests.rs @@ -21,10 +21,7 @@ fn default_argument(a: default!(i32, 99)) -> i32 { #[pg_extern] fn option_default_argument(a: default!(Option<&str>, "NULL")) -> &str { - match a { - Some(a) => a, - None => "got default of null", - } + a.unwrap_or("got default of null") } #[cfg(any(test, feature = "pg_test"))] diff --git a/pgrx-tests/src/tests/fcinfo_tests.rs b/pgrx-tests/src/tests/fcinfo_tests.rs index 7d07f32d42..4a86424816 100644 --- a/pgrx-tests/src/tests/fcinfo_tests.rs +++ b/pgrx-tests/src/tests/fcinfo_tests.rs @@ -58,10 +58,7 @@ fn takes_char(i: char) -> char { #[pg_extern] fn takes_option(i: Option) -> i32 { - match i { - Some(i) => i, - None => -1, - } + i.unwrap_or(-1) } #[pg_extern] diff --git a/pgrx-tests/src/tests/heap_tuple.rs b/pgrx-tests/src/tests/heap_tuple.rs index 31fc73658e..2032e75744 100644 --- a/pgrx-tests/src/tests/heap_tuple.rs +++ b/pgrx-tests/src/tests/heap_tuple.rs @@ -569,7 +569,7 @@ mod sql_generator_tests { type OrderedSetArgs = name!(percentile, pgrx::composite_type!('static, "Dog")); fn state( - mut _current: Self::State, + _current: Self::State, _arg: Self::Args, _fcinfo: pg_sys::FunctionCallInfo, ) -> Self::State { @@ -577,7 +577,7 @@ mod sql_generator_tests { } fn finalize( - mut _current: Self::State, + _current: Self::State, _direct_arg: Self::OrderedSetArgs, _fcinfo: pg_sys::FunctionCallInfo, ) -> Self::Finalize { @@ -595,7 +595,7 @@ mod sql_generator_tests { type MovingState = pgrx::composite_type!('static, "Dog"); fn state( - mut _current: Self::State, + _current: Self::State, _arg: Self::Args, _fcinfo: pg_sys::FunctionCallInfo, ) -> Self::State { @@ -611,7 +611,7 @@ mod sql_generator_tests { } fn moving_state_inverse( - mut _current: Self::State, + _current: Self::State, _arg: Self::Args, _fcinfo: pg_sys::FunctionCallInfo, ) -> Self::MovingState { @@ -619,7 +619,7 @@ mod sql_generator_tests { } fn combine( - mut _first: Self::State, + _first: Self::State, _second: Self::State, _fcinfo: pg_sys::FunctionCallInfo, ) -> Self::State { diff --git a/pgrx-tests/src/tests/numeric_tests.rs b/pgrx-tests/src/tests/numeric_tests.rs index 0543ee753a..87b36521bc 100644 --- a/pgrx-tests/src/tests/numeric_tests.rs +++ b/pgrx-tests/src/tests/numeric_tests.rs @@ -30,7 +30,7 @@ mod tests { #[pg_extern] fn return_a_u64_numeric() -> AnyNumeric { - AnyNumeric::try_from(std::u64::MAX).unwrap() + AnyNumeric::try_from(u64::MAX).unwrap() } #[pg_test] diff --git a/pgrx/src/bgworkers.rs b/pgrx/src/bgworkers.rs index c4c9702494..fe2bbedba0 100644 --- a/pgrx/src/bgworkers.rs +++ b/pgrx/src/bgworkers.rs @@ -600,7 +600,7 @@ impl BackgroundWorkerBuilder { /// Once properly configured, call `load_dynamic()` to get the BackgroundWorker registered and started dynamically. /// Start up might fail, e.g. if max_worker_processes is exceeded. 
In that case an Err is returned. - pub fn load_dynamic(self: Self) -> Result { + pub fn load_dynamic(self) -> Result { let mut bgw: pg_sys::BackgroundWorker = (&self).into(); let mut handle: *mut pg_sys::BackgroundWorkerHandle = null_mut(); diff --git a/pgrx/src/callbacks.rs b/pgrx/src/callbacks.rs index 06db44fa52..275cc0b788 100644 --- a/pgrx/src/callbacks.rs +++ b/pgrx/src/callbacks.rs @@ -28,7 +28,7 @@ pub enum PgXactCallbackEvent { /// cause the Postgres backend to abort. Abort, - /// Fired when a transcation is committed. It is mutually exclusive with `PgrxactCallbackEvent::Abort` + /// Fired when a transaction is committed. It is mutually exclusive with `PgrxactCallbackEvent::Abort` /// /// ## Safety /// @@ -114,7 +114,7 @@ type CallbackMap = /// they were registered. /// /// Registered callbacks only remain registered for the life of a single transaction. Registration -/// of permanet callbacks should be done through the unsafe `pg_sys::RegisterXactCallback()` function. +/// of permanent callbacks should be done through the unsafe `pg_sys::RegisterXactCallback()` function. /// /// /// ## Examples diff --git a/pgrx/src/datum/anyarray.rs b/pgrx/src/datum/anyarray.rs index ee45c34bc1..e8fbab87f5 100644 --- a/pgrx/src/datum/anyarray.rs +++ b/pgrx/src/datum/anyarray.rs @@ -15,7 +15,7 @@ use pgrx_sql_entity_graph::metadata::{ /// The [`anyarray` polymorphic pseudo-type][anyarray]. /// // rustdoc doesn't directly support a warning block: https://github.com/rust-lang/rust/issues/73935 -/// **Warning**: Calling [`FromDatum::from_datum`] with this type will unconditonally panic. Call +/// **Warning**: Calling [`FromDatum::from_datum`] with this type will unconditionally panic. Call /// [`FromDatum::from_polymorphic_datum`] with a type ID instead. /// /// [anyarray]: https://www.postgresql.org/docs/current/extend-type-system.html#EXTEND-TYPES-POLYMORPHIC diff --git a/pgrx/src/datum/anyelement.rs b/pgrx/src/datum/anyelement.rs index d84fa31ae1..f4733831ac 100644 --- a/pgrx/src/datum/anyelement.rs +++ b/pgrx/src/datum/anyelement.rs @@ -15,7 +15,7 @@ use pgrx_sql_entity_graph::metadata::{ /// The [`anyelement` polymorphic pseudo-type][anyelement]. /// // rustdoc doesn't directly support a warning block: https://github.com/rust-lang/rust/issues/73935 -/// **Warning**: Calling [`FromDatum::from_datum`] with this type will unconditonally panic. Call +/// **Warning**: Calling [`FromDatum::from_datum`] with this type will unconditionally panic. Call /// [`FromDatum::from_polymorphic_datum`] with a type ID instead. /// /// [anyelement]: https://www.postgresql.org/docs/current/extend-type-system.html#EXTEND-TYPES-POLYMORPHIC diff --git a/pgrx/src/datum/array.rs b/pgrx/src/datum/array.rs index bd97fe7c2f..1621333dc5 100644 --- a/pgrx/src/datum/array.rs +++ b/pgrx/src/datum/array.rs @@ -109,9 +109,7 @@ where where S: Serializer, { - let iter = self.iter(); - let result = serializer.collect_seq(iter); - result + serializer.collect_seq(self.iter()) } } @@ -957,6 +955,8 @@ where #[allow(clippy::get_first)] // https://github.com/pgcentralfoundation/pgrx/issues/1363 fn composite_type_oid(&self) -> Option { // the composite type oid for a vec of composite types is the array type of the base composite type + // the use of first() would have presented a false certainty here: it's not actually relevant that it be the first. 
+ #[allow(clippy::get_first)] self.get(0) .and_then(|v| v.composite_type_oid().map(|oid| unsafe { pg_sys::get_array_type(oid) })) } diff --git a/pgrx/src/datum/datetime_support/ctor.rs b/pgrx/src/datum/datetime_support/ctor.rs index c564727f65..7bcf98ae37 100644 --- a/pgrx/src/datum/datetime_support/ctor.rs +++ b/pgrx/src/datum/datetime_support/ctor.rs @@ -113,6 +113,8 @@ pub fn to_timestamp(epoch_seconds: f64) -> TimestampWithTimeZone { /// SELECT date_bin('15 minutes', TIMESTAMP '2020-02-11 15:44:17', TIMESTAMP '2001-01-01 00:02:30'); /// Result: 2020-02-11 15:32:30 /// ``` +/// +/// TODO: See https://github.com/pgcentralfoundation/pgrx/pull/1414 #[cfg(any(features = "pg14", features = "pg15"))] pub fn date_bin( stride: crate::datum::interval::Interval, diff --git a/pgrx/src/datum/numeric.rs b/pgrx/src/datum/numeric.rs index 07b4d9b1d7..8a0d2dc53a 100644 --- a/pgrx/src/datum/numeric.rs +++ b/pgrx/src/datum/numeric.rs @@ -9,6 +9,7 @@ //LICENSE Use of this source code is governed by the MIT license that can be found in the LICENSE file. use core::ffi::CStr; use core::fmt::{Debug, Display, Formatter}; +use std::cmp::Ordering; use std::fmt; use std::iter::Sum; @@ -104,14 +105,10 @@ impl AnyNumeric { if self.is_nan() { Sign::NaN } else { - let zero: AnyNumeric = 0.try_into().unwrap(); - - if self < &zero { - Sign::Negative - } else if self > &zero { - Sign::Positive - } else { - Sign::Zero + match self.cmp(&0.into()) { + Ordering::Less => Sign::Negative, + Ordering::Greater => Sign::Positive, + Ordering::Equal => Sign::Zero, } } } diff --git a/pgrx/src/datum/numeric_support/cmp.rs b/pgrx/src/datum/numeric_support/cmp.rs index b9e3fa42b3..5235f83118 100644 --- a/pgrx/src/datum/numeric_support/cmp.rs +++ b/pgrx/src/datum/numeric_support/cmp.rs @@ -73,13 +73,7 @@ impl Ord for AnyNumeric { let cmp: i32 = unsafe { direct_function_call(pg_sys::numeric_cmp, &[self.as_datum(), other.as_datum()]).unwrap() }; - if cmp < 0 { - Ordering::Less - } else if cmp > 0 { - Ordering::Greater - } else { - Ordering::Equal - } + cmp.cmp(&0) } } diff --git a/pgrx/src/datum/range.rs b/pgrx/src/datum/range.rs index fbd45c4db2..01a8068741 100644 --- a/pgrx/src/datum/range.rs +++ b/pgrx/src/datum/range.rs @@ -246,10 +246,10 @@ where /// Returns `true` if the range is "infinite". 
This is equivalent to Rust's [`std::ops::RangeFull`] (`(..)`) #[inline] pub fn is_infinite(&self) -> bool { - match (self.lower(), self.upper()) { - (Some(RangeBound::Infinite), Some(RangeBound::Infinite)) => true, - _ => false, - } + matches!( + (self.lower(), self.upper()), + (Some(RangeBound::Infinite), Some(RangeBound::Infinite)) + ) } /// Consumes `self` and returns the internal representation, which can be easily mapped or diff --git a/pgrx/src/datum/tuples.rs b/pgrx/src/datum/tuples.rs index 030b718d61..3dd6709cde 100644 --- a/pgrx/src/datum/tuples.rs +++ b/pgrx/src/datum/tuples.rs @@ -58,19 +58,10 @@ where let b = vec.pop().unwrap(); let a = vec.pop().unwrap(); - let a_datum = if a.is_some() { - A::from_polymorphic_datum(a.unwrap(), false, A::type_oid()) - } else { - None - }; - - let b_datum = if b.is_some() { - B::from_polymorphic_datum(b.unwrap(), false, B::type_oid()) - } else { - None - }; - - Some((a_datum, b_datum)) + Some(( + a.and_then(|a| A::from_polymorphic_datum(a, false, A::type_oid())), + b.and_then(|b| B::from_polymorphic_datum(b, false, B::type_oid())), + )) } } @@ -94,24 +85,10 @@ where let b = vec.pop().unwrap(); let a = vec.pop().unwrap(); - let a_datum = if a.is_some() { - A::from_polymorphic_datum(a.unwrap(), false, A::type_oid()) - } else { - None - }; - - let b_datum = if b.is_some() { - B::from_polymorphic_datum(b.unwrap(), false, B::type_oid()) - } else { - None - }; - - let c_datum = if c.is_some() { - C::from_polymorphic_datum(c.unwrap(), false, C::type_oid()) - } else { - None - }; - - Some((a_datum, b_datum, c_datum)) + Some(( + a.and_then(|a| A::from_polymorphic_datum(a, false, A::type_oid())), + b.and_then(|b| B::from_polymorphic_datum(b, false, B::type_oid())), + c.and_then(|c| C::from_polymorphic_datum(c, false, C::type_oid())), + )) } } diff --git a/pgrx/src/fn_call.rs b/pgrx/src/fn_call.rs index 3512f4fa0c..c5365ae3ff 100644 --- a/pgrx/src/fn_call.rs +++ b/pgrx/src/fn_call.rs @@ -72,7 +72,7 @@ pub enum FnCallError { #[error("The specified function exists, but has overloaded versions which are ambiguous given the argument types provided")] AmbiguousFunction, - #[error("Can only dymamically call plain functions")] + #[error("Can only dynamically call plain functions")] UnsupportedFunctionType, #[error("Functions with OUT/IN_OUT/TABLE arguments are not supported")] @@ -297,7 +297,7 @@ pub fn fn_call_with_collation( // of `nargs` `NullableDatum` instances. 
let args_slice = fcinfo_ref.args.as_mut_slice(nargs); for (i, datum) in arg_datums.into_iter().enumerate() { - assert!(!isstrict || (isstrict && datum.is_some())); // no NULL datums if this function is STRICT + assert!(!isstrict || datum.is_some()); // no NULL datums if this function is STRICT let arg = &mut args_slice[i]; (arg.value, arg.isnull) = diff --git a/pgrx/src/list/flat_list.rs b/pgrx/src/list/flat_list.rs index 71816a57b2..b2bcc35e3d 100644 --- a/pgrx/src/list/flat_list.rs +++ b/pgrx/src/list/flat_list.rs @@ -113,7 +113,7 @@ impl<'cx, T: Enlist> List<'cx, T> { let list_size = 128; unsafe { let list: *mut pg_sys::List = mcx.alloc_bytes(list_size).cast(); - assert_ne!(list, ptr::null_mut()); + assert!(list.is_non_null()); (*list).type_ = T::LIST_TAG; (*list).max_length = ((list_size - mem::size_of::()) / mem::size_of::()) diff --git a/pgrx/src/list/linked_list.rs b/pgrx/src/list/linked_list.rs index 07fa49ae9e..960ff71b39 100644 --- a/pgrx/src/list/linked_list.rs +++ b/pgrx/src/list/linked_list.rs @@ -235,7 +235,7 @@ impl<'cx, T: Enlist> List<'cx, T> { } }; let iter = RawCellIter { - ptr: if drain_prefix == ptr::null_mut() { + ptr: if drain_prefix.is_null() { (*raw).head.cast() } else { (*drain_prefix).cell.next.cast() @@ -314,11 +314,11 @@ unsafe fn cons_cell(list: &mut pg_sys::List, value: T) -> *mut pg_sys // Let's try to maintain all the node cells in the same context, shall we? // Even though Postgres won't... let context = pg_sys::GetMemoryChunkContext(list as *mut _ as *mut _); - if context == ptr::null_mut() { + if context.is_null() { panic!("Context free list?"); }; let buf: *mut pg_sys::ListCell = pg_sys::MemoryContextAlloc(context, alloc_size).cast(); - if buf == ptr::null_mut() { + if buf.is_null() { panic!("List allocation failure"); } let cell_ptr = T::apoptosis(buf); @@ -392,7 +392,7 @@ pub struct Drain<'a, 'cx, T> { impl Drop for Drain<'_, '_, T> { fn drop(&mut self) { - if self.raw == ptr::null_mut() { + if self.raw.is_null() { return; } @@ -406,7 +406,7 @@ impl Drop for Drain<'_, '_, T> { } else { // Need to weld over the drained part and fix the length // Collect the first deallocation candidate - let mut to_dealloc = if self.drain_prefix == ptr::null_mut() { + let mut to_dealloc = if self.drain_prefix.is_null() { let dealloc = (*self.raw).head; (*self.raw).head = self.iter.ptr.cast(); dealloc diff --git a/pgrx/src/memcx.rs b/pgrx/src/memcx.rs index 789ce844b7..4803ee598d 100644 --- a/pgrx/src/memcx.rs +++ b/pgrx/src/memcx.rs @@ -42,6 +42,6 @@ where F: for<'clos> FnOnce(&'clos MemCx<'curr>) -> T, { let memcx = unsafe { MemCx::from_ptr(pg_sys::CurrentMemoryContext) }; - let ret = { f(&memcx) }; - ret + + f(&memcx) } diff --git a/pgrx/src/memcxt.rs b/pgrx/src/memcxt.rs index eac920b098..fbb75ddaba 100644 --- a/pgrx/src/memcxt.rs +++ b/pgrx/src/memcxt.rs @@ -269,15 +269,12 @@ impl PgMemoryContexts { pub unsafe fn set_as_current(&mut self) -> PgMemoryContexts { let old_context = pg_sys::CurrentMemoryContext; - match self { - PgMemoryContexts::Owned(mc) => { - // If the context is set as current while it's already current, - // don't update `previous` as it'll self-reference instead. - if old_context != mc.owned { - mc.previous = old_context; - } + if let PgMemoryContexts::Owned(mc) = self { + // If the context is set as current while it's already current, + // don't update `previous` as it'll self-reference instead. 
+ if old_context != mc.owned { + mc.previous = old_context; } - _ => {} } pg_sys::CurrentMemoryContext = self.value(); diff --git a/pgrx/src/spi/tuple.rs b/pgrx/src/spi/tuple.rs index 3bb2b3db2a..8e641bf6db 100644 --- a/pgrx/src/spi/tuple.rs +++ b/pgrx/src/spi/tuple.rs @@ -330,7 +330,7 @@ impl<'conn> SpiHeapTupleData<'conn> { unsafe { // SAFETY: we know tupdesc is not null let natts = (*tupdesc).natts; - data.entries.reserve(usize::try_from(natts as usize).unwrap_or_default()); + data.entries.reserve(usize::try_from(natts).unwrap()); for i in 1..=natts { let mut is_null = false; let datum = pg_sys::SPI_getbinval(htup, tupdesc as _, i, &mut is_null); diff --git a/pgrx/src/trigger_support/pg_trigger_level.rs b/pgrx/src/trigger_support/pg_trigger_level.rs index 321273cba4..a72e9cdc82 100644 --- a/pgrx/src/trigger_support/pg_trigger_level.rs +++ b/pgrx/src/trigger_support/pg_trigger_level.rs @@ -1,3 +1,5 @@ +use core::fmt::{Display, Formatter}; +use std::fmt; //LICENSE Portions Copyright 2019-2021 ZomboDB, LLC. //LICENSE //LICENSE Portions Copyright 2021-2023 Technology Concepts & Design, Inc. @@ -33,12 +35,11 @@ impl From for PgTriggerLevel { } } -impl ToString for PgTriggerLevel { - fn to_string(&self) -> String { - match self { - PgTriggerLevel::Statement => "STATEMENT", +impl Display for PgTriggerLevel { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(match self { PgTriggerLevel::Row => "ROW", - } - .to_string() + PgTriggerLevel::Statement => "STATEMENT", + }) } } diff --git a/pgrx/src/trigger_support/pg_trigger_option.rs b/pgrx/src/trigger_support/pg_trigger_option.rs index 2f7fea88ae..0f483c1fdd 100644 --- a/pgrx/src/trigger_support/pg_trigger_option.rs +++ b/pgrx/src/trigger_support/pg_trigger_option.rs @@ -1,3 +1,5 @@ +use core::fmt::{Display, Formatter}; +use std::fmt; //LICENSE Portions Copyright 2019-2021 ZomboDB, LLC. //LICENSE //LICENSE Portions Copyright 2021-2023 Technology Concepts & Design, Inc. @@ -41,14 +43,13 @@ impl TryFrom for PgTriggerOperation { } } -impl ToString for PgTriggerOperation { - fn to_string(&self) -> String { - match self { +impl Display for PgTriggerOperation { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(match self { PgTriggerOperation::Insert => "INSERT", PgTriggerOperation::Update => "UPDATE", PgTriggerOperation::Delete => "DELETE", PgTriggerOperation::Truncate => "TRUNCATE", - } - .to_string() + }) } } diff --git a/pgrx/src/trigger_support/pg_trigger_when.rs b/pgrx/src/trigger_support/pg_trigger_when.rs index 58d42926d9..8a88b1e5b7 100644 --- a/pgrx/src/trigger_support/pg_trigger_when.rs +++ b/pgrx/src/trigger_support/pg_trigger_when.rs @@ -1,3 +1,5 @@ +use core::fmt::{Display, Formatter}; +use std::fmt; //LICENSE Portions Copyright 2019-2021 ZomboDB, LLC. //LICENSE //LICENSE Portions Copyright 2021-2023 Technology Concepts & Design, Inc. @@ -38,13 +40,12 @@ impl TryFrom for PgTriggerWhen { } } -impl ToString for PgTriggerWhen { - fn to_string(&self) -> String { - match self { +impl Display for PgTriggerWhen { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(match self { PgTriggerWhen::Before => "BEFORE", PgTriggerWhen::After => "AFTER", PgTriggerWhen::InsteadOf => "INSTEAD OF", - } - .to_string() + }) } }
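The recurring change throughout the diff above swaps `&PathBuf` parameters for `&Path`, calling `.to_path_buf()` only where an owned path is actually required. A minimal standalone sketch of that idiom follows; the `stage_dir` helper is hypothetical and only illustrates the pattern, it is not code from cargo-pgrx.

```rust
use std::path::{Path, PathBuf};

// Hypothetical helper: borrow any path-like value via `&Path`,
// and allocate an owned `PathBuf` only when one is needed.
fn stage_dir(base: &Path, version: &str) -> PathBuf {
    let mut dir = base.to_path_buf(); // same move as `pgrxdir.to_path_buf()` in the patch
    dir.push(format!("{version}_unpack"));
    dir
}

fn main() {
    // `&Path` accepts `&PathBuf`, `&Path`, and `Path::new(..)` alike,
    // which is why the patch prefers it over `&PathBuf` in signatures.
    let home = PathBuf::from("/tmp/pgrx-home");
    println!("{}", stage_dir(&home, "16.1").display());
    println!("{}", stage_dir(Path::new("/opt/pgrx"), "15.5").display());
}
```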
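Similarly, the trigger-support changes at the end of the diff replace hand-written `ToString` impls with `Display` impls, which provide `to_string()` for free through the blanket impl. A small sketch of that pattern on a hypothetical `Phase` enum (not the actual `PgTriggerWhen` type):

```rust
use core::fmt::{Display, Formatter};
use std::fmt;

// Hypothetical enum standing in for PgTriggerLevel/PgTriggerOperation/PgTriggerWhen.
enum Phase {
    Before,
    After,
}

impl Display for Phase {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Phase::Before => "BEFORE",
            Phase::After => "AFTER",
        })
    }
}

fn main() {
    // `to_string()` now comes from the blanket `impl<T: Display> ToString for T`.
    assert_eq!(Phase::Before.to_string(), "BEFORE");
    println!("{}", Phase::After);
}
```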