Clean up codebase: fix reasonable clippy warnings.

This commit is a codebase-wide cleanup driven by clippy warnings. In
addition to fixing every reasonable warning, the following new
functionality was introduced:

  * `Accept::new()` now takes any `T: Into<QMediaType>` iterator.
  * `TempFile::is_empty()` was added.
  * `HeaderMap` now implements `IntoIterator`.
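An illustrative sketch of the new APIs (not part of the commit itself; the exact signatures are those shown in the diffs below):

```rust
use rocket::http::{Accept, Header, HeaderMap, MediaType};

// `Accept::new()` now accepts any iterator of items convertible into
// `QMediaType`, so plain `MediaType`s no longer need an explicit `.into()`.
let accept = Accept::new([MediaType::JSON, MediaType::HTML]);
assert_eq!(accept.preferred().media_type(), &MediaType::JSON);

// `HeaderMap` now implements `IntoIterator`, yielding each `Header`, with
// the values for a given header name grouped together in FIFO order.
let mut map = HeaderMap::new();
map.add(Header::new("X-Custom", "value"));
for header in map {
    println!("{}: {}", header.name(), header.value());
}
```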

This commit makes the following breaking changes:

  * The `IntoCollection` trait is gone. Generics previously bound by the
    trait are now bound by `IntoIterator`. This affects:
    - `Accept::new()`
    - `ContentType::with_params()`
    - `Permission::{allow, allowed}()`
  * `MediaType`, `QMediaType`, and `Allow` implement `IntoIterator`,
    enabling most existing code to continue working without change.
  * The inherent `HeaderMap::into_iter()` method was removed.
  * The `Ok` variant of `ErrorKind::Liftoff` is now `Box<Rocket<Ignite>>`.
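A minimal migration sketch (illustrative, based on the doc changes in the diffs below): call sites that previously passed a single value where `IntoCollection` was accepted now pass an iterator, such as an array, while single `MediaType`/`QMediaType` values continue to work via their new `IntoIterator` impls:

```rust
use rocket::http::{Accept, ContentType, MediaType, QMediaType};

// Before: `with_params(("id", "1"))`. After: wrap single items in an array
// (or pass any other iterator of pairs).
let id = ContentType::new("application", "x-id").with_params([("id", "1")]);
assert_eq!(id.to_string(), "application/x-id; id=1".to_string());

// A single `QMediaType` (or `MediaType`) still works directly, since both
// now implement `IntoIterator` over themselves.
let accept = Accept::new(QMediaType(MediaType::JSON, Some(0.4)));
assert_eq!(accept.preferred().media_type(), &MediaType::JSON);
```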
Sergio Benitez 2024-03-20 00:00:33 -07:00
parent d9249db6d6
commit 02011a1307
98 changed files with 453 additions and 393 deletions

View File

@ -12,6 +12,10 @@ license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
rust-version = "1.75" rust-version = "1.75"
[lints.clippy]
type_complexity = "allow"
multiple_bound_locations = "allow"
[features] [features]
tera = ["tera_"] tera = ["tera_"]
handlebars = ["handlebars_"] handlebars = ["handlebars_"]

View File

@ -84,7 +84,7 @@ impl Context {
if !templates.contains_key(name) { if !templates.contains_key(name) {
let data_type = Path::new(name).extension() let data_type = Path::new(name).extension()
.and_then(|osstr| osstr.to_str()) .and_then(|osstr| osstr.to_str())
.and_then(|ext| ContentType::from_extension(ext)) .and_then(ContentType::from_extension)
.unwrap_or(ContentType::Text); .unwrap_or(ContentType::Text);
let info = TemplateInfo { path: None, engine_ext, data_type }; let info = TemplateInfo { path: None, engine_ext, data_type };
@ -183,7 +183,7 @@ mod manager {
if let Some(true) = templates_changes { if let Some(true) = templates_changes {
info_!("Change detected: reloading templates."); info_!("Change detected: reloading templates.");
let root = self.context().root.clone(); let root = self.context().root.clone();
if let Some(new_ctxt) = Context::initialize(&root, &callback) { if let Some(new_ctxt) = Context::initialize(&root, callback) {
*self.context_mut() = new_ctxt; *self.context_mut() = new_ctxt;
} else { } else {
warn_!("An error occurred while reloading templates."); warn_!("An error occurred while reloading templates.");
@ -217,7 +217,7 @@ fn split_path(root: &Path, path: &Path) -> (String, Option<String>) {
// Ensure template name consistency on Windows systems // Ensure template name consistency on Windows systems
if cfg!(windows) { if cfg!(windows) {
name = name.replace("\\", "/"); name = name.replace('\\', "/");
} }
(name, data_type.map(|d| d.to_string_lossy().into_owned())) (name, data_type.map(|d| d.to_string_lossy().into_owned()))

View File

@ -20,7 +20,7 @@ impl Engine for Handlebars<'static> {
} }
} }
ok.then(|| hb) ok.then_some(hb)
} }
fn render<C: Serialize>(&self, name: &str, context: C) -> Option<String> { fn render<C: Serialize>(&self, name: &str, context: C) -> Option<String> {

View File

@ -412,7 +412,7 @@ impl Template {
Status::InternalServerError Status::InternalServerError
})?; })?;
let string = ctxt.engines.render(name, &info, value).ok_or_else(|| { let string = ctxt.engines.render(name, info, value).ok_or_else(|| {
error_!("Template '{}' failed to render.", name); error_!("Template '{}' failed to render.", name);
Status::InternalServerError Status::InternalServerError
})?; })?;

View File

@ -109,8 +109,8 @@ impl WebSocket {
/// })) /// }))
/// } /// }
/// ``` /// ```
pub fn channel<'r, F: Send + 'r>(self, handler: F) -> Channel<'r> pub fn channel<'r, F>(self, handler: F) -> Channel<'r>
where F: FnOnce(DuplexStream) -> BoxFuture<'r, Result<()>> + 'r where F: FnOnce(DuplexStream) -> BoxFuture<'r, Result<()>> + Send + 'r
{ {
Channel { ws: self, handler: Box::new(handler), } Channel { ws: self, handler: Box::new(handler), }
} }

View File

@ -15,6 +15,10 @@ rust-version = "1.75"
[lib] [lib]
proc-macro = true proc-macro = true
[lints.clippy]
manual_range_contains = "allow"
large_enum_variant = "allow"
[dependencies] [dependencies]
indexmap = "2" indexmap = "2"
quote = "1.0" quote = "1.0"

View File

@ -16,7 +16,7 @@ fn _async_bound(
let mut func: TraitItemFn = syn::parse(input)?; let mut func: TraitItemFn = syn::parse(input)?;
let original: TraitItemFn = func.clone(); let original: TraitItemFn = func.clone();
if !func.sig.asyncness.is_some() { if func.sig.asyncness.is_none() {
let diag = Span::call_site() let diag = Span::call_site()
.error("attribute can only be applied to async fns") .error("attribute can only be applied to async fns")
.span_help(func.sig.span(), "this fn declaration must be `async`"); .span_help(func.sig.span(), "this fn declaration must be `async`");

View File

@ -31,7 +31,7 @@ impl Dynamic {
segment: &str, segment: &str,
span: Span, span: Span,
) -> Result<Self, Error<'_>> { ) -> Result<Self, Error<'_>> {
match Parameter::parse::<P>(&segment, span)? { match Parameter::parse::<P>(segment, span)? {
Parameter::Dynamic(d) | Parameter::Ignored(d) => Ok(d), Parameter::Dynamic(d) | Parameter::Ignored(d) => Ok(d),
Parameter::Guard(g) => Ok(g.source), Parameter::Guard(g) => Ok(g.source),
Parameter::Static(_) => Err(Error::new(segment, span, ErrorKind::Static)), Parameter::Static(_) => Err(Error::new(segment, span, ErrorKind::Static)),

View File

@ -293,7 +293,7 @@ fn sentinels_expr(route: &Route) -> TokenStream {
let eligible_types = route.guards() let eligible_types = route.guards()
.map(|guard| &guard.ty) .map(|guard| &guard.ty)
.chain(ret_ty.as_ref().into_iter()) .chain(ret_ty.as_ref())
.flat_map(|ty| ty.unfold_with_ty_macros(TY_MACS, ty_mac_mapper)) .flat_map(|ty| ty.unfold_with_ty_macros(TY_MACS, ty_mac_mapper))
.filter(|ty| ty.is_concrete(&generic_idents)) .filter(|ty| ty.is_concrete(&generic_idents))
.map(|child| (child.parent, child.ty)); .map(|child| (child.parent, child.ty));

View File

@ -180,7 +180,7 @@ impl Route {
// Collect all of the declared dynamic route parameters. // Collect all of the declared dynamic route parameters.
let all_dyn_params = path_params.iter().filter_map(|p| p.dynamic()) let all_dyn_params = path_params.iter().filter_map(|p| p.dynamic())
.chain(query_params.iter().filter_map(|p| p.dynamic())) .chain(query_params.iter().filter_map(|p| p.dynamic()))
.chain(data_guard.as_ref().map(|g| &g.source).into_iter()); .chain(data_guard.as_ref().map(|g| &g.source));
// Check for any duplicates in the dynamic route parameters. // Check for any duplicates in the dynamic route parameters.
let mut dyn_params: IndexSet<&Dynamic> = IndexSet::new(); let mut dyn_params: IndexSet<&Dynamic> = IndexSet::new();
@ -196,7 +196,7 @@ impl Route {
.filter(|(name, _)| { .filter(|(name, _)| {
let mut all_other_guards = path_params.iter().filter_map(|p| p.guard()) let mut all_other_guards = path_params.iter().filter_map(|p| p.guard())
.chain(query_params.iter().filter_map(|p| p.guard())) .chain(query_params.iter().filter_map(|p| p.guard()))
.chain(data_guard.as_ref().into_iter()); .chain(data_guard.as_ref());
all_other_guards.all(|g| &g.name != *name) all_other_guards.all(|g| &g.name != *name)
}) })

View File

@ -47,19 +47,19 @@ pub fn catchers_macro(input: proc_macro::TokenStream) -> TokenStream {
pub fn uri_macro(input: proc_macro::TokenStream) -> TokenStream { pub fn uri_macro(input: proc_macro::TokenStream) -> TokenStream {
uri::_uri_macro(input.into()) uri::_uri_macro(input.into())
.unwrap_or_else(|diag| diag.emit_as_expr_tokens_or(quote! { .unwrap_or_else(|diag| diag.emit_as_expr_tokens_or(quote! {
rocket::http::uri::Origin::ROOT rocket::http::uri::Origin::root()
})) }))
} }
pub fn uri_internal_macro(input: proc_macro::TokenStream) -> TokenStream { pub fn uri_internal_macro(input: proc_macro::TokenStream) -> TokenStream {
// TODO: Ideally we would generate a perfect `Origin::ROOT` so that we don't // TODO: Ideally we would generate a perfect `Origin::root()` so that we don't
// assist in propagating further errors. Alas, we can't set the span to the // assist in propagating further errors. Alas, we can't set the span to the
// invocation of `uri!` without access to `span.parent()`, and // invocation of `uri!` without access to `span.parent()`, and
// `Span::call_site()` here points to the `#[route]`, immediate caller, // `Span::call_site()` here points to the `#[route]`, immediate caller,
// generating a rather confusing error message when there's a type-mismatch. // generating a rather confusing error message when there's a type-mismatch.
uri::_uri_internal_macro(input.into()) uri::_uri_internal_macro(input.into())
.unwrap_or_else(|diag| diag.emit_as_expr_tokens_or(quote! { .unwrap_or_else(|diag| diag.emit_as_expr_tokens_or(quote! {
rocket::http::uri::Origin::ROOT rocket::http::uri::Origin::root()
})) }))
} }

View File

@ -15,10 +15,10 @@ pub fn _macro(input: proc_macro::TokenStream) -> devise::Result<TokenStream> {
fn entry_to_tests(root_glob: &LitStr) -> Result<Vec<TokenStream>, Box<dyn Error>> { fn entry_to_tests(root_glob: &LitStr) -> Result<Vec<TokenStream>, Box<dyn Error>> {
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").expect("MANIFEST_DIR"); let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").expect("MANIFEST_DIR");
let full_glob = Path::new(&manifest_dir).join(&root_glob.value()).display().to_string(); let full_glob = Path::new(&manifest_dir).join(root_glob.value());
let mut tests = vec![]; let mut tests = vec![];
for path in glob::glob(&full_glob).map_err(Box::new)? { for path in glob::glob(&full_glob.to_string_lossy()).map_err(Box::new)? {
let path = path.map_err(Box::new)?; let path = path.map_err(Box::new)?;
let name = path.file_name() let name = path.file_name()
.and_then(|f| f.to_str()) .and_then(|f| f.to_str())
@ -38,7 +38,7 @@ fn entry_to_tests(root_glob: &LitStr) -> Result<Vec<TokenStream>, Box<dyn Error>
} }
if tests.is_empty() { if tests.is_empty() {
return Err(format!("glob '{}' evaluates to 0 files", full_glob).into()); return Err(format!("glob '{}' evaluates to 0 files", full_glob.display()).into());
} }
Ok(tests) Ok(tests)

View File

@ -151,7 +151,7 @@ fn explode_path<'a>(
Parameter::Dynamic(_) | Parameter::Guard(_) => { Parameter::Dynamic(_) | Parameter::Guard(_) => {
let (ident, ty) = args.next().expect("ident/ty for non-ignored"); let (ident, ty) = args.next().expect("ident/ty for non-ignored");
let expr = exprs.next().expect("one expr per dynamic arg"); let expr = exprs.next().expect("one expr per dynamic arg");
add_binding::<fmt::Path>(bindings, &ident, &ty, &expr); add_binding::<fmt::Path>(bindings, ident, ty, expr);
quote_spanned!(expr.span() => &#ident as &dyn #uri_display) quote_spanned!(expr.span() => &#ident as &dyn #uri_display)
} }
Parameter::Ignored(_) => { Parameter::Ignored(_) => {
@ -207,7 +207,7 @@ fn explode_query<'a>(
}; };
let name = &dynamic.name; let name = &dynamic.name;
add_binding::<fmt::Query>(bindings, &ident, &ty, &expr); add_binding::<fmt::Query>(bindings, ident, ty, expr);
Some(match dynamic.trailing { Some(match dynamic.trailing {
false => quote_spanned! { expr.span() => false => quote_spanned! { expr.span() =>
#query_arg::NameValue(#name, &#ident as &dyn #uri_display) #query_arg::NameValue(#name, &#ident as &dyn #uri_display)

View File

@ -421,10 +421,7 @@ impl RoutedUri {
impl Arg { impl Arg {
fn is_named(&self) -> bool { fn is_named(&self) -> bool {
match *self { matches!(self, Arg::Named(..))
Arg::Named(..) => true,
_ => false
}
} }
fn unnamed(&self) -> &ArgExpr { fn unnamed(&self) -> &ArgExpr {
@ -484,7 +481,7 @@ impl UriExpr {
let lit = input.parse::<StringLit>()?; let lit = input.parse::<StringLit>()?;
let uri = Uri::parse::<Origin<'_>>(&lit) let uri = Uri::parse::<Origin<'_>>(&lit)
.or_else(|e| Uri::parse::<Absolute<'_>>(&lit).map_err(|e2| (e, e2))) .or_else(|e| Uri::parse::<Absolute<'_>>(&lit).map_err(|e2| (e, e2)))
.map_err(|(e1, e2)| lit.starts_with('/').then(|| e1).unwrap_or(e2)) .map_err(|(e1, e2)| if lit.starts_with('/') { e1 } else { e2 })
.or_else(|e| uri_err(&lit, e))?; .or_else(|e| uri_err(&lit, e))?;
if matches!(&uri, Uri::Origin(o) if o.query().is_some()) if matches!(&uri, Uri::Origin(o) if o.query().is_some())

View File

@ -327,7 +327,7 @@ impl VisitMut for ValidationMutator<'_> {
*i = expr; *i = expr;
} }
} else if let Some(expr) = inner_field(&i) { } else if let Some(expr) = inner_field(i) {
*i = expr; *i = expr;
} }
@ -335,7 +335,7 @@ impl VisitMut for ValidationMutator<'_> {
} }
} }
pub fn validators<'v>(field: Field<'v>) -> Result<impl Iterator<Item = syn::Expr> + 'v> { pub fn validators(field: Field<'_>) -> Result<impl Iterator<Item = syn::Expr> + '_> {
Ok(FieldAttr::from_attrs(FieldAttr::NAME, &field.attrs)? Ok(FieldAttr::from_attrs(FieldAttr::NAME, &field.attrs)?
.into_iter() .into_iter()
.chain(FieldAttr::from_attrs(FieldAttr::NAME, field.parent.attrs())?) .chain(FieldAttr::from_attrs(FieldAttr::NAME, field.parent.attrs())?)
@ -396,7 +396,7 @@ fn default_expr(expr: &syn::Expr) -> TokenStream {
} }
} }
pub fn default<'v>(field: Field<'v>) -> Result<Option<TokenStream>> { pub fn default(field: Field<'_>) -> Result<Option<TokenStream>> {
let field_attrs = FieldAttr::from_attrs(FieldAttr::NAME, &field.attrs)?; let field_attrs = FieldAttr::from_attrs(FieldAttr::NAME, &field.attrs)?;
let parent_attrs = FieldAttr::from_attrs(FieldAttr::NAME, field.parent.attrs())?; let parent_attrs = FieldAttr::from_attrs(FieldAttr::NAME, field.parent.attrs())?;
@ -446,10 +446,12 @@ pub fn default<'v>(field: Field<'v>) -> Result<Option<TokenStream>> {
} }
} }
type Dup = (usize, Span, Span);
pub fn first_duplicate<K: Spanned, V: PartialEq + Spanned>( pub fn first_duplicate<K: Spanned, V: PartialEq + Spanned>(
keys: impl Iterator<Item = K> + Clone, keys: impl Iterator<Item = K> + Clone,
values: impl Fn(&K) -> Result<Vec<V>>, values: impl Fn(&K) -> Result<Vec<V>>,
) -> Result<Option<((usize, Span, Span), (usize, Span, Span))>> { ) -> Result<Option<(Dup, Dup)>> {
let (mut all_values, mut key_map) = (vec![], vec![]); let (mut all_values, mut key_map) = (vec![], vec![]);
for key in keys { for key in keys {
all_values.append(&mut values(&key)?); all_values.append(&mut values(&key)?);

View File

@ -79,7 +79,7 @@ pub fn derive_responder(input: proc_macro::TokenStream) -> TokenStream {
let responder = fields.iter().next().map(|f| { let responder = fields.iter().next().map(|f| {
let (accessor, ty) = (f.accessor(), f.ty.with_stripped_lifetimes()); let (accessor, ty) = (f.accessor(), f.ty.with_stripped_lifetimes());
quote_spanned! { f.span().into() => quote_spanned! { f.span() =>
let mut __res = <#ty as #_response::Responder>::respond_to( let mut __res = <#ty as #_response::Responder>::respond_to(
#accessor, __req #accessor, __req
)?; )?;

View File

@ -99,7 +99,6 @@ define_exported_paths! {
StaticCatcherInfo => ::rocket::StaticCatcherInfo, StaticCatcherInfo => ::rocket::StaticCatcherInfo,
Route => ::rocket::Route, Route => ::rocket::Route,
Catcher => ::rocket::Catcher, Catcher => ::rocket::Catcher,
SmallVec => ::rocket::http::private::SmallVec,
Status => ::rocket::http::Status, Status => ::rocket::http::Status,
} }

View File

@ -89,10 +89,10 @@ fn test_custom_formats() {
let client = Client::debug(rocket).unwrap(); let client = Client::debug(rocket).unwrap();
let foo_a = Accept::new([MediaType::new("application", "foo").into()]); let foo_a = Accept::new([MediaType::new("application", "foo")]);
let foo_ct = ContentType::new("application", "foo"); let foo_ct = ContentType::new("application", "foo");
let bar_baz_ct = ContentType::new("bar", "baz"); let bar_baz_ct = ContentType::new("bar", "baz");
let bar_baz_a = Accept::new([MediaType::new("bar", "baz").into()]); let bar_baz_a = Accept::new([MediaType::new("bar", "baz")]);
let response = client.get("/").header(foo_a).dispatch(); let response = client.get("/").header(foo_a).dispatch();
assert_eq!(response.into_string().unwrap(), "get_foo"); assert_eq!(response.into_string().unwrap(), "get_foo");

View File

@ -20,15 +20,20 @@ default = []
serde = ["uncased/with-serde-alloc", "serde_"] serde = ["uncased/with-serde-alloc", "serde_"]
uuid = ["uuid_"] uuid = ["uuid_"]
[lints.clippy]
module_inception = "allow"
multiple_bound_locations = "allow"
manual_range_contains = "allow"
[dependencies] [dependencies]
smallvec = { version = "1.11", features = ["const_generics", "const_new"] } tinyvec = { version = "1.6", features = ["std", "rustc_1_57"] }
percent-encoding = "2" percent-encoding = "2"
time = { version = "0.3", features = ["formatting", "macros"] } time = { version = "0.3", features = ["formatting", "macros"] }
indexmap = "2" indexmap = "2"
ref-cast = "1.0" ref-cast = "1.0"
uncased = "0.9.10" uncased = "0.9.10"
either = "1" either = "1"
pear = "0.2.8" pear = "0.2.9"
memchr = "2" memchr = "2"
stable-pattern = "0.1" stable-pattern = "0.1"
cookie = { version = "0.18", features = ["percent-encode"] } cookie = { version = "0.18", features = ["percent-encode"] }

View File

@ -1,75 +1,9 @@
//! Extension traits implemented by several HTTP types. //! Extension traits implemented by several HTTP types.
use smallvec::{Array, SmallVec};
use state::InitCell;
// TODO: It would be nice if we could somehow have one trait that could give us
// either SmallVec or Vec.
/// Trait implemented by types that can be converted into a collection.
pub trait IntoCollection<T>: Sized {
/// Converts `self` into a collection.
fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A>;
#[doc(hidden)]
fn mapped<U, F: FnMut(T) -> U, A: Array<Item=U>>(self, f: F) -> SmallVec<A>;
}
impl<T> IntoCollection<T> for T {
#[inline]
fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A> {
let mut vec = SmallVec::new();
vec.push(self);
vec
}
#[inline(always)]
fn mapped<U, F: FnMut(T) -> U, A: Array<Item=U>>(self, mut f: F) -> SmallVec<A> {
f(self).into_collection()
}
}
impl<T> IntoCollection<T> for Vec<T> {
#[inline(always)]
fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A> {
SmallVec::from_vec(self)
}
#[inline]
fn mapped<U, F: FnMut(T) -> U, A: Array<Item=U>>(self, f: F) -> SmallVec<A> {
self.into_iter().map(f).collect()
}
}
impl<T: Clone> IntoCollection<T> for &[T] {
#[inline(always)]
fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A> {
self.iter().cloned().collect()
}
#[inline]
fn mapped<U, F, A: Array<Item=U>>(self, f: F) -> SmallVec<A>
where F: FnMut(T) -> U
{
self.iter().cloned().map(f).collect()
}
}
impl<T, const N: usize> IntoCollection<T> for [T; N] {
#[inline(always)]
fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A> {
self.into_iter().collect()
}
#[inline]
fn mapped<U, F, A: Array<Item=U>>(self, f: F) -> SmallVec<A>
where F: FnMut(T) -> U
{
self.into_iter().map(f).collect()
}
}
use std::borrow::Cow; use std::borrow::Cow;
use state::InitCell;
/// Trait implemented by types that can be converted into owned versions of /// Trait implemented by types that can be converted into owned versions of
/// themselves. /// themselves.
pub trait IntoOwned { pub trait IntoOwned {

View File

@ -1,11 +1,9 @@
use std::borrow::Cow;
use std::ops::Deref; use std::ops::Deref;
use std::str::FromStr; use std::str::FromStr;
use std::fmt; use std::fmt;
use smallvec::SmallVec;
use crate::{Header, MediaType}; use crate::{Header, MediaType};
use crate::ext::IntoCollection;
use crate::parse::parse_accept; use crate::parse::parse_accept;
/// The HTTP Accept header. /// The HTTP Accept header.
@ -52,7 +50,7 @@ use crate::parse::parse_accept;
/// let response = Response::build().header(Accept::JSON).finalize(); /// let response = Response::build().header(Accept::JSON).finalize();
/// ``` /// ```
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Accept(pub(crate) SmallVec<[QMediaType; 1]>); pub struct Accept(pub(crate) Cow<'static, [QMediaType]>);
/// A `MediaType` with an associated quality value. /// A `MediaType` with an associated quality value.
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
@ -65,9 +63,10 @@ macro_rules! accept_constructor {
#[doc="An `Accept` header with the single media type for"] #[doc="An `Accept` header with the single media type for"]
#[doc=concat!("**", $str, "**: ", "_", $t, "/", $s, "_")] #[doc=concat!("**", $str, "**: ", "_", $t, "/", $s, "_")]
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const $name: Accept = Accept( pub const $name: Accept = Accept({
SmallVec::from_const([QMediaType(MediaType::$name, None)]) const INNER: &[QMediaType] = &[QMediaType(MediaType::$name, None)];
); Cow::Borrowed(INNER)
});
)+ )+
}; };
} }
@ -86,29 +85,48 @@ impl Accept {
/// # extern crate rocket; /// # extern crate rocket;
/// use rocket::http::{QMediaType, MediaType, Accept}; /// use rocket::http::{QMediaType, MediaType, Accept};
/// ///
/// // Construct an `Accept` via a `Vec<QMediaType>`. /// // Construct an `Accept` via a `Vec<MediaType>`.
/// let json_then_html = vec![MediaType::JSON.into(), MediaType::HTML.into()]; /// let json_then_html = vec![MediaType::JSON, MediaType::HTML];
/// let accept = Accept::new(json_then_html); /// let accept = Accept::new(json_then_html);
/// assert_eq!(accept.preferred().media_type(), &MediaType::JSON); /// assert_eq!(accept.preferred().media_type(), &MediaType::JSON);
/// ///
/// // Construct an `Accept` via an `[QMediaType]`. /// // Construct an `Accept` via an `[MediaType]`.
/// let accept = Accept::new([MediaType::JSON.into(), MediaType::HTML.into()]); /// let accept = Accept::new([MediaType::JSON, MediaType::HTML]);
/// assert_eq!(accept.preferred().media_type(), &MediaType::JSON); /// assert_eq!(accept.preferred().media_type(), &MediaType::JSON);
/// ///
/// // Construct an `Accept` via a `QMediaType`. /// // Construct an `Accept` via a single `QMediaType`.
/// let accept = Accept::new(QMediaType(MediaType::JSON, None)); /// let accept = Accept::new(QMediaType(MediaType::JSON, Some(0.4)));
/// assert_eq!(accept.preferred().media_type(), &MediaType::JSON); /// assert_eq!(accept.preferred().media_type(), &MediaType::JSON);
/// ``` /// ```
#[inline(always)] #[inline(always)]
pub fn new<T: IntoCollection<QMediaType>>(items: T) -> Accept { pub fn new<T: IntoIterator<Item = M>, M: Into<QMediaType>>(items: T) -> Accept {
Accept(items.into_collection()) Accept(items.into_iter().map(|v| v.into()).collect())
} }
// TODO: Implement this. /// Adds `media_type` to `self`.
// #[inline(always)] ///
// pub fn add<M: Into<QMediaType>>(&mut self, media_type: M) { /// # Example
// self.0.push(media_type.into()); ///
// } /// ```rust
/// # extern crate rocket;
/// use rocket::http::{QMediaType, MediaType, Accept};
///
/// let mut accept = Accept::new(QMediaType(MediaType::JSON, Some(0.1)));
/// assert_eq!(accept.preferred().media_type(), &MediaType::JSON);
/// assert_eq!(accept.iter().count(), 1);
///
/// accept.add(QMediaType(MediaType::HTML, Some(0.7)));
/// assert_eq!(accept.preferred().media_type(), &MediaType::HTML);
/// assert_eq!(accept.iter().count(), 2);
///
/// accept.add(QMediaType(MediaType::XML, Some(0.6)));
/// assert_eq!(accept.preferred().media_type(), &MediaType::HTML);
/// assert_eq!(accept.iter().count(), 3);
/// ```
#[inline(always)]
pub fn add<M: Into<QMediaType>>(&mut self, media_type: M) {
self.0.to_mut().push(media_type.into());
}
/// Retrieve the client's preferred media type. This method follows [RFC /// Retrieve the client's preferred media type. This method follows [RFC
/// 7231 5.3.2]. If the list of media types is empty, this method returns a /// 7231 5.3.2]. If the list of media types is empty, this method returns a
@ -233,10 +251,10 @@ impl Accept {
known_media_types!(accept_constructor); known_media_types!(accept_constructor);
} }
impl<T: IntoCollection<MediaType>> From<T> for Accept { impl<T: IntoIterator<Item = MediaType>> From<T> for Accept {
#[inline(always)] #[inline(always)]
fn from(items: T) -> Accept { fn from(items: T) -> Accept {
Accept(items.mapped(|item| item.into())) Accept::new(items.into_iter().map(QMediaType::from))
} }
} }
@ -332,6 +350,16 @@ impl QMediaType {
} }
} }
impl IntoIterator for QMediaType {
type Item = Self;
type IntoIter = std::iter::Once<Self>;
fn into_iter(self) -> Self::IntoIter {
std::iter::once(self)
}
}
impl From<MediaType> for QMediaType { impl From<MediaType> for QMediaType {
#[inline(always)] #[inline(always)]
fn from(media_type: MediaType) -> QMediaType { fn from(media_type: MediaType) -> QMediaType {

View File

@ -5,7 +5,6 @@ use std::fmt;
use crate::header::{Header, MediaType}; use crate::header::{Header, MediaType};
use crate::uncased::UncasedStr; use crate::uncased::UncasedStr;
use crate::ext::IntoCollection;
/// Representation of HTTP Content-Types. /// Representation of HTTP Content-Types.
/// ///
@ -247,7 +246,7 @@ impl ContentType {
/// # extern crate rocket; /// # extern crate rocket;
/// use rocket::http::ContentType; /// use rocket::http::ContentType;
/// ///
/// let id = ContentType::new("application", "x-id").with_params(("id", "1")); /// let id = ContentType::new("application", "x-id").with_params([("id", "1")]);
/// assert_eq!(id.to_string(), "application/x-id; id=1".to_string()); /// assert_eq!(id.to_string(), "application/x-id; id=1".to_string());
/// ``` /// ```
/// ///
@ -265,7 +264,7 @@ impl ContentType {
pub fn with_params<K, V, P>(self, parameters: P) -> ContentType pub fn with_params<K, V, P>(self, parameters: P) -> ContentType
where K: Into<Cow<'static, str>>, where K: Into<Cow<'static, str>>,
V: Into<Cow<'static, str>>, V: Into<Cow<'static, str>>,
P: IntoCollection<(K, V)> P: IntoIterator<Item = (K, V)>
{ {
ContentType(self.0.with_params(parameters)) ContentType(self.0.with_params(parameters))
} }

View File

@ -90,11 +90,9 @@ impl<'h> Header<'h> {
#[doc(hidden)] #[doc(hidden)]
pub const fn is_valid_name(name: &str) -> bool { pub const fn is_valid_name(name: &str) -> bool {
const fn is_tchar(b: &u8) -> bool { const fn is_tchar(b: &u8) -> bool {
b.is_ascii_alphanumeric() || match *b { b.is_ascii_alphanumeric() || matches!(*b,
b'!' | b'#' | b'$' | b'%' | b'&' | b'\'' | b'*' | b'+' | b'-' | b'!' | b'#' | b'$' | b'%' | b'&' | b'\'' | b'*' | b'+' | b'-' |
b'.' | b'^' | b'_' | b'`' | b'|' | b'~' => true, b'.' | b'^' | b'_' | b'`' | b'|' | b'~')
_ => false
}
} }
let mut i = 0; let mut i = 0;
@ -522,7 +520,7 @@ impl<'h> HeaderMap<'h> {
#[inline(always)] #[inline(always)]
pub fn add<'p: 'h, H: Into<Header<'p>>>(&mut self, header: H) { pub fn add<'p: 'h, H: Into<Header<'p>>>(&mut self, header: H) {
let header = header.into(); let header = header.into();
self.headers.entry(header.name).or_insert(vec![]).push(header.value); self.headers.entry(header.name).or_default().push(header.value);
} }
/// A convenience method to add a header using a raw name and value. /// A convenience method to add a header using a raw name and value.
@ -580,7 +578,7 @@ impl<'h> HeaderMap<'h> {
where 'n:'h, H: Into<Cow<'n, str>> where 'n:'h, H: Into<Cow<'n, str>>
{ {
self.headers.entry(Uncased::new(name)) self.headers.entry(Uncased::new(name))
.or_insert(vec![]) .or_default()
.append(values) .append(values)
} }
@ -660,6 +658,7 @@ impl<'h> HeaderMap<'h> {
/// ///
/// // The headers we'll be storing. /// // The headers we'll be storing.
/// let all_headers = vec![ /// let all_headers = vec![
/// Header::new("X-Custom", "value_0"),
/// Header::new("X-Custom", "value_1"), /// Header::new("X-Custom", "value_1"),
/// Header::new("X-Other", "other"), /// Header::new("X-Other", "other"),
/// Header::new("X-Third", "third"), /// Header::new("X-Third", "third"),
@ -672,15 +671,19 @@ impl<'h> HeaderMap<'h> {
/// } /// }
/// ///
/// // Ensure there are three headers via the iterator. /// // Ensure there are three headers via the iterator.
/// assert_eq!(map.iter().count(), 3); /// assert_eq!(map.iter().count(), 4);
/// ///
/// // Actually iterate through them. /// // Actually iterate through them.
/// let mut custom = 0;
/// for header in map.iter() { /// for header in map.iter() {
/// match header.name().as_str() { /// match header.name().as_str() {
/// "X-Custom" => assert_eq!(header.value(), "value_1"),
/// "X-Other" => assert_eq!(header.value(), "other"), /// "X-Other" => assert_eq!(header.value(), "other"),
/// "X-Third" => assert_eq!(header.value(), "third"), /// "X-Third" => assert_eq!(header.value(), "third"),
/// _ => unreachable!("there are only three headers") /// "X-Custom" => {
/// assert_eq!(header.value(), format!("value_{custom}"));
/// custom += 1;
/// },
/// _ => unreachable!("there are only three uniquely named headers")
/// } /// }
/// } /// }
/// ``` /// ```
@ -692,53 +695,6 @@ impl<'h> HeaderMap<'h> {
}) })
} }
/// Consumes `self` and returns an iterator over all of the `Header`s stored
/// in the map. Header names are returned in no specific order, but all
/// values for a given header name are grouped together, and values are in
/// FIFO order.
///
/// # Example
///
/// ```rust
/// # extern crate rocket;
/// use rocket::http::{HeaderMap, Header};
///
/// // The headers we'll be storing.
/// let all_headers = vec![
/// Header::new("X-Custom", "value_1"),
/// Header::new("X-Other", "other"),
/// Header::new("X-Third", "third"),
/// ];
///
/// // Create a map, store all of the headers.
/// let mut map = HeaderMap::new();
/// for header in all_headers {
/// map.add(header)
/// }
///
/// // Ensure there are three headers via the iterator.
/// assert_eq!(map.iter().count(), 3);
///
/// // Actually iterate through them.
/// for header in map.into_iter() {
/// match header.name().as_str() {
/// "X-Custom" => assert_eq!(header.value(), "value_1"),
/// "X-Other" => assert_eq!(header.value(), "other"),
/// "X-Third" => assert_eq!(header.value(), "third"),
/// _ => unreachable!("there are only three headers")
/// }
/// }
/// ```
// TODO: Implement IntoIterator.
#[inline(always)]
pub fn into_iter(self) -> impl Iterator<Item=Header<'h>> {
self.headers.into_iter().flat_map(|(name, value)| {
value.into_iter().map(move |value| {
Header { name: name.clone(), value }
})
})
}
/// Consumes `self` and returns an iterator over all of the headers stored /// Consumes `self` and returns an iterator over all of the headers stored
/// in the map in the way they are stored. This is a low-level mechanism and /// in the map in the way they are stored. This is a low-level mechanism and
/// should likely not be used. /// should likely not be used.
@ -750,6 +706,86 @@ impl<'h> HeaderMap<'h> {
} }
} }
/// Consumes `self` and returns an iterator over all of the `Header`s stored
/// in the map. Header names are returned in no specific order, but all
/// values for a given header name are grouped together, and values are in
/// FIFO order.
///
/// # Example
///
/// ```rust
/// # extern crate rocket;
/// use rocket::http::{HeaderMap, Header};
///
/// // The headers we'll be storing.
/// let all_headers = vec![
/// Header::new("X-Custom", "value_0"),
/// Header::new("X-Custom", "value_1"),
/// Header::new("X-Other", "other"),
/// Header::new("X-Third", "third"),
/// ];
///
/// // Create a map, store all of the headers.
/// let mut map = HeaderMap::new();
/// for header in all_headers {
/// map.add(header)
/// }
///
/// // Ensure there are three headers via the iterator.
/// assert_eq!(map.iter().count(), 4);
///
/// // Actually iterate through them.
/// let mut custom = 0;
/// for header in map.into_iter() {
/// match header.name().as_str() {
/// "X-Other" => assert_eq!(header.value(), "other"),
/// "X-Third" => assert_eq!(header.value(), "third"),
/// "X-Custom" => {
/// assert_eq!(header.value(), format!("value_{custom}"));
/// custom += 1;
/// },
/// _ => unreachable!("there are only three uniquely named headers")
/// }
/// }
/// ```
impl<'h> IntoIterator for HeaderMap<'h> {
type Item = Header<'h>;
type IntoIter = IntoIter<'h>;
fn into_iter(self) -> Self::IntoIter {
IntoIter {
headers: self.headers.into_iter(),
current: None,
}
}
}
/// Owned iterator over [`Header`]s in a [`HeaderMap`].
///
/// See [`HeaderMap::into_iter()`] for details.
pub struct IntoIter<'h> {
headers: indexmap::map::IntoIter<Uncased<'h>, Vec<Cow<'h, str>>>,
current: Option<(Uncased<'h>, std::vec::IntoIter<Cow<'h, str>>)>,
}
impl<'h> Iterator for IntoIter<'h> {
type Item = Header<'h>;
fn next(&mut self) -> Option<Self::Item> {
loop {
if let Some((name, values)) = &mut self.current {
if let Some(value) = values.next() {
return Some(Header { name: name.clone(), value });
}
}
let (name, values) = self.headers.next()?;
self.current = Some((name, values.into_iter()));
}
}
}
impl From<cookie::Cookie<'_>> for Header<'static> { impl From<cookie::Cookie<'_>> for Header<'static> {
fn from(cookie: cookie::Cookie<'_>) -> Header<'static> { fn from(cookie: cookie::Cookie<'_>) -> Header<'static> {
Header::new("Set-Cookie", cookie.encoded().to_string()) Header::new("Set-Cookie", cookie.encoded().to_string())

View File

@ -5,12 +5,9 @@ use std::hash::{Hash, Hasher};
use either::Either; use either::Either;
use crate::ext::IntoCollection;
use crate::uncased::UncasedStr; use crate::uncased::UncasedStr;
use crate::parse::{Indexed, IndexedStr, parse_media_type}; use crate::parse::{Indexed, IndexedStr, parse_media_type};
use smallvec::SmallVec;
/// An HTTP media type. /// An HTTP media type.
/// ///
/// # Usage /// # Usage
@ -58,14 +55,14 @@ pub struct MediaType {
/// The subtype. /// The subtype.
pub(crate) sub: IndexedStr<'static>, pub(crate) sub: IndexedStr<'static>,
/// The parameters, if any. /// The parameters, if any.
pub(crate) params: MediaParams pub(crate) params: MediaParams,
} }
// FIXME: `Static` variant is needed for `const`. Need `const SmallVec::new`. // TODO: `Static` variant is needed for `const`.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) enum MediaParams { pub(crate) enum MediaParams {
Static(&'static [(&'static str, &'static str)]), Static(&'static [(&'static str, &'static str)]),
Dynamic(SmallVec<[(IndexedStr<'static>, IndexedStr<'static>); 2]>) Dynamic(Vec<(IndexedStr<'static>, IndexedStr<'static>)>)
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -309,7 +306,7 @@ impl MediaType {
/// # extern crate rocket; /// # extern crate rocket;
/// use rocket::http::MediaType; /// use rocket::http::MediaType;
/// ///
/// let id = MediaType::new("application", "x-id").with_params(("id", "1")); /// let id = MediaType::new("application", "x-id").with_params([("id", "1")]);
/// assert_eq!(id.to_string(), "application/x-id; id=1".to_string()); /// assert_eq!(id.to_string(), "application/x-id; id=1".to_string());
/// ``` /// ```
/// ///
@ -327,11 +324,12 @@ impl MediaType {
pub fn with_params<K, V, P>(mut self, ps: P) -> MediaType pub fn with_params<K, V, P>(mut self, ps: P) -> MediaType
where K: Into<Cow<'static, str>>, where K: Into<Cow<'static, str>>,
V: Into<Cow<'static, str>>, V: Into<Cow<'static, str>>,
P: IntoCollection<(K, V)> P: IntoIterator<Item = (K, V)>
{ {
use Indexed::Concrete; let params = ps.into_iter()
.map(|(k, v)| (Indexed::Concrete(k.into()), Indexed::Concrete(v.into())))
.collect();
let params = ps.mapped(|(k, v)| (Concrete(k.into()), Concrete(v.into())));
self.params = MediaParams::Dynamic(params); self.params = MediaParams::Dynamic(params);
self self
} }
@ -478,7 +476,7 @@ impl MediaType {
/// use rocket::http::MediaType; /// use rocket::http::MediaType;
/// ///
/// let plain = MediaType::Plain; /// let plain = MediaType::Plain;
/// let plain2 = MediaType::new("text", "plain").with_params(("charset", "utf-8")); /// let plain2 = MediaType::new("text", "plain").with_params([("charset", "utf-8")]);
/// let just_plain = MediaType::new("text", "plain"); /// let just_plain = MediaType::new("text", "plain");
/// ///
/// // The `PartialEq` implementation doesn't consider parameters. /// // The `PartialEq` implementation doesn't consider parameters.
@ -527,7 +525,7 @@ impl MediaType {
let raw = match self.params { let raw = match self.params {
MediaParams::Static(slice) => Either::Left(slice.iter().cloned()), MediaParams::Static(slice) => Either::Left(slice.iter().cloned()),
MediaParams::Dynamic(ref vec) => { MediaParams::Dynamic(ref vec) => {
Either::Right(vec.iter().map(move |&(ref key, ref val)| { Either::Right(vec.iter().map(move |(key, val)| {
let source_str = self.source.as_str(); let source_str = self.source.as_str();
(key.from_source(source_str), val.from_source(source_str)) (key.from_source(source_str), val.from_source(source_str))
})) }))
@ -594,9 +592,19 @@ impl fmt::Display for MediaType {
} }
} }
impl IntoIterator for MediaType {
type Item = Self;
type IntoIter = std::iter::Once<Self>;
fn into_iter(self) -> Self::IntoIter {
std::iter::once(self)
}
}
impl Default for MediaParams { impl Default for MediaParams {
fn default() -> Self { fn default() -> Self {
MediaParams::Dynamic(SmallVec::new()) MediaParams::Dynamic(Vec::new())
} }
} }

View File

@ -33,7 +33,6 @@ pub mod uncased {
#[path = "."] #[path = "."]
pub mod private { pub mod private {
pub use crate::parse::Indexed; pub use crate::parse::Indexed;
pub use smallvec::{SmallVec, Array};
} }
pub use crate::method::Method; pub use crate::method::Method;

View File

@ -140,7 +140,7 @@ mod serde {
use serde_::ser::{Serialize, Serializer}; use serde_::ser::{Serialize, Serializer};
use serde_::de::{Deserialize, Deserializer, Error, Visitor, Unexpected}; use serde_::de::{Deserialize, Deserializer, Error, Visitor, Unexpected};
impl<'a> Serialize for Method { impl Serialize for Method {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(self.as_str()) serializer.serialize_str(self.as_str())
} }

View File

@ -31,7 +31,8 @@ fn weighted_media_type<'a>(input: &mut Input<'a>) -> Result<'a, QMediaType> {
#[parser] #[parser]
fn accept<'a>(input: &mut Input<'a>) -> Result<'a, Accept> { fn accept<'a>(input: &mut Input<'a>) -> Result<'a, Accept> {
Accept(series(|i| surrounded(i, weighted_media_type, is_whitespace), ',')?) let vec = series(|i| surrounded(i, weighted_media_type, is_whitespace), ',')?;
Accept(std::borrow::Cow::Owned(vec))
} }
pub fn parse_accept(input: &str) -> Result<'_, Accept> { pub fn parse_accept(input: &str) -> Result<'_, Accept> {

View File

@ -211,8 +211,8 @@ impl<'a, T: ?Sized + ToOwned + 'a> Indexed<'a, T>
} }
match *self { match *self {
Indexed::Indexed(i, j) => &source.unwrap()[(i as usize)..(j as usize)], Indexed::Indexed(i, j) => &source.unwrap()[i..j],
Indexed::Concrete(ref mstr) => &*mstr, Indexed::Concrete(ref mstr) => mstr,
} }
} }
} }
@ -241,7 +241,7 @@ impl<'a, T: ?Sized + Length + ToOwned + 'a> Length for Indexed<'a, T> {
#[inline(always)] #[inline(always)]
fn len(&self) -> usize { fn len(&self) -> usize {
match *self { match *self {
Indexed::Indexed(a, b) => (b - a) as usize, Indexed::Indexed(a, b) => b.saturating_sub(a),
Indexed::Concrete(ref cow) => cow.len() Indexed::Concrete(ref cow) => cow.len()
} }
} }

View File

@ -79,7 +79,7 @@ fn test_assert_parse_eq() {
#[should_panic] #[should_panic]
fn test_assert_parse_eq_consecutive() { fn test_assert_parse_eq_consecutive() {
assert_parse_eq! { assert_parse_eq! {
"/" => Origin::ROOT, "/" => Origin::root(),
"/" => Asterisk "/" => Asterisk
}; };
} }
@ -130,7 +130,7 @@ fn test_parse_issue_924_samples() {
fn single_byte() { fn single_byte() {
assert_parse_eq!( assert_parse_eq!(
"*" => Asterisk, "*" => Asterisk,
"/" => Origin::ROOT, "/" => Origin::root(),
"." => Authority::new(None, ".", None), "." => Authority::new(None, ".", None),
"_" => Authority::new(None, "_", None), "_" => Authority::new(None, "_", None),
"1" => Authority::new(None, "1", None), "1" => Authority::new(None, "1", None),

View File

@ -370,7 +370,7 @@ impl Eq for Status { }
impl PartialOrd for Status { impl PartialOrd for Status {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.code.partial_cmp(&other.code) Some(self.cmp(other))
} }
} }
@ -387,7 +387,7 @@ mod serde {
use serde_::ser::{Serialize, Serializer}; use serde_::ser::{Serialize, Serializer};
use serde_::de::{Deserialize, Deserializer, Error, Visitor, Unexpected}; use serde_::de::{Deserialize, Deserializer, Error, Visitor, Unexpected};
impl<'a> Serialize for Status { impl Serialize for Status {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_u16(self.code) serializer.serialize_u16(self.code)
} }

View File

@ -309,11 +309,9 @@ impl<'a> Absolute<'a> {
if !self.path().is_normalized(true) { if !self.path().is_normalized(true) {
self.path = self.path().to_normalized(true, true); self.path = self.path().to_normalized(true, true);
} }
} else { } else if !self.path().is_normalized(false) {
if !self.path().is_normalized(false) {
self.path = self.path().to_normalized(false, true); self.path = self.path().to_normalized(false, true);
} }
}
if let Some(query) = self.query() { if let Some(query) = self.query() {
if !query.is_normalized() { if !query.is_normalized() {

View File

@ -2,8 +2,6 @@ use std::fmt::{self, Write};
use std::marker::PhantomData; use std::marker::PhantomData;
use std::borrow::Cow; use std::borrow::Cow;
use smallvec::SmallVec;
use crate::uri::{Absolute, Origin, Reference}; use crate::uri::{Absolute, Origin, Reference};
use crate::uri::fmt::{UriDisplay, Part, Path, Query, Kind}; use crate::uri::fmt::{UriDisplay, Part, Path, Query, Kind};
@ -143,7 +141,7 @@ use crate::uri::fmt::{UriDisplay, Part, Path, Query, Kind};
/// [`write_raw()`]: Formatter::write_raw() /// [`write_raw()`]: Formatter::write_raw()
/// [`refresh()`]: Formatter::refresh() /// [`refresh()`]: Formatter::refresh()
pub struct Formatter<'i, P: Part> { pub struct Formatter<'i, P: Part> {
prefixes: SmallVec<[&'static str; 3]>, prefixes: tinyvec::TinyVec<[&'static str; 3]>,
inner: &'i mut (dyn Write + 'i), inner: &'i mut (dyn Write + 'i),
previous: bool, previous: bool,
fresh: bool, fresh: bool,
@ -155,7 +153,7 @@ impl<'i, P: Part> Formatter<'i, P> {
pub(crate) fn new(inner: &'i mut (dyn Write + 'i)) -> Self { pub(crate) fn new(inner: &'i mut (dyn Write + 'i)) -> Self {
Formatter { Formatter {
inner, inner,
prefixes: SmallVec::new(), prefixes: Default::default(),
previous: false, previous: false,
fresh: true, fresh: true,
_marker: PhantomData, _marker: PhantomData,
@ -357,7 +355,7 @@ impl Formatter<'_, Query> {
} }
} }
f(&mut PrefixGuard::new(prefix, self).0) f(PrefixGuard::new(prefix, self).0)
} }
/// Writes the named value `value` by prefixing `name` followed by `=` to /// Writes the named value `value` by prefixing `name` followed by `=` to

View File

@ -121,7 +121,10 @@ pub struct Origin<'a> {
impl<'a> Origin<'a> { impl<'a> Origin<'a> {
/// The root: `'/'`. /// The root: `'/'`.
#[doc(hidden)] #[doc(hidden)]
pub const ROOT: Origin<'static> = Origin::const_new("/", None); pub fn root() -> &'static Origin<'static> {
static ROOT_ORIGIN: Origin<'static> = Origin::const_new("/", None);
&ROOT_ORIGIN
}
/// SAFETY: `source` must be UTF-8. /// SAFETY: `source` must be UTF-8.
#[inline] #[inline]
@ -218,7 +221,7 @@ impl<'a> Origin<'a> {
if !string.starts_with('/') { if !string.starts_with('/') {
return Err(Error { return Err(Error {
expected: Expected::token(Some(&b'/'), string.as_bytes().get(0).cloned()), expected: Expected::token(Some(&b'/'), string.as_bytes().first().cloned()),
index: 0, index: 0,
}); });
} }

View File

@ -366,11 +366,9 @@ impl<'a> Reference<'a> {
if !self.path().is_normalized(true) { if !self.path().is_normalized(true) {
self.path = self.path().to_normalized(true, true); self.path = self.path().to_normalized(true, true);
} }
} else { } else if !self.path().is_normalized(false) {
if !self.path().is_normalized(false) {
self.path = self.path().to_normalized(false, true); self.path = self.path().to_normalized(false, true);
} }
}
if let Some(query) = self.query() { if let Some(query) = self.query() {
if !query.is_normalized() { if !query.is_normalized() {

View File

@ -235,7 +235,7 @@ impl<'a> Segments<'a, Path> {
} else if cfg!(windows) && segment.contains(':') { } else if cfg!(windows) && segment.contains(':') {
return Err(PathError::BadChar(':')) return Err(PathError::BadChar(':'))
} else { } else {
buf.push(&*segment) buf.push(segment)
} }
} }

View File

@ -334,6 +334,24 @@ macro_rules! impl_uri_from {
} }
} }
} }
impl<'b, $($lt)?> PartialEq<&$T $(<$lt>)?> for Uri<'b> {
fn eq(&self, other: &&$T $(<$lt>)?) -> bool {
match self {
Uri::$T(inner) => inner == *other,
_ => false
}
}
}
impl<'b, $($lt)?> PartialEq<Uri<'b>> for &$T $(<$lt>)? {
fn eq(&self, other: &Uri<'b>) -> bool {
match other {
Uri::$T(inner) => inner == *self,
_ => false
}
}
}
) )
} }

View File

@ -19,6 +19,18 @@ rust-version = "1.75"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
[lints.rust]
rust_2018_idioms = "warn"
# missing_docs = "warn"
async_fn_in_trait = "allow"
refining_impl_trait = "allow"
[lints.clippy]
type_complexity = "allow"
module_inception = "allow"
multiple_bound_locations = "allow"
manual_range_contains = "allow"
[features] [features]
default = ["http2", "tokio-macros"] default = ["http2", "tokio-macros"]
http2 = ["hyper/http2", "hyper-util/http2"] http2 = ["hyper/http2", "hyper-util/http2"]

View File

@ -134,7 +134,7 @@ pub struct Catcher {
// The rank is computed as -(number of nonempty segments in base) => catchers // The rank is computed as -(number of nonempty segments in base) => catchers
// with more nonempty segments have lower ranks => higher precedence. // with more nonempty segments have lower ranks => higher precedence.
fn rank(base: Path<'_>) -> isize { fn rank(base: Path<'_>) -> isize {
-1 * (base.segments().filter(|s| !s.is_empty()).count() as isize) -(base.segments().filter(|s| !s.is_empty()).count() as isize)
} }
impl Catcher { impl Catcher {
@ -184,9 +184,9 @@ impl Catcher {
Catcher { Catcher {
name: None, name: None,
base: uri::Origin::ROOT, base: uri::Origin::root().clone(),
handler: Box::new(handler), handler: Box::new(handler),
rank: rank(uri::Origin::ROOT.path()), rank: rank(uri::Origin::root().path()),
code code
} }
} }

View File

@ -315,7 +315,7 @@ impl Config {
#[cfg(feature = "secrets")] #[cfg(feature = "secrets")]
pub(crate) fn known_secret_key_used(&self) -> bool { pub(crate) fn known_secret_key_used(&self) -> bool {
const KNOWN_SECRET_KEYS: &'static [&'static str] = &[ const KNOWN_SECRET_KEYS: &[&str] = &[
"hPRYyVRiMyxpw5sBB1XeCMN1kFsDCqKvBi2QJxBVHQk=" "hPRYyVRiMyxpw5sBB1XeCMN1kFsDCqKvBi2QJxBVHQk="
]; ];
@ -363,7 +363,7 @@ impl Config {
None => launch_meta_!("Proxy-Proto header: {}", "disabled".paint(VAL)) None => launch_meta_!("Proxy-Proto header: {}", "disabled".paint(VAL))
} }
launch_meta_!("limits: {}", (&self.limits).paint(VAL)); launch_meta_!("limits: {}", self.limits.paint(VAL));
launch_meta_!("temp dir: {}", self.temp_dir.relative().display().paint(VAL)); launch_meta_!("temp dir: {}", self.temp_dir.relative().display().paint(VAL));
launch_meta_!("http/2: {}", (cfg!(feature = "http2").paint(VAL))); launch_meta_!("http/2: {}", (cfg!(feature = "http2").paint(VAL)));

View File

@ -80,7 +80,7 @@ pub struct Ident(Option<String>);
macro_rules! ident { macro_rules! ident {
($value:expr) => { ($value:expr) => {
{ {
#[allow(unknown_lints, eq_op)] #[allow(unknown_lints)]
const _: [(); 0 - !{ const _: [(); 0 - !{
const ASSERT: bool = $crate::http::Header::is_valid_value($value, false); const ASSERT: bool = $crate::http::Header::is_valid_value($value, false);
ASSERT ASSERT

View File

@ -22,7 +22,7 @@ impl<const N: usize, R: AsyncRead + Unpin> Peekable<N, R> {
let to_read = std::cmp::min(N, num); let to_read = std::cmp::min(N, num);
if self.buffer.len() >= to_read { if self.buffer.len() >= to_read {
return &self.buffer.as_slice(); return self.buffer.as_slice();
} }
if self.buffer.capacity() == 0 { if self.buffer.capacity() == 0 {

View File

@ -157,13 +157,13 @@ impl<'a, 'b> Deref for TransformBuf<'a, 'b> {
type Target = ReadBuf<'b>; type Target = ReadBuf<'b>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
&self.buf self.buf
} }
} }
impl<'a, 'b> DerefMut for TransformBuf<'a, 'b> { impl<'a, 'b> DerefMut for TransformBuf<'a, 'b> {
fn deref_mut(&mut self) -> &mut Self::Target { fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.buf self.buf
} }
} }

View File

@ -67,11 +67,11 @@ impl ErasedRequest {
let rocket: Arc<Rocket<Orbit>> = rocket; let rocket: Arc<Rocket<Orbit>> = rocket;
let parts: Box<Parts> = Box::new(parts); let parts: Box<Parts> = Box::new(parts);
let request: Request<'_> = { let request: Request<'_> = {
let rocket: &Rocket<Orbit> = &*rocket; let rocket: &Rocket<Orbit> = &rocket;
let rocket: &'static Rocket<Orbit> = unsafe { transmute(rocket) }; let rocket: &'static Rocket<Orbit> = unsafe { transmute(rocket) };
let parts: &Parts = &*parts; let parts: &Parts = &parts;
let parts: &'static Parts = unsafe { transmute(parts) }; let parts: &'static Parts = unsafe { transmute(parts) };
constructor(&rocket, &parts) constructor(rocket, parts)
}; };
ErasedRequest { _rocket: rocket, _parts: parts, request, } ErasedRequest { _rocket: rocket, _parts: parts, request, }
@ -99,7 +99,7 @@ impl ErasedRequest {
let mut parent = Arc::new(self); let mut parent = Arc::new(self);
let token: T = { let token: T = {
let parent: &mut ErasedRequest = Arc::get_mut(&mut parent).unwrap(); let parent: &mut ErasedRequest = Arc::get_mut(&mut parent).unwrap();
let rocket: &Rocket<Orbit> = &*parent._rocket; let rocket: &Rocket<Orbit> = &parent._rocket;
let request: &mut Request<'_> = &mut parent.request; let request: &mut Request<'_> = &mut parent.request;
let data: &mut Data<'_> = &mut data; let data: &mut Data<'_> = &mut data;
preprocess(rocket, request, data).await preprocess(rocket, request, data).await
@ -107,22 +107,22 @@ impl ErasedRequest {
let parent = parent; let parent = parent;
let response: Response<'_> = { let response: Response<'_> = {
let parent: &ErasedRequest = &*parent; let parent: &ErasedRequest = &parent;
let parent: &'static ErasedRequest = unsafe { transmute(parent) }; let parent: &'static ErasedRequest = unsafe { transmute(parent) };
let rocket: &Rocket<Orbit> = &*parent._rocket; let rocket: &Rocket<Orbit> = &parent._rocket;
let request: &Request<'_> = &parent.request; let request: &Request<'_> = &parent.request;
dispatch(token, rocket, request, data).await dispatch(token, rocket, request, data).await
}; };
ErasedResponse { ErasedResponse {
_request: parent, _request: parent,
response: response, response,
} }
} }
} }
impl ErasedResponse { impl ErasedResponse {
pub fn inner<'a>(&'a self) -> &'a Response<'a> { pub fn inner(&self) -> &Response<'_> {
static_assert_covariance!(Response); static_assert_covariance!(Response);
&self.response &self.response
} }
@ -135,7 +135,7 @@ impl ErasedResponse {
f(&mut self.response) f(&mut self.response)
} }
pub fn to_io_handler<'a>( pub fn make_io_handler<'a>(
&'a mut self, &'a mut self,
constructor: impl for<'r> FnOnce( constructor: impl for<'r> FnOnce(
&'r Request<'r>, &'r Request<'r>,
@ -144,7 +144,7 @@ impl ErasedResponse {
) -> Option<ErasedIoHandler> { ) -> Option<ErasedIoHandler> {
let parent: Arc<ErasedRequest> = self._request.clone(); let parent: Arc<ErasedRequest> = self._request.clone();
let io: Option<Box<dyn IoHandler + '_>> = { let io: Option<Box<dyn IoHandler + '_>> = {
let parent: &ErasedRequest = &*parent; let parent: &ErasedRequest = &parent;
let parent: &'static ErasedRequest = unsafe { transmute(parent) }; let parent: &'static ErasedRequest = unsafe { transmute(parent) };
let request: &Request<'_> = &parent.request; let request: &Request<'_> = &parent.request;
constructor(request, &mut self.response) constructor(request, &mut self.response)

View File

@ -92,7 +92,7 @@ pub enum ErrorKind {
InsecureSecretKey(Profile), InsecureSecretKey(Profile),
/// Liftoff failed. Contains the Rocket instance that failed to shutdown. /// Liftoff failed. Contains the Rocket instance that failed to shutdown.
Liftoff( Liftoff(
Result<Rocket<Ignite>, Arc<Rocket<Orbit>>>, Result<Box<Rocket<Ignite>>, Arc<Rocket<Orbit>>>,
Box<dyn StdError + Send + 'static> Box<dyn StdError + Send + 'static>
), ),
/// Shutdown failed. Contains the Rocket instance that failed to shutdown. /// Shutdown failed. Contains the Rocket instance that failed to shutdown.
@ -196,7 +196,7 @@ impl Error {
if collisions.is_empty() { return } if collisions.is_empty() { return }
error!("Rocket failed to launch due to the following {} collisions:", kind); error!("Rocket failed to launch due to the following {} collisions:", kind);
for &(ref a, ref b) in collisions { for (a, b) in collisions {
info_!("{} {} {}", a, "collides with".red().italic(), b) info_!("{} {} {}", a, "collides with".red().italic(), b)
} }
} }
@ -336,7 +336,7 @@ pub(crate) fn log_server_error(error: &(dyn StdError + 'static)) {
} }
} }
let mut error: &(dyn StdError + 'static) = &*error; let mut error: &(dyn StdError + 'static) = error;
if error.downcast_ref::<hyper::Error>().is_some() { if error.downcast_ref::<hyper::Error>().is_some() {
warn!("{}", ServerError(error)); warn!("{}", ServerError(error));
while let Some(source) = error.source() { while let Some(source) = error.source() {

View File

@ -10,18 +10,21 @@ mod private {
/// [`local_cache`](crate::request::local_cache) must implement this trait. /// [`local_cache`](crate::request::local_cache) must implement this trait.
/// Since this trait is sealed, the types implementing this trait are known /// Since this trait is sealed, the types implementing this trait are known
/// and finite: `String` and `Vec<T> for all T: Sync + Send + 'static`. /// and finite: `String` and `Vec<T> for all T: Sync + Send + 'static`.
// UNSAFE: Needs to have a stable address when deref'd. ///
/// # Safety
///
/// Types implementing this trait must have a stable address when deref'd.
pub unsafe trait Shareable: std::ops::Deref + Sync + Send + 'static { pub unsafe trait Shareable: std::ops::Deref + Sync + Send + 'static {
/// The current length of the owned shareable. /// The current size of the owned shareable.
fn len(&self) -> usize; fn size(&self) -> usize;
} }
unsafe impl Shareable for String { unsafe impl Shareable for String {
fn len(&self) -> usize { self.len() } fn size(&self) -> usize { self.len() }
} }
unsafe impl<T: Send + Sync + 'static> Shareable for Vec<T> { unsafe impl<T: Send + Sync + 'static> Shareable for Vec<T> {
fn len(&self) -> usize { self.len() } fn size(&self) -> usize { self.len() }
} }
} }
@ -36,8 +39,8 @@ pub struct SharedStack<T: Shareable> {
} }
impl<T: Shareable> SharedStack<T> impl<T: Shareable> SharedStack<T>
where T::Target: Index<RangeFrom<usize>, Output = T::Target> + where T::Target: Index<RangeFrom<usize>, Output = T::Target>
Index<RangeTo<usize>, Output = T::Target> + Index<RangeTo<usize>, Output = T::Target>
{ {
/// Creates a new stack. /// Creates a new stack.
pub fn new() -> Self { pub fn new() -> Self {
@ -101,11 +104,11 @@ impl<T: Shareable> SharedStack<T>
/// Pushes the strings `a` and `b` onto the stack without allocating for /// Pushes the strings `a` and `b` onto the stack without allocating for
/// both strings. Returns references to the two strings on the stack. /// both strings. Returns references to the two strings on the stack.
pub(crate) fn push_two<'a, V>(&'a self, a: V, b: V) -> (&'a T::Target, &'a T::Target) pub(crate) fn push_two<V>(&self, a: V, b: V) -> (&T::Target, &T::Target)
where T: From<V> + Extend<V>, where T: From<V> + Extend<V>,
{ {
let mut value = T::from(a); let mut value = T::from(a);
let split_len = value.len(); let split_len = value.size();
value.extend(Some(b)); value.extend(Some(b));
self.push_split(value, split_len) self.push_split(value, split_len)
} }

View File

@ -130,7 +130,7 @@ impl<'v> Context<'v> {
/// } /// }
/// ``` /// ```
pub fn field_value<N: AsRef<Name>>(&self, name: N) -> Option<&'v str> { pub fn field_value<N: AsRef<Name>>(&self, name: N) -> Option<&'v str> {
self.values.get(name.as_ref())?.get(0).cloned() self.values.get(name.as_ref())?.first().cloned()
} }
/// Returns the values, if any, submitted for the _value_ field named /// Returns the values, if any, submitted for the _value_ field named
@ -179,8 +179,7 @@ impl<'v> Context<'v> {
/// ``` /// ```
pub fn errors(&self) -> impl Iterator<Item = &Error<'v>> { pub fn errors(&self) -> impl Iterator<Item = &Error<'v>> {
self.errors.values() self.errors.values()
.map(|e| e.iter()) .flat_map(|e| e.iter())
.flatten()
.chain(self.form_errors.iter()) .chain(self.form_errors.iter())
} }
@ -224,8 +223,7 @@ impl<'v> Context<'v> {
where N: AsRef<Name> + 'a where N: AsRef<Name> + 'a
{ {
self.errors.values() self.errors.values()
.map(|e| e.iter()) .flat_map(|e| e.iter())
.flatten()
.filter(move |e| e.is_for(&name)) .filter(move |e| e.is_for(&name))
} }
@ -273,8 +271,7 @@ impl<'v> Context<'v> {
where N: AsRef<Name> + 'a where N: AsRef<Name> + 'a
{ {
self.errors.values() self.errors.values()
.map(|e| e.iter()) .flat_map(|e| e.iter())
.flatten()
.filter(move |e| e.is_for_exactly(&name)) .filter(move |e| e.is_for_exactly(&name))
} }
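Note: the three `.map(|e| e.iter()).flatten()` → `.flat_map(|e| e.iter())` rewrites above collapse a two-step adapter chain into one; this matches what clippy's `map_flatten` lint suggests (the lint name is an assumption, the diff does not state it). A minimal standalone sketch of the same pattern:

    #[allow(clippy::map_flatten)]
    fn main() {
        let groups: Vec<Vec<u32>> = vec![vec![1, 2], vec![3]];

        // Two adapters where one suffices: map to an iterator, then flatten.
        let old: Vec<u32> = groups.iter().map(|g| g.iter().copied()).flatten().collect();

        // `flat_map` performs both steps in a single adapter.
        let new: Vec<u32> = groups.iter().flat_map(|g| g.iter().copied()).collect();

        assert_eq!(old, new);
    }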

View File

@ -727,7 +727,7 @@ impl<'v> Error<'v> {
| Multipart(FieldSizeExceeded { .. }) | Multipart(FieldSizeExceeded { .. })
| Multipart(StreamSizeExceeded { .. }) => Status::PayloadTooLarge, | Multipart(StreamSizeExceeded { .. }) => Status::PayloadTooLarge,
Unknown => Status::InternalServerError, Unknown => Status::InternalServerError,
Io(_) | _ if self.entity == Entity::Form => Status::BadRequest, Io(_) if self.entity == Entity::Form => Status::BadRequest,
Custom(status, _) => status, Custom(status, _) => status,
_ => Status::UnprocessableEntity _ => Status::UnprocessableEntity
} }

View File

@ -810,8 +810,8 @@ impl<'v, K, V> FromForm<'v> for BTreeMap<K, V>
impl<'v, T: FromForm<'v>> FromForm<'v> for Option<T> { impl<'v, T: FromForm<'v>> FromForm<'v> for Option<T> {
type Context = <T as FromForm<'v>>::Context; type Context = <T as FromForm<'v>>::Context;
fn init(opts: Options) -> Self::Context { fn init(_: Options) -> Self::Context {
T::init(Options { strict: true, ..opts }) T::init(Options { strict: true })
} }
fn push_value(ctxt: &mut Self::Context, field: ValueField<'v>) { fn push_value(ctxt: &mut Self::Context, field: ValueField<'v>) {

View File

@ -78,8 +78,8 @@ impl<'v, T: FromForm<'v>> FromForm<'v> for Lenient<T> {
type Context = T::Context; type Context = T::Context;
#[inline(always)] #[inline(always)]
fn init(opts: Options) -> Self::Context { fn init(_: Options) -> Self::Context {
T::init(Options { strict: false, ..opts }) T::init(Options { strict: false })
} }
#[inline(always)] #[inline(always)]

View File

@ -71,7 +71,7 @@ impl Key {
/// assert_eq!(key.as_str(), "a:b:c"); /// assert_eq!(key.as_str(), "a:b:c");
/// ``` /// ```
pub fn as_str(&self) -> &str { pub fn as_str(&self) -> &str {
&*self self
} }
} }

View File

@ -158,7 +158,7 @@ impl<'v> NameView<'v> {
let string = &self.name[self.end..]; let string = &self.name[self.end..];
let bytes = string.as_bytes(); let bytes = string.as_bytes();
let shift = match bytes.get(0) { let shift = match bytes.first() {
None | Some(b'=') => 0, None | Some(b'=') => 0,
Some(b'[') => match memchr::memchr(b']', bytes) { Some(b'[') => match memchr::memchr(b']', bytes) {
Some(j) => j + 1, Some(j) => j + 1,
@ -243,7 +243,7 @@ impl<'v> NameView<'v> {
/// ``` /// ```
pub fn key_lossy(&self) -> &'v Key { pub fn key_lossy(&self) -> &'v Key {
let view = &self.name[self.start..self.end]; let view = &self.name[self.start..self.end];
let key = match view.as_bytes().get(0) { let key = match view.as_bytes().first() {
Some(b'.') => &view[1..], Some(b'.') => &view[1..],
Some(b'[') if view.ends_with(']') => &view[1..view.len() - 1], Some(b'[') if view.ends_with(']') => &view[1..view.len() - 1],
Some(b'[') if self.is_at_last() => &view[1..], Some(b'[') if self.is_at_last() => &view[1..],
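Note: both `as_bytes().get(0)` calls above become `.first()`, presumably clippy's `get_first` lint, which prefers the named accessor for the first element. A small sketch of the equivalence:

    #[allow(clippy::get_first)]
    fn main() {
        let bytes = b"[a]=1";

        let via_get = bytes.get(0);    // flagged form
        let via_first = bytes.first(); // suggested form

        assert_eq!(via_get, via_first);
        assert_eq!(via_first, Some(&b'['));
    }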

View File

@ -93,8 +93,8 @@ impl<'v, T: FromForm<'v>> FromForm<'v> for Strict<T> {
type Context = T::Context; type Context = T::Context;
#[inline(always)] #[inline(always)]
fn init(opts: Options) -> Self::Context { fn init(_: Options) -> Self::Context {
T::init(Options { strict: true, ..opts }) T::init(Options { strict: true })
} }
#[inline(always)] #[inline(always)]
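Note: the `Option`, `Lenient`, and `Strict` `init()` changes above all drop a `..opts` struct update that overwrote every field anyway, which in turn lets the parameter become `_`. A minimal sketch, under the assumption that the forms `Options` struct carries only the `strict` flag (this is what clippy's `needless_update` lint points at, as I understand it):

    // Stand-in for the forms `Options`; assumed to carry only `strict`.
    #[derive(Copy, Clone, Debug, PartialEq)]
    struct Options { strict: bool }

    // `Options { strict: true, ..opts }` updates nothing once `strict` is set,
    // so the literal is written out and the argument is ignored.
    fn init_strict(_: Options) -> Options {
        Options { strict: true }
    }

    fn main() {
        assert_eq!(init_strict(Options { strict: false }), Options { strict: true });
    }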

View File

@ -356,6 +356,27 @@ impl<'v> TempFile<'v> {
} }
} }
/// Returns whether the file is empty.
///
/// This is equivalent to `file.len() == 0`.
///
/// This method does not perform any system calls.
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// use rocket::fs::TempFile;
///
/// #[post("/", data = "<file>")]
/// fn handler(file: TempFile<'_>) {
/// if file.is_empty() {
/// assert_eq!(file.len(), 0);
/// }
/// }
/// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns the size, in bytes, of the file. /// Returns the size, in bytes, of the file.
/// ///
/// This method does not perform any system calls. /// This method does not perform any system calls.
@ -490,7 +511,7 @@ impl<'v> TempFile<'v> {
) -> io::Result<Capped<TempFile<'a>>> { ) -> io::Result<Capped<TempFile<'a>>> {
let limit = content_type.as_ref() let limit = content_type.as_ref()
.and_then(|ct| ct.extension()) .and_then(|ct| ct.extension())
.and_then(|ext| req.limits().find(&["file", ext.as_str()])) .and_then(|ext| req.limits().find(["file", ext.as_str()]))
.or_else(|| req.limits().get("file")) .or_else(|| req.limits().get("file"))
.unwrap_or(Limits::FILE); .unwrap_or(Limits::FILE);

View File

@ -252,11 +252,11 @@ impl<'a> CookieJar<'a> {
/// ``` /// ```
pub fn get_pending(&self, name: &str) -> Option<Cookie<'static>> { pub fn get_pending(&self, name: &str) -> Option<Cookie<'static>> {
let ops = self.ops.lock(); let ops = self.ops.lock();
for op in ops.iter().rev().filter(|op| op.cookie().name() == name) { if let Some(op) = ops.iter().rev().find(|op| op.cookie().name() == name) {
match op { return match op {
Op::Add(c, _) => return Some(c.clone()), Op::Add(c, _) => Some(c.clone()),
Op::Remove(_) => return None, Op::Remove(_) => None,
} };
} }
drop(ops); drop(ops);
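Note: in `get_pending()` above, every arm of the loop body returned, so the `for` loop could only ever run once; rewriting it as `find` plus `if let` states that directly (likely clippy's `never_loop` lint, though the diff does not name it). A simplified sketch of the same shape, with stand-in types:

    // Returns the most recent pending value for `name`, if any.
    fn get_pending(ops: &[(&str, Option<u32>)], name: &str) -> Option<u32> {
        // Before: `for op in ops.iter().rev() { match op { ... return ... } }`
        if let Some((_, value)) = ops.iter().rev().find(|(n, _)| *n == name) {
            return *value;
        }
        None
    }

    fn main() {
        let ops = [("a", Some(1)), ("b", None), ("a", Some(3))];
        assert_eq!(get_pending(&ops, "a"), Some(3));
        assert_eq!(get_pending(&ops, "b"), None);
        assert_eq!(get_pending(&ops, "c"), None);
    }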

View File

@ -6,11 +6,6 @@
#![cfg_attr(nightly, feature(doc_cfg))] #![cfg_attr(nightly, feature(doc_cfg))]
#![cfg_attr(nightly, feature(decl_macro))] #![cfg_attr(nightly, feature(decl_macro))]
#![warn(rust_2018_idioms)]
// #![warn(missing_docs)]
#![allow(async_fn_in_trait)]
#![allow(refining_impl_trait)]
//! # Rocket - Core API Documentation //! # Rocket - Core API Documentation
//! //!
//! Hello, and welcome to the core Rocket API documentation! //! Hello, and welcome to the core Rocket API documentation!
@ -209,7 +204,7 @@ mod erased;
#[doc(inline)] #[doc(inline)]
pub use async_trait::async_trait; pub use async_trait::async_trait;
const WORKER_PREFIX: &'static str = "rocket-worker"; const WORKER_PREFIX: &str = "rocket-worker";
/// Creates a [`Rocket`] instance with the default config provider: aliases /// Creates a [`Rocket`] instance with the default config provider: aliases
/// [`Rocket::build()`]. /// [`Rocket::build()`].
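Note: the `WORKER_PREFIX` change drops the explicit `'static`: constants and statics already have a `'static` lifetime, so writing it out is redundant (clippy's `redundant_static_lifetimes`). A one-line sketch:

    const WORKER_PREFIX: &str = "rocket-worker"; // was: &'static str

    fn main() {
        assert!(WORKER_PREFIX.starts_with("rocket-"));
    }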

View File

@ -33,7 +33,7 @@ async fn catch_handle<Fut, T, F>(name: Option<&str>, run: F) -> Option<T>
} }
let run = std::panic::AssertUnwindSafe(run); let run = std::panic::AssertUnwindSafe(run);
let fut = std::panic::catch_unwind(move || run()) let fut = std::panic::catch_unwind(run)
.map_err(|e| panic_info!(name, e)) .map_err(|e| panic_info!(name, e))
.ok()?; .ok()?;

View File

@ -41,7 +41,7 @@ impl<A: Connection, B: Connection> Connection for Either<A, B> {
impl Certificates<'_> { impl Certificates<'_> {
pub fn into_owned(self) -> Certificates<'static> { pub fn into_owned(self) -> Certificates<'static> {
let cow = self.0.into_iter() let cow = self.0.iter()
.map(|der| der.clone().into_owned()) .map(|der| der.clone().into_owned())
.collect::<Vec<_>>() .collect::<Vec<_>>()
.into(); .into();

View File

@ -23,7 +23,10 @@ impl DefaultListener {
pub(crate) fn base_bindable(&self) -> Result<BaseBindable, crate::Error> { pub(crate) fn base_bindable(&self) -> Result<BaseBindable, crate::Error> {
match &self.address { match &self.address {
Endpoint::Tcp(mut address) => { Endpoint::Tcp(mut address) => {
self.port.map(|port| address.set_port(port)); if let Some(port) = self.port {
address.set_port(port);
}
Ok(BaseBindable::Left(address)) Ok(BaseBindable::Left(address))
}, },
#[cfg(unix)] #[cfg(unix)]
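Note: the listener change above replaces `self.port.map(|port| address.set_port(port))`, which calls `.map()` only for its side effect and discards the resulting `Option<()>`, with an explicit `if let` (presumably clippy's `option_map_unit_fn`). A standalone sketch:

    use std::net::SocketAddr;

    fn main() {
        let port_override: Option<u16> = Some(8000);
        let mut address: SocketAddr = ([127, 0, 0, 1], 80).into();

        // Before: `port_override.map(|port| address.set_port(port));`
        if let Some(port) = port_override {
            address.set_port(port);
        }

        assert_eq!(address.port(), 8000);
    }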

View File

@ -112,10 +112,10 @@ impl Endpoint {
pub fn downcast<T: 'static>(&self) -> Option<&T> { pub fn downcast<T: 'static>(&self) -> Option<&T> {
match self { match self {
Endpoint::Tcp(addr) => (&*addr as &dyn Any).downcast_ref(), Endpoint::Tcp(addr) => (addr as &dyn Any).downcast_ref(),
Endpoint::Quic(addr) => (&*addr as &dyn Any).downcast_ref(), Endpoint::Quic(addr) => (addr as &dyn Any).downcast_ref(),
Endpoint::Unix(addr) => (&*addr as &dyn Any).downcast_ref(), Endpoint::Unix(addr) => (addr as &dyn Any).downcast_ref(),
Endpoint::Custom(addr) => (&*addr as &dyn Any).downcast_ref(), Endpoint::Custom(addr) => (addr as &dyn Any).downcast_ref(),
Endpoint::Tls(inner, ..) => inner.downcast(), Endpoint::Tls(inner, ..) => inner.downcast(),
} }
} }

View File

@ -100,7 +100,7 @@ impl<L> Listener for TlsListener<L>
type Connection = TlsStream<L::Connection>; type Connection = TlsStream<L::Connection>;
async fn accept(&self) -> io::Result<Self::Accept> { async fn accept(&self) -> io::Result<Self::Accept> {
Ok(self.listener.accept().await?) self.listener.accept().await
} }
async fn connect(&self, conn: L::Connection) -> io::Result<Self::Connection> { async fn connect(&self, conn: L::Connection) -> io::Result<Self::Connection> {

View File

@ -92,14 +92,14 @@ impl Client {
pub(crate) fn _with_raw_cookies<F, T>(&self, f: F) -> T pub(crate) fn _with_raw_cookies<F, T>(&self, f: F) -> T
where F: FnOnce(&cookie::CookieJar) -> T where F: FnOnce(&cookie::CookieJar) -> T
{ {
f(&*self.cookies.read()) f(&self.cookies.read())
} }
#[inline(always)] #[inline(always)]
pub(crate) fn _with_raw_cookies_mut<F, T>(&self, f: F) -> T pub(crate) fn _with_raw_cookies_mut<F, T>(&self, f: F) -> T
where F: FnOnce(&mut cookie::CookieJar) -> T where F: FnOnce(&mut cookie::CookieJar) -> T
{ {
f(&mut *self.cookies.write()) f(&mut self.cookies.write())
} }
#[inline(always)] #[inline(always)]

View File

@ -126,15 +126,15 @@ impl LocalResponse<'_> {
} }
#[cfg(feature = "json")] #[cfg(feature = "json")]
async fn _into_json<T: Send + 'static>(self) -> Option<T> async fn _into_json<T>(self) -> Option<T>
where T: serde::de::DeserializeOwned where T: Send + serde::de::DeserializeOwned + 'static
{ {
self.blocking_read(|r| serde_json::from_reader(r)).await?.ok() self.blocking_read(|r| serde_json::from_reader(r)).await?.ok()
} }
#[cfg(feature = "msgpack")] #[cfg(feature = "msgpack")]
async fn _into_msgpack<T: Send + 'static>(self) -> Option<T> async fn _into_msgpack<T>(self) -> Option<T>
where T: serde::de::DeserializeOwned where T: Send + serde::de::DeserializeOwned + 'static
{ {
self.blocking_read(|r| rmp_serde::from_read(r)).await?.ok() self.blocking_read(|r| rmp_serde::from_read(r)).await?.ok()
} }
@ -180,7 +180,7 @@ impl LocalResponse<'_> {
// TODO: Try to fill as much as the buffer before send it off? // TODO: Try to fill as much as the buffer before send it off?
let mut buf = Vec::with_capacity(1024); let mut buf = Vec::with_capacity(1024);
match self.read_buf(&mut buf).await { match self.read_buf(&mut buf).await {
Ok(n) if n == 0 => break, Ok(0) => break,
Ok(_) => tx.send(Ok(buf)).await.ok()?, Ok(_) => tx.send(Ok(buf)).await.ok()?,
Err(e) => { Err(e) => {
tx.send(Err(e)).await.ok()?; tx.send(Err(e)).await.ok()?;

View File

@ -56,7 +56,7 @@ pub struct LocalResponse<'c> {
impl LocalResponse<'_> { impl LocalResponse<'_> {
fn _response(&self) -> &Response<'_> { fn _response(&self) -> &Response<'_> {
&self.inner._response() self.inner._response()
} }
pub(crate) fn _cookies(&self) -> &CookieJar<'_> { pub(crate) fn _cookies(&self) -> &CookieJar<'_> {

View File

@ -120,7 +120,7 @@ impl log::Log for RocketLogger {
// Downgrade a physical launch `warn` to logical `info`. // Downgrade a physical launch `warn` to logical `info`.
let level = is_launch_record(record.metadata()) let level = is_launch_record(record.metadata())
.then(|| log::Level::Info) .then_some(log::Level::Info)
.unwrap_or_else(|| record.level()); .unwrap_or_else(|| record.level());
match level { match level {
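Note: `then(|| log::Level::Info)` → `then_some(log::Level::Info)` above is the usual fix for clippy's `unnecessary_lazy_evaluations`: when the closure does no work, the eager variant is simpler. The same rewrite appears in several example crates later in this diff. A tiny sketch:

    #[allow(clippy::unnecessary_lazy_evaluations)]
    fn main() {
        let rows_affected = 1;

        let old = (rows_affected == 1).then(|| ());     // flagged form
        let new = (rows_affected == 1).then_some(());   // suggested form

        assert_eq!(old, new);
        assert_eq!(new, Some(()));
    }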

View File

@ -126,8 +126,8 @@ impl<'r> FromRequest<'r> for Certificate<'r> {
impl<'a> Certificate<'a> { impl<'a> Certificate<'a> {
/// PRIVATE: For internal Rocket use only! /// PRIVATE: For internal Rocket use only!
fn parse<'r>(chain: &'r [CertificateDer<'r>]) -> Result<Certificate<'r>> { fn parse<'r>(chain: &'r [CertificateDer<'r>]) -> Result<Certificate<'r>> {
let data = chain.first().ok_or_else(|| Error::Empty)?; let data = chain.first().ok_or(Error::Empty)?;
let x509 = Certificate::parse_one(&*data)?; let x509 = Certificate::parse_one(data)?;
Ok(Certificate { x509, data }) Ok(Certificate { x509, data })
} }
@ -267,7 +267,7 @@ impl<'a> Certificate<'a> {
/// } /// }
/// ``` /// ```
pub fn extensions(&self) -> &[x509::X509Extension<'a>] { pub fn extensions(&self) -> &[x509::X509Extension<'a>] {
&self.inner().extensions() self.inner().extensions()
} }
/// Checks if the certificate has the serial number `number`. /// Checks if the certificate has the serial number `number`.
@ -318,7 +318,7 @@ impl<'a> Certificate<'a> {
/// } /// }
/// ``` /// ```
pub fn as_bytes(&self) -> &'a [u8] { pub fn as_bytes(&self) -> &'a [u8] {
&*self.data self.data
} }
} }

View File

@ -154,7 +154,7 @@ impl MtlsConfig {
pub fn ca_certs(&self) -> either::Either<std::path::PathBuf, &[u8]> { pub fn ca_certs(&self) -> either::Either<std::path::PathBuf, &[u8]> {
match &self.ca_certs { match &self.ca_certs {
Either::Left(path) => either::Either::Left(path.relative()), Either::Left(path) => either::Either::Left(path.relative()),
Either::Right(bytes) => either::Either::Right(&bytes), Either::Right(bytes) => either::Either::Right(bytes),
} }
} }

View File

@ -354,7 +354,7 @@ impl<'r> Request<'r> {
/// ``` /// ```
#[inline(always)] #[inline(always)]
pub fn set_remote(&mut self, endpoint: Endpoint) { pub fn set_remote(&mut self, endpoint: Endpoint) {
self.connection.peer_endpoint = Some(endpoint.into()); self.connection.peer_endpoint = Some(endpoint);
} }
/// Returns the IP address of the configured /// Returns the IP address of the configured
@ -723,7 +723,7 @@ impl<'r> Request<'r> {
/// ``` /// ```
#[inline(always)] #[inline(always)]
pub fn rocket(&self) -> &'r Rocket<Orbit> { pub fn rocket(&self) -> &'r Rocket<Orbit> {
&self.state.rocket self.state.rocket
} }
/// Returns the configured application data limits. /// Returns the configured application data limits.
@ -988,7 +988,7 @@ impl<'r> Request<'r> {
pub fn query_value<'a, T>(&'a self, name: &str) -> Option<form::Result<'a, T>> pub fn query_value<'a, T>(&'a self, name: &str) -> Option<form::Result<'a, T>>
where T: FromForm<'a> where T: FromForm<'a>
{ {
if self.query_fields().find(|f| f.name == name).is_none() { if !self.query_fields().any(|f| f.name == name) {
return None; return None;
} }
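Note: `query_value()` now asks `!self.query_fields().any(..)` instead of `.find(..).is_none()`; this is the rewrite clippy's `search_is_some` lint suggests (lint name assumed). Sketch:

    #[allow(clippy::search_is_some)]
    fn main() {
        let fields = ["name", "age"];

        let absent_old = fields.iter().find(|f| **f == "color").is_none();
        let absent_new = !fields.iter().any(|f| *f == "color");

        assert_eq!(absent_old, absent_new);
        assert!(absent_new);
    }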
@ -1112,7 +1112,7 @@ impl<'r> Request<'r> {
}) })
.unwrap_or_else(|| { .unwrap_or_else(|| {
errors.push(RequestError::InvalidUri(hyper.uri.clone())); errors.push(RequestError::InvalidUri(hyper.uri.clone()));
Origin::ROOT Origin::root().clone()
}); });
// Construct the request object; fill in metadata and headers next. // Construct the request object; fill in metadata and headers next.

View File

@ -53,7 +53,7 @@ use crate::http::Status;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub struct Created<R>(Cow<'static, str>, Option<R>, Option<u64>); pub struct Created<R>(Cow<'static, str>, Option<R>, Option<u64>);
impl<'r, R> Created<R> { impl<R> Created<R> {
/// Constructs a `Created` response with a `location` and no body. /// Constructs a `Created` response with a `location` and no body.
/// ///
/// # Example /// # Example

View File

@ -177,11 +177,10 @@ fn skip<T: AsRef<[u8]> + Unpin>(buf: &mut Take<Cursor<T>>) {
buf.get_mut().set_position(pos + 1); buf.get_mut().set_position(pos + 1);
} }
} }
_ => return, _ => (),
} }
} }
macro_rules! dbg_assert_ready { macro_rules! dbg_assert_ready {
($e:expr) => ({ ($e:expr) => ({
let poll = $e; let poll = $e;

View File

@ -698,7 +698,7 @@ impl Rocket<Ignite> {
rocket.shutdown.spawn_listener(&rocket.config.shutdown); rocket.shutdown.spawn_listener(&rocket.config.shutdown);
if let Err(e) = tokio::spawn(Rocket::liftoff(rocket.clone())).await { if let Err(e) = tokio::spawn(Rocket::liftoff(rocket.clone())).await {
let rocket = rocket.try_wait_shutdown().await; let rocket = rocket.try_wait_shutdown().await.map(Box::new);
return Err(ErrorKind::Liftoff(rocket, Box::new(e)).into()); return Err(ErrorKind::Liftoff(rocket, Box::new(e)).into());
} }
@ -734,7 +734,7 @@ impl Rocket<Orbit> {
info!("Shutting down. Waiting for shutdown fairings and pending I/O..."); info!("Shutting down. Waiting for shutdown fairings and pending I/O...");
tokio::spawn({ tokio::spawn({
let rocket = self.clone(); let rocket = self.clone();
async move { rocket.fairings.handle_shutdown(&*rocket).await } async move { rocket.fairings.handle_shutdown(&rocket).await }
}); });
let config = &self.config.shutdown; let config = &self.config.shutdown;

View File

@ -148,14 +148,14 @@ impl Catcher {
impl Collide for Route { impl Collide for Route {
#[inline(always)] #[inline(always)]
fn collides_with(&self, other: &Route) -> bool { fn collides_with(&self, other: &Route) -> bool {
Route::collides_with(&self, other) Route::collides_with(self, other)
} }
} }
impl Collide for Catcher { impl Collide for Catcher {
#[inline(always)] #[inline(always)]
fn collides_with(&self, other: &Self) -> bool { fn collides_with(&self, other: &Self) -> bool {
Catcher::collides_with(&self, other) Catcher::collides_with(self, other)
} }
} }

View File

@ -150,11 +150,9 @@ fn paths_match(route: &Route, req: &Request<'_>) -> bool {
} }
// requests with longer paths only match if we have dynamic trail (<a..>). // requests with longer paths only match if we have dynamic trail (<a..>).
if req_segments.num() > route_segments.len() { if req_segments.num() > route_segments.len() && !route.uri.metadata.dynamic_trail {
if !route.uri.metadata.dynamic_trail {
return false; return false;
} }
}
// We've checked everything beyond the zip of their lengths already. // We've checked everything beyond the zip of their lengths already.
for (route_seg, req_seg) in route_segments.iter().zip(req_segments.clone()) { for (route_seg, req_seg) in route_segments.iter().zip(req_segments.clone()) {

View File

@ -47,7 +47,7 @@ impl Rocket<Orbit> {
}) })
).await; ).await;
let io_handler = response.to_io_handler(Rocket::extract_io_handler); let io_handler = response.make_io_handler(Rocket::extract_io_handler);
if let (Some(handler), Some(upgrade)) = (io_handler, upgrade) { if let (Some(handler), Some(upgrade)) = (io_handler, upgrade) {
let upgrade = upgrade.map_ok(IoStream::from).map_err(io::Error::other); let upgrade = upgrade.map_ok(IoStream::from).map_err(io::Error::other);
tokio::task::spawn(io_handler_task(upgrade, handler)); tokio::task::spawn(io_handler_task(upgrade, handler));
@ -208,7 +208,7 @@ impl Rocket<Orbit> {
let meta = ConnectionMeta::from(&conn); let meta = ConnectionMeta::from(&conn);
let rx = conn.rx.cancellable(rocket.shutdown.clone()); let rx = conn.rx.cancellable(rocket.shutdown.clone());
let response = rocket.clone() let response = rocket.clone()
.service(conn.parts, rx, None, ConnectionMeta::from(meta)) .service(conn.parts, rx, None, meta)
.map_err(io::Error::other) .map_err(io::Error::other)
.io_unless(rocket.shutdown.mercy.clone()) .io_unless(rocket.shutdown.mercy.clone())
.await?; .await?;

View File

@ -4,7 +4,6 @@ use std::fmt;
use std::borrow::Cow; use std::borrow::Cow;
use indexmap::IndexMap; use indexmap::IndexMap;
use rocket_http::{ext::IntoCollection, private::SmallVec};
use time::Duration; use time::Duration;
use crate::http::{Header, uri::Absolute, uncased::Uncased}; use crate::http::{Header, uri::Absolute, uncased::Uncased};
@ -410,19 +409,15 @@ impl From<&XssFilter> for Header<'static> {
/// that you don't want to leak information to these domains. /// that you don't want to leak information to these domains.
/// ///
/// [X-DNS-Prefetch-Control]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-DNS-Prefetch-Control /// [X-DNS-Prefetch-Control]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-DNS-Prefetch-Control
#[derive(Default)]
pub enum Prefetch { pub enum Prefetch {
/// Enables DNS prefetching. This is the browser default. /// Enables DNS prefetching. This is the browser default.
On, On,
/// Disables DNS prefetching. This is the shield policy default. /// Disables DNS prefetching. This is the shield policy default.
#[default]
Off, Off,
} }
impl Default for Prefetch {
fn default() -> Prefetch {
Prefetch::Off
}
}
impl From<&Prefetch> for Header<'static> { impl From<&Prefetch> for Header<'static> {
fn from(prefetch: &Prefetch) -> Self { fn from(prefetch: &Prefetch) -> Self {
let policy_string = match prefetch { let policy_string = match prefetch {
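Note: the manual `impl Default for Prefetch` above is replaced by `#[derive(Default)]` with `#[default]` on the `Off` variant, available since Rust 1.62 and suggested by clippy's `derivable_impls` lint (lint name assumed). Sketch of the derived form:

    #[allow(dead_code)]
    #[derive(Debug, PartialEq, Default)]
    enum Prefetch {
        On,
        #[default]
        Off,
    }

    fn main() {
        assert_eq!(Prefetch::default(), Prefetch::Off);
    }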
@ -475,7 +470,7 @@ impl From<&Prefetch> for Header<'static> {
/// ///
/// [Permissions-Policy]: https://github.com/w3c/webappsec-permissions-policy/blob/a45df7b237e2a85e1909d7f226ca4eb4ce5095ba/permissions-policy-explainer.md /// [Permissions-Policy]: https://github.com/w3c/webappsec-permissions-policy/blob/a45df7b237e2a85e1909d7f226ca4eb4ce5095ba/permissions-policy-explainer.md
#[derive(PartialEq, Clone)] #[derive(PartialEq, Clone)]
pub struct Permission(IndexMap<Feature, Option<SmallVec<[Allow; 1]>>>); pub struct Permission(IndexMap<Feature, Vec<Allow>>);
impl Default for Permission { impl Default for Permission {
/// The default `Permission` policy blocks access to the `interest-cohort` /// The default `Permission` policy blocks access to the `interest-cohort`
@ -514,7 +509,7 @@ impl Permission {
/// let perm = Permission::allowed(Feature::Usb, [Allow::This, rocket]); /// let perm = Permission::allowed(Feature::Usb, [Allow::This, rocket]);
/// ``` /// ```
pub fn allowed<L>(feature: Feature, allow: L) -> Self pub fn allowed<L>(feature: Feature, allow: L) -> Self
where L: IntoCollection<Allow> where L: IntoIterator<Item = Allow>
{ {
Permission(IndexMap::new()).allow(feature, allow) Permission(IndexMap::new()).allow(feature, allow)
} }
@ -560,14 +555,9 @@ impl Permission {
/// .allow(Feature::Payment, [rocket, Allow::This]); /// .allow(Feature::Payment, [rocket, Allow::This]);
/// ``` /// ```
pub fn allow<L>(mut self, feature: Feature, allow: L) -> Self pub fn allow<L>(mut self, feature: Feature, allow: L) -> Self
where L: IntoCollection<Allow> where L: IntoIterator<Item = Allow>
{ {
let mut allow = allow.into_collection(); let mut allow: Vec<_> = allow.into_iter().collect();
if allow.contains(&Allow::Any) {
allow = Allow::Any.into_collection();
}
for allow in &allow { for allow in &allow {
if let Allow::Origin(absolute) = allow { if let Allow::Origin(absolute) = allow {
let auth = absolute.authority(); let auth = absolute.authority();
@ -577,7 +567,11 @@ impl Permission {
} }
} }
self.0.insert(feature, Some(allow)); if allow.contains(&Allow::Any) {
allow = vec![Allow::Any];
}
self.0.insert(feature, allow);
self self
} }
@ -594,7 +588,7 @@ impl Permission {
/// .block(Feature::Payment); /// .block(Feature::Payment);
/// ``` /// ```
pub fn block(mut self, feature: Feature) -> Self { pub fn block(mut self, feature: Feature) -> Self {
self.0.insert(feature, None); self.0.insert(feature, vec![]);
self self
} }
@ -612,11 +606,11 @@ impl Permission {
/// assert_eq!(perm.get(Feature::Usb).unwrap(), &[Allow::Any]); /// assert_eq!(perm.get(Feature::Usb).unwrap(), &[Allow::Any]);
/// ``` /// ```
pub fn get(&self, feature: Feature) -> Option<&[Allow]> { pub fn get(&self, feature: Feature) -> Option<&[Allow]> {
self.0.get(&feature)?.as_deref() Some(self.0.get(&feature)?)
} }
/// Returns an iterator over the pairs of features and their allow lists, /// Returns an iterator over the pairs of features and their allow lists,
/// `None` if the feature is blocked. /// empty if the feature is blocked.
/// ///
/// Features are returned in the order in which they were first added. /// Features are returned in the order in which they were first added.
/// ///
@ -635,13 +629,13 @@ impl Permission {
/// let perms: Vec<_> = perm.iter().collect(); /// let perms: Vec<_> = perm.iter().collect();
/// assert_eq!(perms.len(), 3); /// assert_eq!(perms.len(), 3);
/// assert_eq!(perms, vec![ /// assert_eq!(perms, vec![
/// (Feature::Camera, Some(&[Allow::Any][..])), /// (Feature::Camera, &[Allow::Any][..]),
/// (Feature::Gyroscope, Some(&[Allow::This, Allow::Origin(foo)][..])), /// (Feature::Gyroscope, &[Allow::This, Allow::Origin(foo)][..]),
/// (Feature::Payment, None), /// (Feature::Payment, &[][..]),
/// ]); /// ]);
/// ``` /// ```
pub fn iter(&self) -> impl Iterator<Item = (Feature, Option<&[Allow]>)> { pub fn iter(&self) -> impl Iterator<Item = (Feature, &[Allow])> {
self.0.iter().map(|(feature, list)| (*feature, list.as_deref())) self.0.iter().map(|(feature, list)| (*feature, &**list))
} }
} }
@ -658,9 +652,7 @@ impl From<&Permission> for Header<'static> {
let value = perm.0.iter() let value = perm.0.iter()
.map(|(feature, allow)| { .map(|(feature, allow)| {
let list = allow.as_ref() let list = allow.iter()
.into_iter()
.flatten()
.map(|origin| origin.rendered()) .map(|origin| origin.rendered())
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(" "); .join(" ");
@ -724,6 +716,16 @@ impl Allow {
} }
} }
impl IntoIterator for Allow {
type Item = Self;
type IntoIter = std::iter::Once<Self>;
fn into_iter(self) -> Self::IntoIter {
std::iter::once(self)
}
}
/// A browser feature that can be enabled or blocked via [`Permission`]. /// A browser feature that can be enabled or blocked via [`Permission`].
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
#[non_exhaustive] #[non_exhaustive]
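Note: the `Permission::{allowed, allow}()` hunks above move from the old collection bound to `L: IntoIterator<Item = Allow>`, and the new one-item `IntoIterator` impl on `Allow` keeps single-value calls compiling. A sketch of that bound style with stand-in types (the real `Allow` and `Feature` live in the shield module):

    #[derive(Debug, Clone, PartialEq)]
    enum Allow { Any, This }

    // Yield the single value, so `Allow` itself satisfies `IntoIterator`.
    impl IntoIterator for Allow {
        type Item = Self;
        type IntoIter = std::iter::Once<Self>;
        fn into_iter(self) -> Self::IntoIter { std::iter::once(self) }
    }

    // Accepts a single `Allow`, an array, a Vec, or any other iterator.
    fn allow_list<L: IntoIterator<Item = Allow>>(allow: L) -> Vec<Allow> {
        allow.into_iter().collect()
    }

    fn main() {
        assert_eq!(allow_list(Allow::This), vec![Allow::This]);
        assert_eq!(allow_list([Allow::This, Allow::Any]),
                   vec![Allow::This, Allow::Any]);
    }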

View File

@ -316,7 +316,7 @@ impl TlsConfig {
pub fn certs(&self) -> either::Either<std::path::PathBuf, &[u8]> { pub fn certs(&self) -> either::Either<std::path::PathBuf, &[u8]> {
match &self.certs { match &self.certs {
Either::Left(path) => either::Either::Left(path.relative()), Either::Left(path) => either::Either::Left(path.relative()),
Either::Right(bytes) => either::Either::Right(&bytes), Either::Right(bytes) => either::Either::Right(bytes),
} }
} }
@ -344,7 +344,7 @@ impl TlsConfig {
pub fn key(&self) -> either::Either<std::path::PathBuf, &[u8]> { pub fn key(&self) -> either::Either<std::path::PathBuf, &[u8]> {
match &self.key { match &self.key {
Either::Left(path) => either::Either::Left(path.relative()), Either::Left(path) => either::Either::Left(path.relative()),
Either::Right(bytes) => either::Either::Right(&bytes), Either::Right(bytes) => either::Either::Right(bytes),
} }
} }

View File

@ -63,7 +63,7 @@ impl<R: AsyncRead> ReaderStream<R> {
/// [`Stream`]: futures_core::Stream /// [`Stream`]: futures_core::Stream
pub fn with_capacity(reader: R, capacity: usize) -> Self { pub fn with_capacity(reader: R, capacity: usize) -> Self {
ReaderStream { ReaderStream {
reader: reader, reader,
buf: BytesMut::with_capacity(capacity), buf: BytesMut::with_capacity(capacity),
capacity, capacity,
done: false, done: false,

View File

@ -4,7 +4,7 @@ use rocket::request::Request;
use rocket::http::CookieJar; use rocket::http::CookieJar;
#[catch(404)] #[catch(404)]
fn not_found(request: &Request) -> &'static str { fn not_found(request: &Request<'_>) -> &'static str {
request.cookies().add(("not_found", "404")); request.cookies().add(("not_found", "404"));
"404 - Not Found" "404 - Not Found"
} }

View File

@ -1,7 +1,7 @@
#[macro_use] extern crate rocket; #[macro_use] extern crate rocket;
#[get("/")] #[get("/")]
fn inspect_proto(proto: rocket::http::ProxyProto) -> String { fn inspect_proto(proto: rocket::http::ProxyProto<'_>) -> String {
proto.to_string() proto.to_string()
} }

View File

@ -68,12 +68,12 @@ fn test_form_validation_context() {
count(c, n, kind, false) count(c, n, kind, false)
} }
let c = errors::<Cat>("name=littlebobby"); let c = errors::<Cat<'_>>("name=littlebobby");
assert_eq!(exact(&c, "nick", Missing), 1); assert_eq!(exact(&c, "nick", Missing), 1);
assert_eq!(fuzzy(&c, "nick", Missing), 1); assert_eq!(fuzzy(&c, "nick", Missing), 1);
assert_eq!(fuzzy(&c, "nick", None), 1); assert_eq!(fuzzy(&c, "nick", None), 1);
let c = errors::<Person>("cats[0].name=Bob"); let c = errors::<Person<'_>>("cats[0].name=Bob");
assert_eq!(exact(&c, "kitty", None), 1); assert_eq!(exact(&c, "kitty", None), 1);
assert_eq!(exact(&c, "kitty", Missing), 1); assert_eq!(exact(&c, "kitty", Missing), 1);
assert_eq!(exact(&c, "cats[0].nick", None), 1); assert_eq!(exact(&c, "cats[0].nick", None), 1);
@ -91,7 +91,7 @@ fn test_form_validation_context() {
assert_eq!(fuzzy(&c, "dog.name", Missing), 1); assert_eq!(fuzzy(&c, "dog.name", Missing), 1);
assert_eq!(fuzzy(&c, "dog", None), 1); assert_eq!(fuzzy(&c, "dog", None), 1);
let c = errors::<Person>("cats[0].name=Bob&cats[0].nick=kit&kitty.name=Hi"); let c = errors::<Person<'_>>("cats[0].name=Bob&cats[0].nick=kit&kitty.name=Hi");
assert_eq!(exact(&c, "kitty.nick", Missing), 1); assert_eq!(exact(&c, "kitty.nick", Missing), 1);
assert_eq!(exact(&c, "kitty", None), 0); assert_eq!(exact(&c, "kitty", None), 0);
assert_eq!(exact(&c, "dog", Missing), 1); assert_eq!(exact(&c, "dog", Missing), 1);
@ -109,7 +109,7 @@ fn test_form_validation_context() {
assert_eq!(fuzzy(&c, "cats[0].nick", None), 1); assert_eq!(fuzzy(&c, "cats[0].nick", None), 1);
assert_eq!(exact(&c, "cats[0].name", None), 1); assert_eq!(exact(&c, "cats[0].name", None), 1);
let c = errors::<Person>("kitty.name=Michael"); let c = errors::<Person<'_>>("kitty.name=Michael");
assert_eq!(exact(&c, "kitty.nick", Missing), 1); assert_eq!(exact(&c, "kitty.nick", Missing), 1);
assert_eq!(exact(&c, "dog", Missing), 1); assert_eq!(exact(&c, "dog", Missing), 1);
assert_eq!(exact(&c, "cats[0].name", None), 0); assert_eq!(exact(&c, "cats[0].name", None), 0);
@ -125,7 +125,7 @@ fn test_form_validation_context() {
assert_eq!(exact(&c, "cats[0].name", None), 0); assert_eq!(exact(&c, "cats[0].name", None), 0);
assert_eq!(exact(&c, "cats[0].nick", None), 0); assert_eq!(exact(&c, "cats[0].nick", None), 0);
let c = errors::<Person>("kitty.name=Michael&kitty.nick=kittykat&dog.name=woofy"); let c = errors::<Person<'_>>("kitty.name=Michael&kitty.nick=kittykat&dog.name=woofy");
assert_eq!(c.iter().count(), 1); assert_eq!(c.iter().count(), 1);
assert_eq!(exact(&c, "cats", None), 1); assert_eq!(exact(&c, "cats", None), 1);
assert_eq!(exact(&c, "cats", InvalidLength { min: Some(1), max: None }), 1); assert_eq!(exact(&c, "cats", InvalidLength { min: Some(1), max: None }), 1);

View File

@ -73,7 +73,7 @@ fn catches_early_route_panic() {
#[test] #[test]
fn catches_early_catcher_panic() { fn catches_early_catcher_panic() {
fn pre_future_catcher<'r>(_: Status, _: &'r Request) -> catcher::BoxFuture<'r> { fn pre_future_catcher<'r>(_: Status, _: &'r Request<'_>) -> catcher::BoxFuture<'r> {
panic!("a panicking pre-future catcher") panic!("a panicking pre-future catcher")
} }

View File

@ -19,11 +19,11 @@ impl<'r, 'o: 'r, R: Responder<'r, 'o>> Responder<'r, 'o> for CustomResponder<'r,
} }
#[get("/unit_state")] #[get("/unit_state")]
fn unit_state(state: &State<SomeState>) -> CustomResponder<()> { fn unit_state(state: &State<SomeState>) -> CustomResponder<'_, ()> {
CustomResponder { responder: (), state: &*state } CustomResponder { responder: (), state: &*state }
} }
#[get("/string_state")] #[get("/string_state")]
fn string_state(state: &State<SomeState>) -> CustomResponder<String> { fn string_state(state: &State<SomeState>) -> CustomResponder<'_, String> {
CustomResponder { responder: "".to_string(), state: &*state } CustomResponder { responder: "".to_string(), state: &*state }
} }

View File

@ -77,7 +77,7 @@ async fn messages() {
} }
let data: Message = json::from_str(&line[5..]).expect("message JSON"); let data: Message = json::from_str(&line[5..]).expect("message JSON");
if &data == &shutdown_message { if data == shutdown_message {
// Test shutdown listening: this should end the stream. // Test shutdown listening: this should end the stream.
client.rocket().shutdown().notify(); client.rocket().shutdown().notify();
continue; continue;

View File

@ -4,7 +4,7 @@ async fn test_config(profile: &str) {
let provider = Config::figment().select(profile); let provider = Config::figment().select(profile);
let rocket = rocket::custom(provider).ignite().await.unwrap(); let rocket = rocket::custom(provider).ignite().await.unwrap();
let config = rocket.config(); let config = rocket.config();
match &*profile { match profile {
"debug" => { "debug" => {
assert_eq!(config.workers, 1); assert_eq!(config.workers, 1);
assert_eq!(config.keep_alive, 0); assert_eq!(config.keep_alive, 0);

View File

@ -54,7 +54,7 @@ fn login(_user: User) -> Redirect {
#[get("/login", rank = 2)] #[get("/login", rank = 2)]
fn login_page(flash: Option<FlashMessage<'_>>) -> Template { fn login_page(flash: Option<FlashMessage<'_>>) -> Template {
Template::render("login", &flash) Template::render("login", flash)
} }
#[post("/login", data = "<login>")] #[post("/login", data = "<login>")]

View File

@ -5,8 +5,7 @@ use rocket::http::{Status, Cookie, ContentType};
fn user_id_cookie(response: &LocalResponse<'_>) -> Option<Cookie<'static>> { fn user_id_cookie(response: &LocalResponse<'_>) -> Option<Cookie<'static>> {
let cookie = response.headers() let cookie = response.headers()
.get("Set-Cookie") .get("Set-Cookie")
.filter(|v| v.starts_with("user_id")) .find(|v| v.starts_with("user_id"))
.nth(0)
.and_then(|val| Cookie::parse_encoded(val).ok()); .and_then(|val| Cookie::parse_encoded(val).ok());
cookie.map(|c| c.into_owned()) cookie.map(|c| c.into_owned())
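Note: `user_id_cookie()` above swaps `.filter(..).nth(0)` for `.find(..)`; clippy flags both the filter-then-take-first chain and `nth(0)` itself (my guess at the lint names: `filter_next` and `iter_nth_zero`). Sketch:

    #[allow(clippy::iter_nth_zero)]
    fn main() {
        let headers = ["session=abc", "user_id=42"];

        let old = headers.iter().filter(|h| h.starts_with("user_id")).nth(0);
        let new = headers.iter().find(|h| h.starts_with("user_id"));

        assert_eq!(old, new);
        assert_eq!(new, Some(&"user_id=42"));
    }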

View File

@ -80,7 +80,7 @@ async fn delete(mut db: Connection<Db>, id: i64) -> Result<Option<()>> {
.execute(&mut db) .execute(&mut db)
.await?; .await?;
Ok((affected == 1).then(|| ())) Ok((affected == 1).then_some(()))
} }
#[delete("/")] #[delete("/")]

View File

@ -73,7 +73,7 @@ async fn delete(db: Db, id: i32) -> Result<Option<()>> {
.execute(conn) .execute(conn)
}).await?; }).await?;
Ok((affected == 1).then(|| ())) Ok((affected == 1).then_some(()))
} }
#[delete("/")] #[delete("/")]

View File

@ -61,7 +61,7 @@ async fn delete(db: Db, id: i64) -> Result<Option<()>> {
conn.execute("DELETE FROM posts WHERE id = ?1", params![id]) conn.execute("DELETE FROM posts WHERE id = ?1", params![id])
}).await?; }).await?;
Ok((affected == 1).then(|| ())) Ok((affected == 1).then_some(()))
} }
#[delete("/")] #[delete("/")]

View File

@ -3,7 +3,7 @@ use rocket::fairing::{self, AdHoc};
use rocket::response::status::Created; use rocket::response::status::Created;
use rocket::serde::{Serialize, Deserialize, json::Json}; use rocket::serde::{Serialize, Deserialize, json::Json};
use rocket_db_pools::{sqlx, Database, Connection}; use rocket_db_pools::{Database, Connection};
use futures::{stream::TryStreamExt, future::TryFutureExt}; use futures::{stream::TryStreamExt, future::TryFutureExt};
@ -63,7 +63,7 @@ async fn delete(mut db: Connection<Db>, id: i64) -> Result<Option<()>> {
.execute(&mut **db) .execute(&mut **db)
.await?; .await?;
Ok((result.rows_affected() == 1).then(|| ())) Ok((result.rows_affected() == 1).then_some(()))
} }
#[delete("/")] #[delete("/")]

View File

@ -46,7 +46,7 @@ fn test(base: &str, stage: AdHoc) {
for _ in 1..=N { for _ in 1..=N {
// Get a valid ID from the index. // Get a valid ID from the index.
let list = client.get(base).dispatch().into_json::<Vec<i64>>().unwrap(); let list = client.get(base).dispatch().into_json::<Vec<i64>>().unwrap();
let id = list.get(0).expect("have post"); let id = list.first().expect("have post");
// Delete that post. // Delete that post.
let response = client.delete(format!("{}/{}", base, id)).dispatch(); let response = client.delete(format!("{}/{}", base, id)).dispatch();

View File

@ -10,7 +10,7 @@ fn test_hello() {
let response = client.get(uri).dispatch(); let response = client.get(uri).dispatch();
assert_eq!(response.status(), Status::Ok); assert_eq!(response.status(), Status::Ok);
assert_eq!(response.into_string().unwrap(), super::hello(name.into(), age)); assert_eq!(response.into_string().unwrap(), super::hello(name, age));
} }
#[test] #[test]
@ -54,7 +54,8 @@ fn test_hello_invalid_age() {
assert_eq!(response.into_string().unwrap(), expected.1); assert_eq!(response.into_string().unwrap(), expected.1);
} }
for path in &["foo/bar/baz"] { {
let path = &"foo/bar/baz";
let request = client.get(format!("/hello/{}", path)); let request = client.get(format!("/hello/{}", path));
let expected = super::hello_not_found(request.inner()); let expected = super::hello_not_found(request.inner());
let response = request.dispatch(); let response = request.dispatch();

View File

@ -1,6 +1,6 @@
use super::*; use super::*;
use rocket::local::blocking::Client; use rocket::local::blocking::Client;
use rocket::http::{ContentType, Status}; use rocket::http::ContentType;
fn test(uri: &str, content_type: ContentType, status: Status, body: String) { fn test(uri: &str, content_type: ContentType, status: Status, body: String) {
let client = Client::tracked(rocket()).unwrap(); let client = Client::tracked(rocket()).unwrap();
@ -24,7 +24,7 @@ fn test_name() {
#[test] #[test]
fn test_echo() { fn test_echo() {
let uri = format!("/echo/echo%20this%20text"); let uri = "/echo/echo%20this%20text".to_string();
test(&uri, ContentType::Plain, Status::Ok, "echo this text".into()); test(&uri, ContentType::Plain, Status::Ok, "echo this text".into());
} }

View File

@ -1,7 +1,7 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use rand::{self, Rng}; use rand::Rng;
use rocket::request::FromParam; use rocket::request::FromParam;
/// A _probably_ unique paste ID. /// A _probably_ unique paste ID.

View File

@ -7,7 +7,7 @@ fn test_count() {
fn get_count(client: &Client) -> usize { fn get_count(client: &Client) -> usize {
let response = client.get("/count").dispatch().into_string().unwrap(); let response = client.get("/count").dispatch().into_string().unwrap();
let count = response.split(" ").last().unwrap(); let count = response.split(' ').last().unwrap();
count.parse().unwrap() count.parse().unwrap()
} }
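Note: `split(" ")` → `split(' ')` above: a single-character string pattern can be a `char`, which clippy prefers (presumably `single_char_pattern`). Sketch:

    fn main() {
        let response = "Visits: 7";
        let count: usize = response.split(' ').last().unwrap().parse().unwrap();
        assert_eq!(count, 7);
    }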

View File

@ -16,17 +16,16 @@ fn test_query_file<T> (path: &str, file: T, status: Status)
let body_data = response.into_bytes(); let body_data = response.into_bytes();
if let Some(filename) = file.into() { if let Some(filename) = file.into() {
let expected_data = read_file_content(filename); let expected_data = read_file_content(filename).expect(filename);
assert!(body_data.map_or(false, |s| s == expected_data)); assert!(body_data.map_or(false, |s| s == expected_data));
} }
} }
fn read_file_content(path: &str) -> Vec<u8> { fn read_file_content(path: &str) -> std::io::Result<Vec<u8>> {
let mut fp = File::open(&path).expect(&format!("Can't open {}", path));
let mut file_content = vec![]; let mut file_content = vec![];
let mut fp = File::open(path)?;
fp.read_to_end(&mut file_content).expect(&format!("Reading {} failed.", path)); fp.read_to_end(&mut file_content)?;
file_content Ok(file_content)
} }
#[test] #[test]

View File

@ -91,7 +91,7 @@ impl Fairing for Redirector {
let this = *self; let this = *self;
let shutdown = rocket.shutdown(); let shutdown = rocket.shutdown();
let _ = rocket::tokio::spawn(async move { rocket::tokio::spawn(async move {
if let Err(e) = this.try_launch(config).await { if let Err(e) = this.try_launch(config).await {
error!("Failed to start HTTP -> HTTPS redirector."); error!("Failed to start HTTP -> HTTPS redirector.");
info_!("Error: {}", e); info_!("Error: {}", e);

View File

@ -1,5 +1,5 @@
use rocket::serde::Serialize; use rocket::serde::Serialize;
use diesel::{self, result::QueryResult, prelude::*}; use diesel::{self, prelude::*};
mod schema { mod schema {
table! { table! {

View File

@ -54,7 +54,7 @@ fn test_insertion_deletion() {
// Ensure the task is what we expect. // Ensure the task is what we expect.
assert_eq!(new_tasks[0].description, "My first task"); assert_eq!(new_tasks[0].description, "My first task");
assert_eq!(new_tasks[0].completed, false); assert!(!new_tasks[0].completed);
// Issue a request to delete the task. // Issue a request to delete the task.
let id = new_tasks[0].id.unwrap(); let id = new_tasks[0].id.unwrap();
@ -63,7 +63,7 @@ fn test_insertion_deletion() {
// Ensure it's gone. // Ensure it's gone.
let final_tasks = Task::all(&conn).await.unwrap(); let final_tasks = Task::all(&conn).await.unwrap();
assert_eq!(final_tasks.len(), init_tasks.len()); assert_eq!(final_tasks.len(), init_tasks.len());
if final_tasks.len() > 0 { if !final_tasks.is_empty() {
assert_ne!(final_tasks[0].description, "My first task"); assert_ne!(final_tasks[0].description, "My first task");
} }
}) })
@ -80,15 +80,15 @@ fn test_toggle() {
.await; .await;
let task = Task::all(&conn).await.unwrap()[0].clone(); let task = Task::all(&conn).await.unwrap()[0].clone();
assert_eq!(task.completed, false); assert!(!task.completed);
// Issue a request to toggle the task; ensure it is completed. // Issue a request to toggle the task; ensure it is completed.
client.put(format!("/todo/{}", task.id.unwrap())).dispatch().await; client.put(format!("/todo/{}", task.id.unwrap())).dispatch().await;
assert_eq!(Task::all(&conn).await.unwrap()[0].completed, true); assert!(Task::all(&conn).await.unwrap()[0].completed);
// Issue a request to toggle the task; ensure it's not completed again. // Issue a request to toggle the task; ensure it's not completed again.
client.put(format!("/todo/{}", task.id.unwrap())).dispatch().await; client.put(format!("/todo/{}", task.id.unwrap())).dispatch().await;
assert_eq!(Task::all(&conn).await.unwrap()[0].completed, false); assert!(!Task::all(&conn).await.unwrap()[0].completed);
}) })
} }
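Note: the last two hunks turn `assert_eq!(x, false)` into `assert!(!x)` and `final_tasks.len() > 0` into `!final_tasks.is_empty()`; these correspond to clippy's `bool_assert_comparison` and `len_zero` lints respectively (lint names assumed). A closing sketch:

    fn main() {
        let completed = false;
        let final_tasks = vec!["My first task"];

        // Before: assert_eq!(completed, false);
        assert!(!completed);

        // Before: if final_tasks.len() > 0 { ... }
        if !final_tasks.is_empty() {
            assert_ne!(final_tasks[0], "");
        }
    }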