mirror of https://github.com/rwf2/Rocket.git
Apply clippy suggestions.
This commit is contained in: parent 5ebefa97c9, commit be933ce398
@@ -65,7 +65,7 @@ pub fn database_attr(attr: TokenStream, input: TokenStream) -> Result<TokenStream
  let guard_type = &invocation.type_name;
  let vis = &invocation.visibility;
  let fairing_name = format!("'{}' Database Pool", name);
- let span = conn_type.span().into();
+ let span = conn_type.span();

  // A few useful paths.
  let root = quote_spanned!(span => ::rocket_sync_db_pools);
@@ -138,7 +138,7 @@ impl<K: 'static, C: Poolable> ConnectionPool<K, C> {

  #[inline]
  pub async fn get_pool<P: Phase>(rocket: &Rocket<P>) -> Option<Self> {
- rocket.state::<Self>().map(|pool| pool.clone())
+ rocket.state::<Self>().cloned()
  }
  }
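
Note: the change above is the usual clippy `map_clone`-style cleanup. A minimal standalone sketch of the pattern (plain Rust, no Rocket types; the names are made up):

    fn main() {
        let name = String::from("db-pool");
        let pool: Option<&String> = Some(&name);

        // Flagged: cloning inside a closure just to go from Option<&T> to Option<T>.
        let a: Option<String> = pool.map(|p| p.clone());

        // Suggested form: `cloned()` says the same thing directly.
        let b: Option<String> = pool.cloned();

        assert_eq!(a, b);
    }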
@@ -30,14 +30,14 @@ pub fn _catch(

  // This ensures that "Responder not implemented" points to the return type.
  let return_type_span = catch.function.sig.output.ty()
- .map(|ty| ty.span().into())
- .unwrap_or(Span::call_site().into());
+ .map(|ty| ty.span())
+ .unwrap_or_else(Span::call_site);

  // Set the `req` and `status` spans to that of their respective function
  // arguments for a more correct `wrong type` error span. `rev` to be cute.
  let codegen_args = &[__req, __status];
  let inputs = catch.function.sig.inputs.iter().rev()
- .zip(codegen_args.into_iter())
+ .zip(codegen_args.iter())
  .map(|(fn_arg, codegen_arg)| match fn_arg {
  syn::FnArg::Receiver(_) => codegen_arg.respanned(fn_arg.span()),
  syn::FnArg::Typed(a) => codegen_arg.respanned(a.ty.span())
@@ -45,7 +45,7 @@ pub fn _catch(

  // We append `.await` to the function call if this is `async`.
  let dot_await = catch.function.sig.asyncness
- .map(|a| quote_spanned!(a.span().into() => .await));
+ .map(|a| quote_spanned!(a.span() => .await));

  let catcher_response = quote_spanned!(return_type_span => {
  let ___responder = #user_catcher_fn_name(#(#inputs),*) #dot_await;
@@ -36,7 +36,7 @@ impl EntryAttr for Launch {
  };

  let block = &f.block;
- let rocket = quote_spanned!(ty.span().into() => {
+ let rocket = quote_spanned!(ty.span() => {
  let ___rocket: #ty = #block;
  let ___rocket: ::rocket::Rocket<::rocket::Build> = ___rocket;
  ___rocket
@@ -47,7 +47,7 @@ impl EntryAttr for Launch {
  sig.output = syn::ReturnType::Default;
  sig.asyncness = None;

- Ok(quote_spanned!(block.span().into() =>
+ Ok(quote_spanned!(block.span() =>
  #[allow(dead_code)] #f

  #vis #sig {
@@ -21,7 +21,7 @@ impl EntryAttr for Main {
  }

  sig.asyncness = None;
- Ok(quote_spanned!(block.span().into() => #(#attrs)* #vis #sig {
+ Ok(quote_spanned!(block.span() => #(#attrs)* #vis #sig {
  ::rocket::async_main(async move #block)
  }))
  }
@@ -35,7 +35,7 @@ fn _async_entry<A: EntryAttr>(
  .span_note(function.sig.span(), "this function must take no arguments"));
  }

- A::function(&mut function).map(|t| t.into())
+ A::function(&mut function)
  }

  macro_rules! async_entry {
@@ -12,7 +12,7 @@ impl EntryAttr for Test {
  fn function(f: &mut syn::ItemFn) -> Result<TokenStream> {
  let (attrs, vis, block, sig) = (&f.attrs, &f.vis, &f.block, &mut f.sig);
  sig.asyncness = None;
- Ok(quote_spanned!(block.span().into() => #(#attrs)* #[test] #vis #sig {
+ Ok(quote_spanned!(block.span() => #(#attrs)* #[test] #vis #sig {
  ::rocket::async_test(async move #block)
  }))
  }
@@ -228,7 +228,7 @@ fn internal_uri_macro_decl(route: &Route) -> TokenStream {
  fn responder_outcome_expr(route: &Route) -> TokenStream {
  let ret_span = match route.handler.sig.output {
  syn::ReturnType::Default => route.handler.sig.ident.span(),
- syn::ReturnType::Type(_, ref ty) => ty.span().into()
+ syn::ReturnType::Type(_, ref ty) => ty.span()
  };

  let user_handler_fn_name = &route.handler.sig.ident;
@@ -236,7 +236,7 @@ fn responder_outcome_expr(route: &Route) -> TokenStream {
  .map(|(ident, _)| ident.rocketized());

  let _await = route.handler.sig.asyncness
- .map(|a| quote_spanned!(a.span().into() => .await));
+ .map(|a| quote_spanned!(a.span() => .await));

  define_spanned_export!(ret_span => __req, _route);
  quote_spanned! { ret_span =>
@@ -375,12 +375,12 @@ fn codegen_route(route: Route) -> Result<TokenStream> {

  /// Rocket code generated wrapping URI macro.
  #internal_uri_macro
- }.into())
+ })
  }

  fn complete_route(args: TokenStream, input: TokenStream) -> Result<TokenStream> {
  let function: syn::ItemFn = syn::parse2(input)
- .map_err(|e| Diagnostic::from(e))
+ .map_err(Diagnostic::from)
  .map_err(|diag| diag.help("`#[route]` can only be used on functions"))?;

  let attr_tokens = quote!(route(#args));
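
Note: `.map_err(|e| Diagnostic::from(e))` → `.map_err(Diagnostic::from)` is clippy's redundant-closure suggestion. A small self-contained sketch (the `Wrapper` type is invented for illustration):

    #[derive(Debug)]
    struct Wrapper(String);

    impl From<String> for Wrapper {
        fn from(s: String) -> Self { Wrapper(s) }
    }

    fn main() {
        let strings = vec![String::from("a"), String::from("b")];

        // A closure that only forwards its argument to a function...
        let a: Vec<Wrapper> = strings.clone().into_iter().map(|s| Wrapper::from(s)).collect();

        // ...can be replaced by the function itself.
        let b: Vec<Wrapper> = strings.into_iter().map(Wrapper::from).collect();

        assert_eq!(a.len(), b.len());
    }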
@@ -398,10 +398,10 @@ fn incomplete_route(
  let method_span = StringLit::new(format!("#[{}]", method), Span::call_site())
  .subspan(2..2 + method_str.len());

- let method_ident = syn::Ident::new(&method_str, method_span.into());
+ let method_ident = syn::Ident::new(&method_str, method_span);

  let function: syn::ItemFn = syn::parse2(input)
- .map_err(|e| Diagnostic::from(e))
+ .map_err(Diagnostic::from)
  .map_err(|d| d.help(format!("#[{}] can only be used on functions", method_str)))?;

  let full_attr = quote!(#method_ident(#args));
@@ -84,7 +84,7 @@ impl FromMeta for RouteUri {
  .and_then(|q| q.find("&&"))
  .map(|i| origin.path().len() + 1 + i))
  .map(|i| string.subspan((1 + i)..(1 + i + 2)))
- .unwrap_or(string.span());
+ .unwrap_or_else(|| string.span());

  return Err(span.error("route URIs cannot contain empty segments")
  .note(format!("expected \"{}\", found \"{}\"", normalized, origin)));
@@ -103,7 +103,7 @@ impl FromMeta for RouteUri {

  impl Route {
  pub fn upgrade_param(param: Parameter, args: &Arguments) -> Result<Parameter> {
- if !param.dynamic().is_some() {
+ if param.dynamic().is_none() {
  return Ok(param);
  }
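
Note: rewriting `!param.dynamic().is_some()` as `param.dynamic().is_none()` only changes readability; the two are equivalent. A trivial sketch:

    fn main() {
        let param: Option<&str> = None;

        // Flagged: a negated `is_some()` reads like a double negative.
        if !param.is_some() {
            println!("static segment");
        }

        // Preferred: state the condition directly.
        if param.is_none() {
            println!("static segment");
        }
    }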
@@ -160,14 +160,14 @@ impl Route {
  let (source, span) = (attr.uri.path(), attr.uri.path_span);
  let path_params = Parameter::parse_many::<fmt::Path>(source.as_str(), span)
  .map(|p| Route::upgrade_param(p?, &arguments))
- .filter_map(|p| p.map_err(|e| diags.push(e.into())).ok())
+ .filter_map(|p| p.map_err(|e| diags.push(e)).ok())
  .collect::<Vec<_>>();

  // Parse and collect the query parameters.
  let query_params = match (attr.uri.query(), attr.uri.query_span) {
  (Some(q), Some(span)) => Parameter::parse_many::<fmt::Query>(q.as_str(), span)
  .map(|p| Route::upgrade_param(p?, &arguments))
- .filter_map(|p| p.map_err(|e| diags.push(e.into())).ok())
+ .filter_map(|p| p.map_err(|e| diags.push(e)).ok())
  .collect::<Vec<_>>(),
  _ => vec![]
  };
@@ -175,7 +175,7 @@ impl Route {
  // Remove the `SpanWrapped` layer and upgrade to a guard.
  let data_guard = attr.data.clone()
  .map(|p| Route::upgrade_dynamic(p.value, &arguments))
- .and_then(|p| p.map_err(|e| diags.push(e.into())).ok());
+ .and_then(|p| p.map_err(|e| diags.push(e)).ok());

  // Collect all of the declared dynamic route parameters.
  let all_dyn_params = path_params.iter().filter_map(|p| p.dynamic())
@@ -6,7 +6,7 @@ use devise::ext::SpanDiagnosticExt;
  use proc_macro2::TokenStream;

  pub fn _macro(input: proc_macro::TokenStream) -> devise::Result<TokenStream> {
- let root_glob = syn::parse::<LitStr>(input.into())?;
+ let root_glob = syn::parse::<LitStr>(input)?;
  let tests = entry_to_tests(&root_glob)
  .map_err(|e| root_glob.span().error(format!("failed to read: {}", e)))?;
@@ -24,7 +24,7 @@ fn entry_to_tests(root_glob: &LitStr) -> Result<Vec<TokenStream>, Box<dyn Error>>
  .and_then(|f| f.to_str())
  .map(|name| name.trim_matches(|c| char::is_numeric(c) || c == '-')
  .replace(|c| c == '-' || c == '.', "_"))
- .ok_or_else(|| "invalid file name")?;
+ .ok_or("invalid file name")?;

  let ident = Ident::new(&name.to_lowercase(), root_glob.span());
  let full_path = Path::new(&manifest_dir).join(&path).display().to_string();
@@ -28,7 +28,7 @@ pub fn prefix_last_segment(path: &mut syn::Path, prefix: &str) {
  }

  pub fn _uri_macro(input: TokenStream) -> Result<TokenStream> {
- let input2: TokenStream = input.clone().into();
+ let input2: TokenStream = input.clone();
  match syn::parse2::<UriMacro>(input)? {
  UriMacro::Routed(ref mut mac) => {
  prefix_last_segment(&mut mac.route.path, URI_MACRO_PREFIX);
@@ -39,10 +39,10 @@ pub fn _uri_macro(input: TokenStream) -> Result<TokenStream> {
  }
  }

- fn extract_exprs<'a>(internal: &'a InternalUriParams) -> Result<(
- impl Iterator<Item = &'a Expr>, // path exprs
- impl Iterator<Item = &'a ArgExpr>, // query exprs
- impl Iterator<Item = (&'a Ident, &'a Type)>, // types for both path || query
+ fn extract_exprs(internal: &InternalUriParams) -> Result<(
+ impl Iterator<Item = &Expr>, // path exprs
+ impl Iterator<Item = &ArgExpr>, // query exprs
+ impl Iterator<Item = (&Ident, &Type)>, // types for both path || query
  )>
  {
  let route_name = &internal.uri_mac.route.path;
@@ -51,7 +51,7 @@ fn extract_exprs<'a>(internal: &'a InternalUriParams) -> Result<(
  let path_params = internal.dynamic_path_params();
  let path_param_count = path_params.clone().count();
  for expr in exprs.iter().take(path_param_count) {
- if !expr.as_expr().is_some() {
+ if expr.as_expr().is_none() {
  return Err(expr.span().error("path parameters cannot be ignored"));
  }
  }
@@ -127,7 +127,7 @@ pub struct FnArg {
  }

  fn err<T, S: AsRef<str>>(span: Span, s: S) -> parse::Result<T> {
- Err(parse::Error::new(span.into(), s.as_ref()))
+ Err(parse::Error::new(span, s.as_ref()))
  }

  impl Parse for ArgExpr {
@@ -261,7 +261,7 @@ impl Parse for UriMacro {

  stream.is_empty()
  .then(|| Ok((None, cursor)))
- .unwrap_or_else(|| Ok((Some(stream), cursor)))
+ .unwrap_or(Ok((Some(stream), cursor)))
  }

  let mut args = vec![];
@@ -276,7 +276,7 @@ impl Parse for UriMacro {
  1 => UriMacro::unary.parse2(next()),
  2 => UriMacro::binary(next(), next()),
  3 => UriMacro::ternary(next(), next(), next()),
- n => err(iter.skip(3).next().unwrap().span(),
+ n => err(iter.nth(3).unwrap().span(),
  format!("expected 1, 2, or 3 arguments, found {}", n))
  }
  }
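
Note: `iter.skip(3).next()` → `iter.nth(3)` is the clippy `iter_skip_next` simplification. Standalone sketch:

    fn main() {
        let args = ["a", "b", "c", "d", "e"];

        // Skip-then-next...
        let fourth_a = args.iter().skip(3).next();

        // ...is exactly what `nth` does (both consume the iterator up to index 3).
        let fourth_b = args.iter().nth(3);

        assert_eq!(fourth_a, fourth_b);
        assert_eq!(fourth_b, Some(&"d"));
    }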
@@ -479,7 +479,7 @@ fn uri_err<T>(lit: &StringLit, error: Error<'_>) -> parse::Result<T> {

  impl UriExpr {
  fn parse_prefix(input: ParseStream<'_>) -> syn::Result<Option<Self>> {
- if let Ok(_) = input.parse::<Token![_]>() {
+ if input.parse::<Token![_]>().is_ok() {
  return Ok(None);
  }
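
Note: `if let Ok(_) = ...` with an unused binding is what clippy's `redundant_pattern_matching` rewrites to `.is_ok()`. Sketch:

    fn main() {
        let parsed: Result<i32, std::num::ParseIntError> = "42".parse();

        // Flagged: the pattern binds nothing, so it only asks "did this succeed?".
        if let Ok(_) = parsed {
            println!("parsed fine");
        }

        // Equivalent, and clearer about intent:
        if parsed.is_ok() {
            println!("parsed fine");
        }
    }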
@@ -490,7 +490,7 @@ impl UriExpr {
  let lit = input.parse::<StringLit>()?;
  let uri = Uri::parse::<Origin<'_>>(&lit)
  .or_else(|e| Uri::parse::<Absolute<'_>>(&lit).map_err(|e2| (e, e2)))
- .map_err(|(e1, e2)| lit.starts_with('/').then(|| e1).unwrap_or_else(|| e2))
+ .map_err(|(e1, e2)| lit.starts_with('/').then(|| e1).unwrap_or(e2))
  .or_else(|e| uri_err(&lit, e))?;

  if matches!(&uri, Uri::Origin(o) if o.query().is_some())
@@ -503,7 +503,7 @@ impl UriExpr {
  }

  fn parse_suffix(input: ParseStream<'_>) -> syn::Result<Option<Self>> {
- if let Ok(_) = input.parse::<Token![_]>() {
+ if input.parse::<Token![_]>().is_ok() {
  return Ok(None);
  }
@@ -301,7 +301,7 @@ impl VisitMut for ValidationMutator<'_> {
  }
  }

- return syn::visit_mut::visit_expr_mut(self, i);
+ syn::visit_mut::visit_expr_mut(self, i);
  }
  }
@@ -385,10 +385,10 @@ pub fn default<'v>(field: Field<'v>) -> Result<Option<TokenStream>> {
  let ty = field.stripped_ty();
  match (default, default_with) {
  (Some(e1), Some(e2)) => {
- return Err(e1.span()
+ Err(e1.span()
  .error("duplicate default expressions")
  .help("only one of `default` or `default_with` must be used")
- .span_note(e2.span(), "other default expression is here"));
+ .span_note(e2.span(), "other default expression is here"))
  },
  (Some(e), None) | (None, Some(e)) => {
  Ok(Some(quote_spanned!(e.span() => {
@@ -420,8 +420,8 @@ pub fn first_duplicate<K: Spanned, V: PartialEq + Spanned>(
  let key = |k| key_map.iter().find(|(i, _)| k < *i).expect("k < *i");

  for (i, a) in all_values.iter().enumerate() {
- let rest = all_values.iter().enumerate().skip(i + 1);
- if let Some((j, b)) = rest.filter(|(_, b)| *b == a).next() {
+ let mut rest = all_values.iter().enumerate().skip(i + 1);
+ if let Some((j, b)) = rest.find(|(_, b)| *b == a) {
  let (a_i, key_a) = key(i);
  let (b_i, key_b) = key(j);
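
Note: the hunk above swaps `.filter(..).next()` for `.find(..)`; `find` needs `&mut self`, hence the added `mut` on `rest`. A sketch of the same idea:

    fn main() {
        let values = [3, 7, 8, 11, 14];

        // Filter-then-next scans lazily but reads indirectly.
        let first_even_a = values.iter().filter(|v| **v % 2 == 0).next();

        // `find` states "first match" directly; note it takes the iterator by `&mut`.
        let mut rest = values.iter();
        let first_even_b = rest.find(|v| **v % 2 == 0);

        assert_eq!(first_even_a, first_even_b);
        assert_eq!(first_even_b, Some(&8));
    }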
@@ -59,7 +59,7 @@ pub fn derive_responder(input: proc_macro::TokenStream) -> TokenStream {
  false => Ok(())
  })
  .fields_validate(|_, fields| match fields.is_empty() {
- true => return Err(fields.span().error("need at least one field")),
+ true => Err(fields.span().error("need at least one field")),
  false => Ok(())
  })
  )
@@ -71,7 +71,7 @@ pub fn derive_responder(input: proc_macro::TokenStream) -> TokenStream {
  })
  .try_fields_map(|_, fields| {
  fn set_header_tokens<T: ToTokens + Spanned>(item: T) -> TokenStream {
- quote_spanned!(item.span().into() => __res.set_header(#item);)
+ quote_spanned!(item.span() => __res.set_header(#item);)
  }

  let attr = ItemAttr::one_from_attrs("response", fields.parent.attrs())?
@@ -98,7 +98,7 @@ pub fn derive_responder(input: proc_macro::TokenStream) -> TokenStream {

  let content_type = attr.content_type.map(set_header_tokens);
  let status = attr.status.map(|status| {
- quote_spanned!(status.span().into() => __res.set_status(#status);)
+ quote_spanned!(status.span() => __res.set_status(#status);)
  });

  Ok(quote! {
@@ -38,7 +38,7 @@ pub fn derive_uri_display_query(input: proc_macro::TokenStream) -> TokenStream {
  .validator(ValidatorBuild::new()
  .enum_validate(|_, data| {
  if data.variants().count() == 0 {
- return Err(data.brace_token.span.error(NO_EMPTY_ENUMS));
+ Err(data.brace_token.span.error(NO_EMPTY_ENUMS))
  } else {
  Ok(())
  }
@@ -80,7 +80,7 @@ pub fn derive_uri_display_query(input: proc_macro::TokenStream) -> TokenStream {
  })
  })
  .try_field_map(|_, field| {
- let span = field.span().into();
+ let span = field.span();
  let accessor = field.accessor();
  let tokens = if let Some(name) = field.first_field_name()? {
  quote_spanned!(span => f.write_named_value(#name, &#accessor)?;)
@@ -100,13 +100,13 @@ pub fn derive_uri_display_query(input: proc_macro::TokenStream) -> TokenStream {

  let from_self = from_uri_param::<fmt::Query>(input.clone(), quote!(Self));
  let from_ref = from_uri_param::<fmt::Query>(input.clone(), quote!(&'__r Self));
- let from_mut = from_uri_param::<fmt::Query>(input.clone(), quote!(&'__r mut Self));
+ let from_mut = from_uri_param::<fmt::Query>(input, quote!(&'__r mut Self));

- let mut ts = TokenStream::from(uri_display);
- ts.extend(TokenStream::from(from_self));
- ts.extend(TokenStream::from(from_ref));
- ts.extend(TokenStream::from(from_mut));
- ts.into()
+ let mut ts = uri_display;
+ ts.extend(from_self);
+ ts.extend(from_ref);
+ ts.extend(from_mut);
+ ts
  }

  #[allow(non_snake_case)]
@@ -141,13 +141,13 @@ pub fn derive_uri_display_path(input: proc_macro::TokenStream) -> TokenStream {

  let from_self = from_uri_param::<fmt::Path>(input.clone(), quote!(Self));
  let from_ref = from_uri_param::<fmt::Path>(input.clone(), quote!(&'__r Self));
- let from_mut = from_uri_param::<fmt::Path>(input.clone(), quote!(&'__r mut Self));
+ let from_mut = from_uri_param::<fmt::Path>(input, quote!(&'__r mut Self));

- let mut ts = TokenStream::from(uri_display);
- ts.extend(TokenStream::from(from_self));
- ts.extend(TokenStream::from(from_ref));
- ts.extend(TokenStream::from(from_mut));
- ts.into()
+ let mut ts = uri_display;
+ ts.extend(from_self);
+ ts.extend(from_ref);
+ ts.extend(from_mut);
+ ts
  }

  fn from_uri_param<P: fmt::Part>(input: proc_macro::TokenStream, ty: TokenStream) -> TokenStream {
@@ -56,7 +56,7 @@ impl FromMeta for ContentType {
  fn from_meta(meta: &MetaItem) -> Result<Self> {
  http::ContentType::parse_flexible(&String::from_meta(meta)?)
  .map(ContentType)
- .ok_or(meta.value_span().error("invalid or unknown content type"))
+ .ok_or_else(|| meta.value_span().error("invalid or unknown content type"))
  }
  }
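
Note: `ok_or` → `ok_or_else` here is clippy's `or_fun_call` suggestion: the eager form builds the error value even on the success path, the lazy form only on failure. A standalone sketch (the function name is made up):

    fn expensive_error() -> String {
        println!("building error message");
        String::from("invalid or unknown content type")
    }

    fn main() {
        let parsed: Option<u32> = Some(7);

        // Eager: `expensive_error()` runs even though the value is `Some`.
        let _a: Result<u32, String> = parsed.ok_or(expensive_error());

        // Lazy: the function is only called on the `None` path.
        let _b: Result<u32, String> = parsed.ok_or_else(expensive_error);
    }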
@@ -71,7 +71,7 @@ impl ToTokens for ContentType {
  impl FromMeta for MediaType {
  fn from_meta(meta: &MetaItem) -> Result<Self> {
  let mt = http::MediaType::parse_flexible(&String::from_meta(meta)?)
- .ok_or(meta.value_span().error("invalid or unknown media type"))?;
+ .ok_or_else(|| meta.value_span().error("invalid or unknown media type"))?;

  if !mt.is_known() {
  // FIXME(diag: warning)
@@ -212,15 +212,12 @@ impl TypeExt for syn::Type {
  match ty {
  Path(t) if self.1.iter().any(|i| t.path.is_ident(*i)) => {
  self.0 = false;
- return;
  }
  ImplTrait(_) | Infer(_) | Macro(_) => {
  self.0 = false;
- return;
  }
  BareFn(_) | Never(_) => {
  self.0 = true;
- return;
  },
  _ => syn::visit::visit_type(self, ty),
  }
@@ -14,7 +14,10 @@ error[E0277]: the trait bound `Header<'_>: From<u8>` is not satisfied
  |
  = help: the following implementations were found:
  <Header<'static> as From<&Cookie<'_>>>
- <Header<'static> as From<Cookie<'_>>>
+ <Header<'static> as From<&ExpectCt>>
+ <Header<'static> as From<&Frame>>
+ <Header<'static> as From<&Hsts>>
+ and 8 others
  = note: required because of the requirements on the impl of `Into<Header<'_>>` for `u8`

  error[E0277]: the trait bound `u8: Responder<'_, '_>` is not satisfied

@@ -33,7 +36,10 @@ error[E0277]: the trait bound `Header<'_>: From<u8>` is not satisfied
  |
  = help: the following implementations were found:
  <Header<'static> as From<&Cookie<'_>>>
- <Header<'static> as From<Cookie<'_>>>
+ <Header<'static> as From<&ExpectCt>>
+ <Header<'static> as From<&Frame>>
+ <Header<'static> as From<&Hsts>>
+ and 8 others
  = note: required because of the requirements on the impl of `Into<Header<'_>>` for `u8`

  error[E0277]: the trait bound `u8: Responder<'_, '_>` is not satisfied

@@ -44,7 +50,10 @@ error[E0277]: the trait bound `Header<'_>: From<std::string::String>` is not satisfied
  |
  = help: the following implementations were found:
  <Header<'static> as From<&Cookie<'_>>>
- <Header<'static> as From<Cookie<'_>>>
+ <Header<'static> as From<&ExpectCt>>
+ <Header<'static> as From<&Frame>>
+ <Header<'static> as From<&Hsts>>
+ and 8 others
  = note: required because of the requirements on the impl of `Into<Header<'_>>` for `std::string::String`

  error[E0277]: the trait bound `usize: Responder<'_, '_>` is not satisfied
@@ -14,7 +14,10 @@ error[E0277]: the trait bound `Header<'_>: From<u8>` is not satisfied
  |
  = help: the following implementations were found:
  <Header<'static> as From<&Cookie<'_>>>
- <Header<'static> as From<Cookie<'_>>>
+ <Header<'static> as From<&ExpectCt>>
+ <Header<'static> as From<&Frame>>
+ <Header<'static> as From<&Hsts>>
+ and 8 others
  = note: required because of the requirements on the impl of `Into<Header<'_>>` for `u8`

  error[E0277]: the trait bound `u8: Responder<'_, '_>` is not satisfied

@@ -33,7 +36,10 @@ error[E0277]: the trait bound `Header<'_>: From<u8>` is not satisfied
  |
  = help: the following implementations were found:
  <Header<'static> as From<&Cookie<'_>>>
- <Header<'static> as From<Cookie<'_>>>
+ <Header<'static> as From<&ExpectCt>>
+ <Header<'static> as From<&Frame>>
+ <Header<'static> as From<&Hsts>>
+ and 8 others
  = note: required because of the requirements on the impl of `Into<Header<'_>>` for `u8`

  error[E0277]: the trait bound `u8: Responder<'_, '_>` is not satisfied

@@ -44,7 +50,10 @@ error[E0277]: the trait bound `Header<'_>: From<std::string::String>` is not satisfied
  |
  = help: the following implementations were found:
  <Header<'static> as From<&Cookie<'_>>>
- <Header<'static> as From<Cookie<'_>>>
+ <Header<'static> as From<&ExpectCt>>
+ <Header<'static> as From<&Frame>>
+ <Header<'static> as From<&Hsts>>
+ and 8 others
  = note: required because of the requirements on the impl of `Into<Header<'_>>` for `std::string::String`

  error[E0277]: the trait bound `usize: Responder<'_, '_>` is not satisfied
@@ -210,7 +210,7 @@ impl Accept {
  /// assert_eq!(iter.next(), None);
  /// ```
  #[inline(always)]
- pub fn iter<'a>(&'a self) -> impl Iterator<Item=&'a QMediaType> + 'a {
+ pub fn iter(&self) -> impl Iterator<Item=&'_ QMediaType> + '_ {
  match self.0 {
  AcceptParams::Static(ref val) => Either::Left(Some(val).into_iter()),
  AcceptParams::Dynamic(ref vec) => Either::Right(vec.iter())
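
Note: dropping the explicit `'a` in favor of `'_` is in the spirit of clippy's `needless_lifetimes`: with a single `&self` input, elision pins the returned iterator to the same borrow. A sketch with an invented type:

    struct Bag(Vec<String>);

    impl Bag {
        // Explicit lifetime, spelled out by hand.
        fn names_explicit<'a>(&'a self) -> impl Iterator<Item = &'a str> + 'a {
            self.0.iter().map(|s| s.as_str())
        }

        // Elided: `'_` lets the compiler infer the same relationship.
        fn names(&self) -> impl Iterator<Item = &'_ str> + '_ {
            self.0.iter().map(|s| s.as_str())
        }
    }

    fn main() {
        let bag = Bag(vec!["json".into(), "html".into()]);
        assert_eq!(bag.names_explicit().count(), bag.names().count());
    }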
@@ -239,7 +239,7 @@ impl Accept {
  /// assert_eq!(iter.next(), None);
  /// ```
  #[inline(always)]
- pub fn media_types<'a>(&'a self) -> impl Iterator<Item=&'a MediaType> + 'a {
+ pub fn media_types(&self) -> impl Iterator<Item=&'_ MediaType> + '_ {
  self.iter().map(|weighted_mt| weighted_mt.media_type())
  }
@@ -285,10 +285,10 @@ impl FromStr for Accept {

  /// Creates a new `Header` with name `Accept` and the value set to the HTTP
  /// rendering of this `Accept` header.
- impl Into<Header<'static>> for Accept {
+ impl From<Accept> for Header<'static> {
  #[inline(always)]
- fn into(self) -> Header<'static> {
- Header::new("Accept", self.to_string())
+ fn from(val: Accept) -> Self {
+ Header::new("Accept", val.to_string())
  }
  }
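
Note: replacing `impl Into<Header<'static>> for Accept` with `impl From<Accept> for Header<'static>` follows clippy's `from_over_into`: implementing `From` gives the matching `Into` for free via the standard library's blanket impl. A sketch with invented types:

    struct Name(String);
    struct Label(String);

    impl From<Name> for Label {
        fn from(name: Name) -> Self {
            Label(name.0)
        }
    }

    fn main() {
        // `.into()` still works: it is provided automatically once `From` exists.
        let label: Label = Name(String::from("accept")).into();
        assert_eq!(label.0, "accept");
    }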
@@ -380,47 +380,47 @@ impl Extend<QMediaType> for AcceptParams {
  mod test {
  use crate::{Accept, MediaType};

- macro_rules! assert_preference {
- ($string:expr, $expect:expr) => (
- let accept: Accept = $string.parse().expect("accept string parse");
- let expected: MediaType = $expect.parse().expect("media type parse");
- let preferred = accept.preferred();
- assert_eq!(preferred.media_type().to_string(), expected.to_string());
- )
+ #[track_caller]
+ fn assert_preference(string: &str, expect: &str) {
+ let accept: Accept = string.parse().expect("accept string parse");
+ let expected: MediaType = expect.parse().expect("media type parse");
+ let preferred = accept.preferred();
+ let actual = preferred.media_type();
+ if *actual != expected {
+ panic!("mismatch for {}: expected {}, got {}", string, expected, actual)
+ }
  }

  #[test]
  fn test_preferred() {
- assert_preference!("text/*", "text/*");
- assert_preference!("text/*, text/html", "text/html");
- assert_preference!("text/*; q=0.1, text/html", "text/html");
- assert_preference!("text/*; q=1, text/html", "text/html");
- assert_preference!("text/html, text/*", "text/html");
- assert_preference!("text/*, text/html", "text/html");
- assert_preference!("text/html, text/*; q=1", "text/html");
- assert_preference!("text/html; q=1, text/html", "text/html");
- assert_preference!("text/html, text/*; q=0.1", "text/html");
+ assert_preference("text/*", "text/*");
+ assert_preference("text/*, text/html", "text/html");
+ assert_preference("text/*; q=0.1, text/html", "text/html");
+ assert_preference("text/*; q=1, text/html", "text/html");
+ assert_preference("text/html, text/*", "text/html");
+ assert_preference("text/*, text/html", "text/html");
+ assert_preference("text/html, text/*; q=1", "text/html");
+ assert_preference("text/html; q=1, text/html", "text/html");
+ assert_preference("text/html, text/*; q=0.1", "text/html");

- assert_preference!("text/html, application/json", "text/html");
- assert_preference!("text/html, application/json; q=1", "text/html");
- assert_preference!("application/json; q=1, text/html", "text/html");
+ assert_preference("text/html, application/json", "text/html");
+ assert_preference("text/html, application/json; q=1", "text/html");
+ assert_preference("application/json; q=1, text/html", "text/html");

- assert_preference!("text/*, application/json", "application/json");
- assert_preference!("*/*, text/*", "text/*");
- assert_preference!("*/*, text/*, text/plain", "text/plain");
+ assert_preference("text/*, application/json", "application/json");
+ assert_preference("*/*, text/*", "text/*");
+ assert_preference("*/*, text/*, text/plain", "text/plain");

- assert_preference!("a/b; q=0.1, a/b; q=0.2", "a/b; q=0.2");
- assert_preference!("a/b; q=0.1, b/c; q=0.2", "b/c; q=0.2");
- assert_preference!("a/b; q=0.5, b/c; q=0.2", "a/b; q=0.5");
+ assert_preference("a/b; q=0.1, a/b; q=0.2", "a/b; q=0.2");
+ assert_preference("a/b; q=0.1, b/c; q=0.2", "b/c; q=0.2");
+ assert_preference("a/b; q=0.5, b/c; q=0.2", "a/b; q=0.5");

- assert_preference!("a/b; q=0.5, b/c; q=0.2, c/d", "c/d");
- assert_preference!("a/b; q=0.5; v=1, a/b", "a/b");
+ assert_preference("a/b; q=0.5, b/c; q=0.2, c/d", "c/d");
+ assert_preference("a/b; q=0.5; v=1, a/b", "a/b");

- assert_preference!("a/b; v=1, a/b; v=1; c=2", "a/b; v=1; c=2");
- assert_preference!("a/b; v=1; c=2, a/b; v=1", "a/b; v=1; c=2");
- assert_preference!("a/b; q=0.5; v=1, a/b; q=0.5; v=1; c=2",
- "a/b; q=0.5; v=1; c=2");
- assert_preference!("a/b; q=0.6; v=1, a/b; q=0.5; v=1; c=2",
- "a/b; q=0.6; v=1");
+ assert_preference("a/b; v=1, a/b; v=1; c=2", "a/b; v=1; c=2");
+ assert_preference("a/b; v=1; c=2, a/b; v=1", "a/b; v=1; c=2");
+ assert_preference("a/b; q=0.5; v=1, a/b; q=0.5; v=1; c=2", "a/b; q=0.5; v=1; c=2");
+ assert_preference("a/b; q=0.6; v=1, a/b; q=0.5; v=1; c=2", "a/b; q=0.6; v=1");
  }
  }
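
Note: the test macro above is replaced by a `#[track_caller]` function, which keeps the macro's main benefit: a failing assertion is reported at the call site rather than inside the helper. A minimal illustration (not Rocket code):

    #[track_caller]
    fn assert_positive(n: i32) {
        if n <= 0 {
            // With #[track_caller], this location is the caller's line.
            panic!("expected a positive number, got {} (at {})",
                   n, std::panic::Location::caller());
        }
    }

    fn main() {
        assert_positive(3);
        // assert_positive(-1); // would panic and point at this line
    }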
@@ -382,13 +382,13 @@ impl fmt::Display for ContentType {

  /// Creates a new `Header` with name `Content-Type` and the value set to the
  /// HTTP rendering of this Content-Type.
- impl Into<Header<'static>> for ContentType {
+ impl From<ContentType> for Header<'static> {
  #[inline(always)]
- fn into(self) -> Header<'static> {
- if let Some(src) = self.known_source() {
+ fn from(content_type: ContentType) -> Self {
+ if let Some(src) = content_type.known_source() {
  Header::new("Content-Type", src)
  } else {
- Header::new("Content-Type", self.to_string())
+ Header::new("Content-Type", content_type.to_string())
  }
  }
  }
@@ -546,9 +546,9 @@ impl MediaType {
  /// assert_eq!(png.params().count(), 0);
  /// ```
  #[inline]
- pub fn params<'a>(&'a self) -> impl Iterator<Item=(&'a UncasedStr, &'a str)> + 'a {
+ pub fn params(&self) -> impl Iterator<Item=(&'_ UncasedStr, &'_ str)> + '_ {
  let raw = match self.params {
- MediaParams::Static(ref slice) => Either::Left(slice.iter().cloned()),
+ MediaParams::Static(slice) => Either::Left(slice.iter().cloned()),
  MediaParams::Dynamic(ref vec) => {
  Either::Right(vec.iter().map(move |&(ref key, ref val)| {
  let source_str = self.source.as_str();
@@ -49,7 +49,7 @@ impl<A, T: ?Sized + ToOwned> From<Extent<A>> for Indexed<'_, T> {
  impl<'a, T: ?Sized + ToOwned + 'a> From<Cow<'a, T>> for Indexed<'a, T> {
  #[inline(always)]
  fn from(value: Cow<'a, T>) -> Indexed<'a, T> {
- Indexed::Concrete(value.into())
+ Indexed::Concrete(value)
  }
  }
@@ -1,4 +1,4 @@
- use std::fmt;
+ use std::{fmt, convert};
  use std::borrow::Cow;

  use pear::error::Expected;
@@ -21,7 +21,7 @@ pub struct Error<'a> {
  #[doc(hidden)]
  impl<'a> From<ParseError<RawInput<'a>>> for Error<'a> {
  fn from(inner: ParseError<RawInput<'a>>) -> Self {
- let expected = inner.error.map(|t| t.into(), |v| v.values.into());
+ let expected = inner.error.map(convert::identity, |v| v.values.into());
  Error { expected, index: inner.info.context.start }
  }
  }
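
Note: the closure `|t| t.into()` was converting a value to its own type, so it is replaced with `std::convert::identity`, a named no-op function. A loose sketch of using `identity` where a pass-through function is required:

    use std::convert;

    fn main() {
        let doubled: Vec<u32> = vec![1u32, 2, 3]
            .into_iter()
            .map(convert::identity) // explicit "do nothing" stage
            .map(|n| n * 2)
            .collect();

        assert_eq!(doubled, vec![2, 4, 6]);
    }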
@@ -52,7 +52,7 @@ pub fn asterisk<'a>(input: &mut RawInput<'a>) -> Result<'a, Asterisk> {
  #[parser]
  pub fn origin<'a>(input: &mut RawInput<'a>) -> Result<'a, Origin<'a>> {
  let (_, path, query) = (peek(b'/')?, path()?, query()?);
- unsafe { Origin::raw(input.start.into(), path.into(), query) }
+ unsafe { Origin::raw(input.start.into(), path, query) }
  }

  #[parser]
@@ -114,7 +114,7 @@ impl RawStr {
  /// let cow_raw = RawStr::from_cow_str(cow_str);
  /// assert_eq!(cow_raw.as_str(), "hello!");
  /// ```
- pub fn from_cow_str<'a>(cow: Cow<'a, str>) -> Cow<'a, RawStr> {
+ pub fn from_cow_str(cow: Cow<'_, str>) -> Cow<'_, RawStr> {
  match cow {
  Cow::Borrowed(b) => Cow::Borrowed(b.into()),
  Cow::Owned(b) => Cow::Owned(b.into()),
@@ -136,7 +136,7 @@ impl RawStr {
  /// let cow_str = RawStr::into_cow_str(cow_raw);
  /// assert_eq!(&*cow_str, "hello!");
  /// ```
- pub fn into_cow_str<'a>(cow: Cow<'a, RawStr>) -> Cow<'a, str> {
+ pub fn into_cow_str(cow: Cow<'_, RawStr>) -> Cow<'_, str> {
  match cow {
  Cow::Borrowed(b) => Cow::Borrowed(b.as_str()),
  Cow::Owned(b) => Cow::Owned(b.into_string()),
@@ -350,7 +350,7 @@ impl<A, T: FromUriParam<fmt::Query, A>> FromUriParam<fmt::Query, Option<A>> for

  #[inline(always)]
  fn from_uri_param(param: Option<A>) -> Self::Target {
- param.map(|a| T::from_uri_param(a))
+ param.map(T::from_uri_param)
  }
  }

@@ -359,7 +359,7 @@ impl<A, E, T: FromUriParam<fmt::Query, A>> FromUriParam<fmt::Query, Option<A>> f

  #[inline(always)]
  fn from_uri_param(param: Option<A>) -> Self::Target {
- param.map(|a| T::from_uri_param(a))
+ param.map(T::from_uri_param)
  }
  }

@@ -368,7 +368,7 @@ impl<A, E, T: FromUriParam<fmt::Query, A>> FromUriParam<fmt::Query, Result<A, E>

  #[inline(always)]
  fn from_uri_param(param: Result<A, E>) -> Self::Target {
- param.map(|a| T::from_uri_param(a))
+ param.map(T::from_uri_param)
  }
  }

@@ -377,6 +377,6 @@ impl<A, E, T: FromUriParam<fmt::Query, A>> FromUriParam<fmt::Query, Result<A, E>

  #[inline(always)]
  fn from_uri_param(param: Result<A, E>) -> Self::Target {
- param.map(|a| T::from_uri_param(a))
+ param.map(T::from_uri_param)
  }
  }
@@ -323,7 +323,7 @@ impl<'a> Origin<'a> {
  where F: FnOnce(&'s RawStr) -> P, P: Into<RawStrBuf> + 's
  {
  let path = f(self.path().raw()).into();
- if !path.starts_with('/') || !path.as_bytes().iter().all(|b| is_pchar(&b)) {
+ if !path.starts_with('/') || !path.as_bytes().iter().all(is_pchar) {
  return None;
  }
@@ -59,7 +59,7 @@ fn decode_to_indexed_str<P: fmt::Part>(
  Cow::Borrowed(b) if indexed.is_indexed() => {
  let indexed = IndexedStr::checked_from(b, source.as_str());
  debug_assert!(indexed.is_some());
- indexed.unwrap_or(IndexedStr::from(Cow::Borrowed("")))
+ indexed.unwrap_or_else(|| IndexedStr::from(Cow::Borrowed("")))
  }
  cow => IndexedStr::from(Cow::Owned(cow.into_owned())),
  }
@@ -77,7 +77,7 @@ impl<'a> Path<'a> {
  /// assert_eq!(uri.path().raw(), "/foo%20bar%2dbaz");
  /// ```
  pub fn raw(&self) -> &'a RawStr {
- self.data.value.from_cow_source(&self.source).into()
+ self.data.value.from_cow_source(self.source).into()
  }

  /// Returns the raw, undecoded path value as an `&str`.
@@ -103,7 +103,7 @@ impl<'a> Path<'a> {
  }

  /// Normalizes `self`. If `absolute`, a starting `/` is required.
- pub(crate) fn to_normalized(&self, absolute: bool) -> Data<'static, fmt::Path> {
+ pub(crate) fn to_normalized(self, absolute: bool) -> Data<'static, fmt::Path> {
  let mut path = String::with_capacity(self.raw().len());
  let absolute = absolute || self.raw().starts_with('/');
  for (i, seg) in self.raw_segments().filter(|s| !s.is_empty()).enumerate() {
@@ -199,7 +199,7 @@ impl<'a> Query<'a> {
  /// assert_eq!(uri.query().unwrap().raw(), "baz+bar");
  /// ```
  pub fn raw(&self) -> &'a RawStr {
- self.data.value.from_cow_source(&self.source).into()
+ self.data.value.from_cow_source(self.source).into()
  }

  /// Returns the raw, undecoded query value as an `&str`.
@@ -222,7 +222,7 @@ impl<'a> Query<'a> {
  }

  /// Normalizes `self`.
- pub(crate) fn to_normalized(&self) -> Option<Data<'static, fmt::Query>> {
+ pub(crate) fn to_normalized(self) -> Option<Data<'static, fmt::Query>> {
  let mut query = String::with_capacity(self.raw().len());
  for (i, seg) in self.raw_segments().filter(|s| !s.is_empty()).enumerate() {
  if i != 0 { query.push('&'); }
@@ -92,7 +92,7 @@ impl<'a> Reference<'a> {
  Reference {
  source: Some(as_utf8_unchecked(source)),
  scheme: scheme.map(|s| s.into()),
- authority: authority.map(|s| s.into()),
+ authority,
  path: Data::raw(path),
  query: query.map(Data::raw),
  fragment: fragment.map(|f| f.into()),
@@ -154,7 +154,7 @@ impl<'a> Segments<'a, Path> {
  /// assert!(b.path().segments().skip(1).prefix_of(a.path().segments()));
  /// ```
  #[inline]
- pub fn prefix_of<'b>(self, other: Segments<'b, Path>) -> bool {
+ pub fn prefix_of(self, other: Segments<'_, Path>) -> bool {
  if self.len() > other.len() {
  return false;
  }
@@ -3,19 +3,19 @@
  use yansi::{Paint, Color::{Red, Yellow}};

  fn main() {
- const MIN_VERSION: &'static str = "1.46.0";
+ const MIN_VERSION: &str = "1.46.0";

  if let Some(version) = version_check::Version::read() {
  if !version.at_least(MIN_VERSION) {
  let msg = "Rocket requires a more recent version of rustc.";
  eprintln!("{} {}", Red.paint("Error:").bold(), Paint::new(msg).bold());
- eprintln!("Installed version: {}", Yellow.paint(format!("{}", version)));
- eprintln!("Minimum required: {}", Yellow.paint(format!("{}", MIN_VERSION)));
+ eprintln!("Installed version: {}", Yellow.paint(version));
+ eprintln!("Minimum required: {}", Yellow.paint(MIN_VERSION));
  panic!("Aborting compilation due to incompatible compiler.")
  }
  } else {
- println!("cargo:warning={}", "Rocket was unable to check rustc compiler compatibility.");
- println!("cargo:warning={}", "Build may fail due to incompatible rustc version.");
+ println!("cargo:warning=Rocket was unable to check rustc compiler compatibility.");
+ println!("cargo:warning=Build may fail due to incompatible rustc version.");
  }

  if let Some(true) = version_check::is_feature_flaggable() {
@@ -180,8 +180,8 @@ impl TlsConfig {
  /// ```
  pub fn from_bytes(certs: &[u8], key: &[u8]) -> Self {
  TlsConfig {
- certs: Either::Right(certs.to_vec().into()),
- key: Either::Right(key.to_vec().into()),
+ certs: Either::Right(certs.to_vec()),
+ key: Either::Right(key.to_vec()),
  ciphers: CipherSuite::default_set(),
  prefer_server_cipher_order: Default::default(),
  }
@@ -267,7 +267,7 @@ impl<'a> CookieJar<'a> {
  }

  drop(ops);
- self.get(name).map(|c| c.clone())
+ self.get(name).cloned()
  }

  /// Retrives the _original or pending_ `Cookie` inside this collection with
@@ -448,7 +448,7 @@ impl<'a> CookieJar<'a> {
  /// TODO: This could be faster by just returning the cookies directly via
  /// an ordered hash-set of sorts.
  pub(crate) fn take_delta_jar(&self) -> cookie::CookieJar {
- let ops = std::mem::replace(&mut *self.ops.lock(), Vec::new());
+ let ops = std::mem::take(&mut *self.ops.lock());
  let mut jar = cookie::CookieJar::new();

  for op in ops {
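
Note: `mem::replace(&mut x, Vec::new())` → `mem::take(&mut x)` is the usual clippy cleanup when the replacement value is just the type's default. Sketch:

    fn main() {
        let mut ops: Vec<&str> = vec!["add", "remove"];

        // Replace with an explicitly constructed empty Vec...
        let drained_a = std::mem::replace(&mut ops, Vec::new());
        assert!(ops.is_empty());

        ops.push("set");

        // ...or let `take` swap in `Default::default()` for us.
        let drained_b = std::mem::take(&mut ops);
        assert!(ops.is_empty());

        assert_eq!((drained_a.len(), drained_b.len()), (2, 1));
    }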
@@ -203,7 +203,7 @@ impl Limits {
  let name = name.into();
  match self.limits.binary_search_by(|(k, _)| k.cmp(&name)) {
  Ok(i) => self.limits[i].1 = limit,
- Err(i) => self.limits.insert(i, (name.into(), limit))
+ Err(i) => self.limits.insert(i, (name, limit))
  }

  self
@@ -821,9 +821,9 @@ impl fmt::Display for ErrorKind<'_> {
  }
  }
  ErrorKind::InvalidChoice { choices } => {
- match choices.as_ref() {
- &[] => write!(f, "invalid choice")?,
- &[ref choice] => write!(f, "expected {}", choice)?,
+ match *choices.as_ref() {
+ [] => write!(f, "invalid choice")?,
+ [ref choice] => write!(f, "expected {}", choice)?,
  _ => {
  write!(f, "expected one of ")?;
  for (i, choice) in choices.iter().enumerate() {
@@ -154,7 +154,7 @@ impl<'v> NameView<'v> {
  /// assert_eq!(view.key(), None);
  /// ```
  pub fn shift(&mut self) {
- const START_DELIMS: &'static [char] = &['.', '['];
+ const START_DELIMS: &[char] = &['.', '['];

  let string = &self.name[self.end..];
  let bytes = string.as_bytes();
@@ -216,7 +216,7 @@ impl Buffer {
  }
  }

- pub fn push_one<'a, S: Into<String>>(&'a self, string: S) -> &'a str {
+ pub fn push_one<S: Into<String>>(&self, string: S) -> &str {
  // SAFETY:
  // * Aliasing: We retrieve a mutable reference to the last slot (via
  // `push()`) and then return said reference as immutable; these
@@ -329,7 +329,7 @@ impl<L, T: Len<L> + ?Sized> Len<L> for &T {
 }

 impl<L, T: Len<L>> Len<L> for Option<T> {
-    fn len(&self) -> L { self.as_ref().map(|v| v.len()).unwrap_or(T::zero_len()) }
+    fn len(&self) -> L { self.as_ref().map(|v| v.len()).unwrap_or_else(T::zero_len) }
     fn len_into_u64(len: L) -> u64 { T::len_into_u64(len) }
     fn zero_len() -> L { T::zero_len() }
 }
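`unwrap_or(T::zero_len())` evaluates the fallback call even when the `Option` is `Some`; passing the function itself to `unwrap_or_else` defers that work, which is what clippy's `or_fun_call` lint suggests. A hedged sketch with a deliberately non-trivial fallback:

```rust
fn expensive_default() -> u64 {
    // Stand-in for a fallback that does real work.
    (1..=1_000u64).sum()
}

fn value_or_default(v: Option<u64>) -> u64 {
    // `unwrap_or(expensive_default())` would compute the sum unconditionally;
    // `unwrap_or_else` only calls it on `None`.
    v.unwrap_or_else(expensive_default)
}

fn main() {
    assert_eq!(value_or_default(Some(7)), 7);
    assert_eq!(value_or_default(None), 500_500);
}
```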
@@ -154,7 +154,7 @@ impl FileName {
         let file_name = std::path::Path::new(&self.0)
             .file_name()
             .and_then(|n| n.to_str())
-            .and_then(|n| n.split(bad_char).filter(|s| !s.is_empty()).next())?;
+            .and_then(|n| n.split(bad_char).find(|s| !s.is_empty()))?;

         // At this point, `file_name` can't contain `bad_chars` because of
         // `.split()`, but it can be empty or reserved.
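`.filter(p).next()` and `.find(p)` are equivalent, and clippy's `filter_next` lint prefers the single call. A small sketch (illustrative only):

```rust
fn first_non_empty<'a>(mut parts: impl Iterator<Item = &'a str>) -> Option<&'a str> {
    // Same as `parts.filter(|s| !s.is_empty()).next()`, in one call.
    parts.find(|s| !s.is_empty())
}

fn main() {
    let name = "..tar.gz";
    assert_eq!(first_non_empty(name.split('.')), Some("tar"));
}
```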
@@ -180,10 +180,10 @@ impl FileServer {
     }
 }

-impl Into<Vec<Route>> for FileServer {
-    fn into(self) -> Vec<Route> {
-        let source = figment::Source::File(self.root.clone());
-        let mut route = Route::ranked(self.rank, Method::Get, "/<path..>", self);
+impl From<FileServer> for Vec<Route> {
+    fn from(server: FileServer) -> Self {
+        let source = figment::Source::File(server.root.clone());
+        let mut route = Route::ranked(server.rank, Method::Get, "/<path..>", server);
         route.name = Some(format!("FileServer: {}", source).into());
         vec![route]
     }
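Clippy's `from_over_into` lint drives this and the later header-policy conversions: implementing `From<A> for B` also provides `Into<B> for A` through the standard library's blanket impl, while a hand-written `Into` gives you nothing extra. A standalone sketch of the same flip, using toy units rather than Rocket's types:

```rust
struct Miles(f64);
struct Kilometers(f64);

// Prefer `From`; the blanket `impl<T, U: From<T>> Into<U> for T` then
// provides `Into<Kilometers> for Miles` automatically.
impl From<Miles> for Kilometers {
    fn from(miles: Miles) -> Self {
        Kilometers(miles.0 * 1.609_344)
    }
}

fn main() {
    let km: Kilometers = Miles(10.0).into(); // works via the blanket impl
    assert!((km.0 - 16.093_44).abs() < 1e-9);
}
```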
@@ -60,7 +60,7 @@ impl Client {
     ) -> Result<Client, Error> {
         let rocket = rocket.local_launch().await?;
         let cookies = RwLock::new(cookie::CookieJar::new());
-        Ok(Client { rocket, tracked, cookies })
+        Ok(Client { rocket, cookies, tracked })
     }

     // WARNING: This is unstable! Do not use this method outside of Rocket!
@@ -114,7 +114,7 @@ macro_rules! pub_client_impl {

     /// Deprecated alias to [`Client::tracked()`].
     #[deprecated(
-        since = "0.5",
+        since = "0.5.0",
         note = "choose between `Client::untracked()` and `Client::tracked()`"
     )]
     pub $($prefix)? fn new<P: Phase>(rocket: Rocket<P>) -> Result<Self, Error> {
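The `since` bump from `"0.5"` to `"0.5.0"` is presumably for clippy's `deprecated_semver` lint, which expects a full semantic version in the `deprecated` attribute. A minimal sketch with made-up function names:

```rust
// `since` must be a complete semver version ("0.5.0", not "0.5").
#[allow(dead_code)]
#[deprecated(since = "0.5.0", note = "use `new_api` instead")]
fn old_api() {}

fn new_api() {}

fn main() {
    new_api();
}
```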
@@ -47,7 +47,7 @@ impl Request<'_> {
             method: Atomic::new(self.method()),
             uri: self.uri.clone(),
             headers: self.headers.clone(),
-            remote: self.remote.clone(),
+            remote: self.remote,
             state: self.state.clone(),
         }
     }
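The `remote` field here is presumably a `Copy` type such as `Option<SocketAddr>`, so the `.clone()` was a no-op that clippy's `clone_on_copy` lint flags. A sketch under that assumption:

```rust
use std::net::{IpAddr, Ipv4Addr, SocketAddr};

fn snapshot(remote: &Option<SocketAddr>) -> Option<SocketAddr> {
    // `Option<SocketAddr>` is `Copy`, so a plain copy replaces `.clone()`.
    *remote
}

fn main() {
    let addr = Some(SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 8000));
    assert_eq!(snapshot(&addr), addr);
}
```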
@@ -837,12 +837,12 @@ impl<'r> Request<'r> {
     ) -> Result<Request<'r>, Error<'r>> {
         // Ensure that the method is known. TODO: Allow made-up methods?
         let method = Method::from_hyp(&hyper.method)
-            .ok_or_else(|| Error::BadMethod(&hyper.method))?;
+            .ok_or(Error::BadMethod(&hyper.method))?;

         // In debug, make sure we agree with Hyper. Otherwise, cross our fingers
         // and trust that it only gives us valid URIs like it's supposed to.
         // TODO: Keep around not just the path/query, but the rest, if there?
-        let uri = hyper.uri.path_and_query().ok_or_else(|| Error::InvalidUri(&hyper.uri))?;
+        let uri = hyper.uri.path_and_query().ok_or(Error::InvalidUri(&hyper.uri))?;
         debug_assert!(Origin::parse(uri.as_str()).is_ok());
         let uri = Origin::new(uri.path(), uri.query().map(Cow::Borrowed));
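These go the opposite way from the `unwrap_or_else` changes: when the fallback is just a cheap enum construction, the closure buys nothing, so clippy's `unnecessary_lazy_evaluations` lint prefers the eager `ok_or`/`unwrap_or` form. A hedged sketch:

```rust
#[derive(Debug, PartialEq)]
enum ParseError {
    Missing,
}

fn require(field: Option<u32>) -> Result<u32, ParseError> {
    // `ParseError::Missing` is free to construct, so there is no need for
    // the lazy `ok_or_else(|| ParseError::Missing)` form.
    field.ok_or(ParseError::Missing)
}

fn main() {
    assert_eq!(require(Some(3)), Ok(3));
    assert_eq!(require(None), Err(ParseError::Missing));
}
```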
@@ -132,7 +132,7 @@ impl<'r> Body<'r> {
     }

     pub(crate) fn strip(&mut self) {
-        let body = std::mem::replace(self, Body::default());
+        let body = std::mem::take(self);
         *self = match body.inner {
             Inner::Seekable(b) | Inner::Phantom(b) => Body {
                 size: body.size,
@@ -322,7 +322,7 @@ impl<'r> Body<'r> {
     /// ```
     #[inline(always)]
     pub fn take(&mut self) -> Self {
-        std::mem::replace(self, Body::default())
+        std::mem::take(self)
     }

     /// Reads all of `self` into a vector of bytes, consuming the contents.
@@ -337,7 +337,7 @@ impl Event {
             Some(RawLinedEvent::raw("")),
         ];

-        stream::iter(array::IntoIter::new(events)).filter_map(|v| ready(v))
+        stream::iter(array::IntoIter::new(events)).filter_map(ready)
     }
 }
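Passing `ready` directly instead of wrapping it in `|v| ready(v)` follows clippy's `redundant_closure` lint: a closure that only forwards its argument to a function can be replaced by the function itself. A toy sketch:

```rust
fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    let xs = vec![1, 2, 3];
    // `map(double)` replaces the redundant closure `map(|x| double(x))`.
    let doubled: Vec<i32> = xs.into_iter().map(double).collect();
    assert_eq!(doubled, vec![2, 4, 6]);
}
```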
@@ -488,7 +488,7 @@ impl Rocket<Build> {

         if config.secret_key.is_zero() {
             config.secret_key = crate::config::SecretKey::generate()
-                .unwrap_or(crate::config::SecretKey::zero());
+                .unwrap_or_else(crate::config::SecretKey::zero);
         }
     };
@@ -123,7 +123,7 @@ impl<'a> RouteUri<'a> {
         let source = origin.to_string().into();
         let metadata = Metadata::from(&base, &origin);

-        Ok(RouteUri { source, unmounted_origin, base, origin, metadata })
+        Ok(RouteUri { source, base, unmounted_origin, origin, metadata })
     }

     /// Create a new `RouteUri`.
@@ -287,8 +287,8 @@ impl Metadata {
         let trailing_path = path_segs.last().map_or(false, |p| p.trailing);

         Metadata {
-            static_query_fields, path_color, query_color, trailing_path,
-            path_segs, query_segs, base_segs,
+            base_segs, path_segs, query_segs, static_query_fields,
+            path_color, query_color, trailing_path,
         }
     }
 }
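The field re-orderings in `Client`, `RouteUri`, and `Metadata` likely satisfy clippy's `inconsistent_struct_constructor` lint, which wants struct literals to list fields in declaration order. A minimal sketch with made-up fields:

```rust
struct Metadata {
    base: usize,
    path: usize,
    query: usize,
}

fn build(path: usize, query: usize, base: usize) -> Metadata {
    // Fields listed in the same order as the struct declaration, regardless
    // of the order the values were computed in.
    Metadata { base, path, query }
}

fn main() {
    let m = build(1, 2, 3);
    assert_eq!((m.base, m.path, m.query), (3, 1, 2));
}
```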
@@ -279,7 +279,7 @@ impl Rocket<Orbit> {

             let name = route.name.as_deref();
             let outcome = handle(name, || route.handler.handle(request, data)).await
-                .unwrap_or_else(|| Outcome::Failure(Status::InternalServerError));
+                .unwrap_or(Outcome::Failure(Status::InternalServerError));

             // Check if the request processing completed (Some) or if the
             // request needs to be forwarded. If it does, continue the loop
@@ -156,9 +156,9 @@ impl Default for Referrer {
     }
 }

-impl Into<Header<'static>> for &Referrer {
-    fn into(self) -> Header<'static> {
-        let policy_string = match self {
+impl From<&Referrer> for Header<'static> {
+    fn from(referrer: &Referrer) -> Self {
+        let policy_string = match referrer {
             Referrer::NoReferrer => "no-referrer",
             Referrer::NoReferrerWhenDowngrade => "no-referrer-when-downgrade",
             Referrer::Origin => "origin",
@@ -217,9 +217,9 @@ impl Default for ExpectCt {
     }
 }

-impl Into<Header<'static>> for &ExpectCt {
-    fn into(self) -> Header<'static> {
-        let policy_string = match self {
+impl From<&ExpectCt> for Header<'static> {
+    fn from(expect: &ExpectCt) -> Self {
+        let policy_string = match expect {
             ExpectCt::Enforce(age) => format!("max-age={}, enforce", age.whole_seconds()),
             ExpectCt::Report(age, uri) => {
                 format!(r#"max-age={}, report-uri="{}""#, age.whole_seconds(), uri)
@@ -251,8 +251,8 @@ impl Default for NoSniff {
     }
 }

-impl Into<Header<'static>> for &NoSniff {
-    fn into(self) -> Header<'static> {
+impl From<&NoSniff> for Header<'static> {
+    fn from(_: &NoSniff) -> Self {
         Header::new(NoSniff::NAME, "nosniff")
     }
 }
@@ -301,9 +301,9 @@ impl Default for Hsts {
     }
 }

-impl Into<Header<'static>> for &Hsts {
-    fn into(self) -> Header<'static> {
-        if self == &Hsts::default() {
+impl From<&Hsts> for Header<'static> {
+    fn from(hsts: &Hsts) -> Self {
+        if hsts == &Hsts::default() {
             static DEFAULT: Header<'static> = Header {
                 name: Uncased::from_borrowed(Hsts::NAME),
                 value: Cow::Borrowed("max-age=31536000")
@@ -312,7 +312,7 @@ impl Into<Header<'static>> for &Hsts {
             return DEFAULT.clone();
         }

-        let policy_string = match self {
+        let policy_string = match hsts {
             Hsts::Enable(age) => format!("max-age={}", age.whole_seconds()),
             Hsts::IncludeSubDomains(age) => {
                 format!("max-age={}; includeSubDomains", age.whole_seconds())
@@ -353,9 +353,9 @@ impl Default for Frame {
     }
 }

-impl Into<Header<'static>> for &Frame {
-    fn into(self) -> Header<'static> {
-        let policy_string: &'static str = match self {
+impl From<&Frame> for Header<'static> {
+    fn from(frame: &Frame) -> Self {
+        let policy_string: &'static str = match frame {
             Frame::Deny => "DENY",
             Frame::SameOrigin => "SAMEORIGIN",
         };
@@ -389,9 +389,9 @@ impl Default for XssFilter {
     }
 }

-impl Into<Header<'static>> for &XssFilter {
-    fn into(self) -> Header<'static> {
-        let policy_string: &'static str = match self {
+impl From<&XssFilter> for Header<'static> {
+    fn from(filter: &XssFilter) -> Self {
+        let policy_string: &'static str = match filter {
             XssFilter::Disable => "0",
             XssFilter::Enable => "1",
             XssFilter::EnableBlock => "1; mode=block",
@@ -423,9 +423,9 @@ impl Default for Prefetch {
     }
 }

-impl Into<Header<'static>> for &Prefetch {
-    fn into(self) -> Header<'static> {
-        let policy_string = match self {
+impl From<&Prefetch> for Header<'static> {
+    fn from(prefetch: &Prefetch) -> Self {
+        let policy_string = match prefetch {
             Prefetch::On => "on",
             Prefetch::Off => "off",
         };
@@ -645,9 +645,9 @@ impl Permission {
     }
 }

-impl Into<Header<'static>> for &Permission {
-    fn into(self) -> Header<'static> {
-        if self == &Permission::default() {
+impl From<&Permission> for Header<'static> {
+    fn from(perm: &Permission) -> Self {
+        if perm == &Permission::default() {
             static DEFAULT: Header<'static> = Header {
                 name: Uncased::from_borrowed(Permission::NAME),
                 value: Cow::Borrowed("interest-cohort=()")
@@ -656,7 +656,7 @@ impl Into<Header<'static>> for &Permission {
             return DEFAULT.clone();
         }

-        let value = self.0.iter()
+        let value = perm.0.iter()
             .map(|(feature, allow)| {
                 let list = allow.as_ref()
                     .into_iter()
@@ -73,7 +73,7 @@ struct CustomHandler {
 }

 impl CustomHandler {
-    fn new(data: &'static str) -> Vec<Route> {
+    fn routes(data: &'static str) -> Vec<Route> {
         vec![Route::new(Get, "/<id>", Self { data })]
     }
 }
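Renaming `CustomHandler::new` to `CustomHandler::routes` presumably sidesteps clippy's `new_ret_no_self` lint, which expects a method named `new` to return `Self` (here it returns `Vec<Route>`). A toy sketch of the convention, independent of Rocket's types:

```rust
struct Counter {
    start: u32,
}

impl Counter {
    // A conventional `new` returns `Self`...
    fn new(start: u32) -> Self {
        Counter { start }
    }

    // ...while a constructor-like helper that returns something else gets a
    // descriptive name instead of `new`.
    fn values(&self, n: u32) -> Vec<u32> {
        (self.start..self.start + n).collect()
    }
}

fn main() {
    let counter = Counter::new(5);
    assert_eq!(counter.values(3), vec![5, 6, 7]);
}
```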
@@ -107,6 +107,6 @@ fn rocket() -> _ {
         .mount("/upload", vec![get_upload, post_upload])
         .mount("/hello", vec![name.clone()])
         .mount("/hi", vec![name])
-        .mount("/custom", CustomHandler::new("some data here"))
+        .mount("/custom", CustomHandler::routes("some data here"))
         .register("/", vec![not_found_catcher])
 }