forked from neri/datatrash
fix: return 413 Payload Too Large for oversized uploads; fall back correctly when content-type detection fails on binary data
This commit is contained in:
parent
3da9f1117e
commit
24c4307ce5
|
@ -424,7 +424,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "datatrash"
|
||||
version = "2.0.4"
|
||||
version = "2.0.5"
|
||||
dependencies = [
|
||||
"actix-files",
|
||||
"actix-governor",
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "datatrash"
|
||||
version = "2.0.4"
|
||||
version = "2.0.5"
|
||||
authors = ["neri"]
|
||||
edition = "2021"
|
||||
|
||||
|
|
|
@ -3,11 +3,7 @@ use actix_multipart::{Field, Multipart};
|
|||
use actix_web::{error, http::header::DispositionParam, Error};
|
||||
use futures_util::{StreamExt, TryStreamExt};
|
||||
use mime::{Mime, APPLICATION_OCTET_STREAM, TEXT_PLAIN};
|
||||
use std::{
|
||||
cmp::{max, min},
|
||||
io::ErrorKind,
|
||||
path::Path,
|
||||
};
|
||||
use std::{cmp::min, io::ErrorKind, path::Path};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
use tokio::{
|
||||
fs::{self, File},
|
||||
|
@ -72,7 +68,8 @@ pub(crate) async fn parse_multipart_inner(
|
|||
content_type = Some(
|
||||
mime.filter(|mime| *mime != APPLICATION_OCTET_STREAM)
|
||||
.map(mime_relations::get_alias)
|
||||
.unwrap_or_else(|| get_content_type(&first_bytes)),
|
||||
.or_else(|| get_content_type(&first_bytes))
|
||||
.unwrap_or(APPLICATION_OCTET_STREAM),
|
||||
);
|
||||
}
|
||||
"text" => {
|
||||
|
@ -81,7 +78,7 @@ pub(crate) async fn parse_multipart_inner(
|
|||
}
|
||||
let first_bytes;
|
||||
(size, first_bytes) = create_file(file_path, field, config.max_file_size).await?;
|
||||
content_type = Some(get_content_type(&first_bytes));
|
||||
content_type = Some(get_content_type(&first_bytes).unwrap_or(TEXT_PLAIN));
|
||||
}
|
||||
"delete_on_download" => {
|
||||
delete_on_download = parse_string(&name, &mut field).await? != "false";
|
||||
|
@ -193,16 +190,12 @@ async fn write_to_file(
|
|||
let mut written_bytes: u64 = 0;
|
||||
while let Some(chunk) = field.next().await {
|
||||
let chunk = chunk.map_err(error::ErrorBadRequest)?;
|
||||
let remaining_first_bytes = min(max(0, 2048 - written_bytes) as usize, chunk.len());
|
||||
first_bytes.extend_from_slice(&chunk[0..remaining_first_bytes]);
|
||||
written_bytes += chunk.len() as u64;
|
||||
if let Some(max_size) = max_size {
|
||||
if written_bytes > max_size {
|
||||
return Err(error::ErrorBadRequest(format!(
|
||||
"exceeded maximum file size of {max_size} bytes"
|
||||
)));
|
||||
}
|
||||
}
|
||||
validate_max_size(written_bytes, max_size)?;
|
||||
|
||||
let remaining_first_bytes = min(2048 - first_bytes.len(), chunk.len());
|
||||
first_bytes.extend_from_slice(&chunk[..remaining_first_bytes]);
|
||||
|
||||
file.write_all(&chunk).await.map_err(|write_err| {
|
||||
log::error!("could not write file {:?}", write_err);
|
||||
error::ErrorInternalServerError("could not write file")
|
||||
|
@ -211,6 +204,17 @@ async fn write_to_file(
|
|||
Ok((written_bytes, first_bytes))
|
||||
}
|
||||
|
||||
fn validate_max_size(written_bytes: u64, max_size: Option<u64>) -> Result<(), Error> {
|
||||
if let Some(max_size) = max_size {
|
||||
if written_bytes > max_size {
|
||||
return Err(error::ErrorPayloadTooLarge(format!(
|
||||
"exceeded maximum file size of {max_size} bytes"
|
||||
)));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_file_metadata(field: &actix_multipart::Field) -> (Option<Mime>, Option<String>) {
|
||||
let mime = field.content_type().cloned();
|
||||
let filename = field
|
||||
|
@ -224,9 +228,6 @@ fn get_file_metadata(field: &actix_multipart::Field) -> (Option<Mime>, Option<St
|
|||
(mime, filename)
|
||||
}
|
||||
|
||||
fn get_content_type(bytes: &[u8]) -> Mime {
|
||||
tree_magic_mini::from_u8(bytes)
|
||||
.parse()
|
||||
.ok()
|
||||
.unwrap_or(TEXT_PLAIN)
|
||||
fn get_content_type(bytes: &[u8]) -> Option<Mime> {
|
||||
tree_magic_mini::from_u8(bytes).parse().ok()
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue