Like in GitLab Pages, HTML extensions can be bypassed

Pierre HUBERT 2022-04-03 14:30:44 +02:00
parent 8fd0981e78
commit d2daeb5879
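
The behaviour mirrors GitLab Pages: with the new flag enabled, a request for /about is served from about.html in the storage directory when that file exists, instead of falling through to the 404 handler. Below is a minimal standalone sketch of the lookup rule added to the default handler; the function name alternate_html_path, the example paths and the main() driver are illustrative only, not part of this commit.

use std::path::{Path, PathBuf};

/// Sketch of the new lookup rule: when HTML-extension bypass is enabled and the
/// request path has no trailing slash and no ".html" suffix, try "<storage>/<path>.html".
fn alternate_html_path(storage: &Path, request_path: &str, can_bypass_html_ext: bool) -> Option<PathBuf> {
    if !can_bypass_html_ext
        || request_path.is_empty()
        || request_path.ends_with(".html")
        || request_path.ends_with('/')
    {
        return None;
    }
    // Drop the leading '/' so the join stays relative to the storage root.
    Some(storage.join(format!("{}.html", &request_path[1..])))
}

fn main() {
    let storage = Path::new("/var/www/site");
    // "/about" resolves to "/var/www/site/about.html" when the flag is set...
    assert_eq!(
        alternate_html_path(storage, "/about", true),
        Some(PathBuf::from("/var/www/site/about.html"))
    );
    // ...and to nothing when it is not.
    assert_eq!(alternate_html_path(storage, "/about", false), None);
}

With clap's derive defaults the new option should surface as --can-bypass-html-ext or the CAN_BYPASS_HTML_EXT environment variable; those generated names are an assumption, as the diff only shows the field declaration.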


@@ -42,6 +42,10 @@ struct Args {
/// Optional proxy IP
#[clap(short, long, env)]
proxy_ip: Option<String>,
/// Specify whether HTML extensions can be bypassed
#[clap(short, long, env)]
can_bypass_html_ext: bool,
}
impl Args {
@@ -52,7 +56,7 @@ impl Args {
pub fn default_handler_path(&self) -> Option<PathBuf> {
match self.not_found_file.is_empty() {
true => None,
false => Some(self.storage_path().join(&self.not_found_file))
false => Some(self.storage_path().join(&self.not_found_file)),
}
}
}
@@ -75,7 +79,6 @@ pub fn match_ip(pattern: &str, ip: &str) -> bool {
false
}
/// Get the remote IP address
fn get_remote_ip(req: &HttpRequest, args: &Args) -> String {
let mut ip = req.peer_addr().unwrap().ip().to_string();
@@ -102,14 +105,19 @@ fn get_remote_ip(req: &HttpRequest, args: &Args) -> String {
ip
}
/// Replace all the files of the website
async fn replace_files(args: web::Data<Args>, req: HttpRequest, mut payload: Multipart) -> Result<HttpResponse, Error> {
async fn replace_files(
args: web::Data<Args>,
req: HttpRequest,
mut payload: Multipart,
) -> Result<HttpResponse, Error> {
// Validate remote IP
let remote_ip = get_remote_ip(&req, &args);
if !match_ip(&args.allowed_ips_for_update, &remote_ip) {
log::warn!("Block unauthorized attempt to perform site update from {}", remote_ip);
log::warn!(
"Block unauthorized attempt to perform site update from {}",
remote_ip
);
return Err(ErrorUnauthorized("You are not allowed to perform updates!"));
}
@@ -118,8 +126,9 @@ async fn replace_files(args: web::Data<Args>, req: HttpRequest, mut payload: Mul
None => {
return Err(ErrorUnauthorized("Token required!"));
}
Some(t) => t.to_str()
.map_err(|_| ErrorInternalServerError("Failed to parse token!"))?
Some(t) => t
.to_str()
.map_err(|_| ErrorInternalServerError("Failed to parse token!"))?,
};
if !token.eq(&args.update_token) || args.update_token.is_empty() {
@@ -129,8 +138,9 @@ async fn replace_files(args: web::Data<Args>, req: HttpRequest, mut payload: Mul
// Get base folder to keep from tar-file
let base_uri = match req.headers().get("BaseURI") {
None => "/",
Some(t) => t.to_str()
.map_err(|_| ErrorInternalServerError("Failed to parse base URI to keep!"))?
Some(t) => t
.to_str()
.map_err(|_| ErrorInternalServerError("Failed to parse base URI to keep!"))?,
};
let mut new_files = Vec::new();
@@ -145,8 +155,10 @@ async fn replace_files(args: web::Data<Args>, req: HttpRequest, mut payload: Mul
}
let mut archive = tar::Archive::new(b.as_ref());
for entry in archive.entries()
.map_err(|_| ErrorInternalServerError("Failed to parse TAR archive!"))? {
for entry in archive
.entries()
.map_err(|_| ErrorInternalServerError("Failed to parse TAR archive!"))?
{
let mut file = entry?;
let inner_path = file.header().path()?;
let inner_path_str = inner_path.to_string_lossy();
@@ -205,6 +217,41 @@ async fn replace_files(args: web::Data<Args>, req: HttpRequest, mut payload: Mul
Ok(HttpResponse::Ok().into())
}
async fn default_files_handler(req: ServiceRequest) -> Result<ServiceResponse, Error> {
let (req, _) = req.into_parts();
let args: &web::Data<Args> = req.app_data().unwrap();
// Search for alternate paths
if args.can_bypass_html_ext
&& !req.path().ends_with(".html")
&& !req.path().ends_with("/")
&& !req.path().is_empty() {
let alt_file = args.storage_path()
.join(format!("{}.html", &req.path()[1..]));
if alt_file.exists() {
let file = NamedFile::open_async(alt_file).await?;
let res = file.into_response(&req);
return Ok(ServiceResponse::new(req, res));
}
}
// Default handler
if let Some(h) = args.default_handler_path() {
let file = NamedFile::open_async(h).await?;
let res = file.into_response(&req);
Ok(ServiceResponse::new(req, res))
}
// Dummy response
else {
Ok(ServiceResponse::new(
req,
HttpResponse::NotFound().body("404 Not found"),
))
}
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let args: Args = Args::parse();
@@ -219,33 +266,17 @@ async fn main() -> std::io::Result<()> {
HttpServer::new(move || {
App::new()
// Update service
.service(web::resource("/_mgmt/replace_files")
.route(web::post().to(replace_files)))
.service(web::resource("/_mgmt/replace_files").route(web::post().to(replace_files)))
// Serve a tree of static files at the web root and specify the index file.
// Note that the root path should always be defined as the last item. The paths are
// resolved in the order they are defined. If this would be placed before the `/images`
// path then the service for the static images would never be reached.
.service(Files::new("/", &args.files_path)
.index_file(&args.index_file)
.default_handler(fn_service(|req: ServiceRequest| async {
let (req, _) = req.into_parts();
let args: &web::Data<Args> = req.app_data().unwrap();
if let Some(h) = args.default_handler_path() {
let file = NamedFile::open_async(h).await?;
let res = file.into_response(&req);
Ok(ServiceResponse::new(req, res))
} else {
Ok(ServiceResponse::new(req, HttpResponse::NotFound()
.body("404 Not found")))
}
}))
.service(
Files::new("/", &args.files_path)
.index_file(&args.index_file)
.default_handler(fn_service(default_files_handler)),
)
// Enable the logger.
.wrap(Logger::default())
.app_data(web::Data::new(args.clone()))
@@ -253,4 +284,4 @@ async fn main() -> std::io::Result<()> {
.bind(listen_address)?
.run()
.await
}
}