fix #5223 - allow robots.txt to be customized for each site

sbwalker
2025-04-08 09:23:22 -04:00
parent 5dcc7c14f3
commit 020b7233d0
3 changed files with 40 additions and 3 deletions


@@ -74,8 +74,21 @@ namespace Oqtane.Infrastructure
 // handle robots.txt root request (does not support subfolder aliases)
 if (context.Request.Path.StartsWithSegments("/robots.txt") && string.IsNullOrEmpty(alias.Path))
 {
-    // allow all user agents and specify site map
-    var robots = $"User-agent: *\n\nSitemap: {context.Request.Scheme}://{alias.Name}/sitemap.xml";
+    string robots = "";
+    if (sitesettings.ContainsKey("Robots") && !string.IsNullOrEmpty(sitesettings["Robots"]))
+    {
+        robots = sitesettings["Robots"];
+    }
+    else
+    {
+        // allow all user agents by default
+        robots = $"User-agent: *";
+    }
+    if (!robots.ToLower().Contains("sitemap"))
+    {
+        // add sitemap if not specified
+        robots += $"\n\nSitemap: {context.Request.Scheme}://{alias.Name}/sitemap.xml";
+    }
     context.Response.ContentType = "text/plain";
     await context.Response.WriteAsync(robots);
     return;
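
For reference, a minimal standalone sketch of the behavior introduced by this commit (the BuildRobots helper, parameter names, and sample values below are hypothetical illustrations, not part of the commit): the per-site "Robots" setting is used verbatim when present, otherwise a permissive default is emitted, and a Sitemap directive is appended whenever the content does not already declare one.

// Hypothetical helper mirroring the logic added in this commit.
// "robotsSetting" stands in for sitesettings["Robots"]; "scheme" and
// "aliasName" stand in for context.Request.Scheme and alias.Name.
using System;

public static class RobotsExample
{
    public static string BuildRobots(string robotsSetting, string scheme, string aliasName)
    {
        // use the site-specific setting when one has been configured,
        // otherwise allow all user agents by default
        string robots = string.IsNullOrEmpty(robotsSetting)
            ? "User-agent: *"
            : robotsSetting;

        // append a sitemap reference if the content does not already declare one
        if (!robots.ToLower().Contains("sitemap"))
        {
            robots += $"\n\nSitemap: {scheme}://{aliasName}/sitemap.xml";
        }
        return robots;
    }

    public static void Main()
    {
        // default output for a site with no custom setting:
        //   User-agent: *
        //
        //   Sitemap: https://example.com/sitemap.xml
        Console.WriteLine(BuildRobots(null, "https", "example.com"));

        // a custom setting that already declares a sitemap is returned unchanged
        Console.WriteLine(BuildRobots("User-agent: *\nDisallow: /admin\nSitemap: https://example.com/sitemap.xml", "https", "example.com"));
    }
}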