Browse Source
* feat: add support for robots.txt. Can toggle disabling search engine indexing. Closes #2684
* fix: unexport ts const (pull/2924/head)
10 changed files with 122 additions and 2 deletions
@@ -0,0 +1,28 @@
@@ -0,0 +1,28 @@
|
||||
package controllers |
||||
|
||||
import (
	"log"
	"net/http"
	"strings"

	"github.com/owncast/owncast/core/data"
)
||||
|
||||
// GetRobotsDotTxt returns the contents of our robots.txt.
|
||||
func GetRobotsDotTxt(w http.ResponseWriter, r *http.Request) { |
||||
w.Header().Set("Content-Type", "text/plain") |
||||
contents := []string{ |
||||
"User-agent: *", |
||||
"Disallow: /admin", |
||||
"Disallow: /api", |
||||
} |
||||
|
||||
if data.GetDisableSearchIndexing() { |
||||
contents = append(contents, "Disallow: /") |
||||
} |
||||
|
||||
txt := []byte(strings.Join(contents, "\n")) |
||||
|
||||
if _, err := w.Write(txt); err != nil { |
||||
http.Error(w, err.Error(), http.StatusInternalServerError) |
||||
} |
||||
} |
Loading…
Reference in new issue