Skip to content

Commit

Permalink
Merge pull request #2234 from alphagov/robots-txt
Browse files Browse the repository at this point in the history
Update robots.txt to ensure review app isn’t indexed
  • Loading branch information
36degrees authored May 21, 2021
2 parents ce5622a + 299d1fa commit 4f71da6
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 5 deletions.
15 changes: 10 additions & 5 deletions app/app.js
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,16 @@ module.exports = (options) => {
next()
})

// Deliberately allow crawlers to fetch every page.
//
// Counter-intuitive but intentional: if robots.txt blocked a page, the
// crawler would never fetch it and so would never see its noindex
// directive — meaning the page could still surface in search results.
app.get('/robots.txt', (req, res) => {
  res.type('text/plain').send('User-agent: *\nAllow: /')
})

// Set up middleware to serve static assets: files under the configured
// public directory (configPaths.public) are exposed at the /public URL prefix.
app.use('/public', express.static(configPaths.public))

Expand Down Expand Up @@ -195,10 +205,5 @@ module.exports = (options) => {
// Full page example views — the required module exports a function that
// is called with the app, presumably to register its own routes on it
// (confirm against full-page-examples.js).
require('./full-page-examples.js')(app)

// Serve a plain-text robots.txt that blocks all user agents from every path.
app.get('/robots.txt', (req, res) => {
  res.type('text/plain').send('User-agent: *\nDisallow: /')
})

return app
}
9 changes: 9 additions & 0 deletions app/app.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,15 @@ describe(`http://localhost:${PORT}`, () => {
})
})

describe('/robots.txt', () => {
  // Crawlers must be allowed in so they can see per-page noindex directives.
  it('should allow crawling by robots', (done) => {
    requestPath('/robots.txt', (error, response) => {
      expect(response.body).toMatch(/^Allow: \/$/m)
      done(error)
    })
  })
})

describe('/examples/template-custom', () => {
const templatePath = '/examples/template-custom'

Expand Down

0 comments on commit 4f71da6

Please sign in to comment.