Add robots.txt so that our service is not crawled

Change-Id: I20c78537f665606c51a687657fd422b8b471f453
Gabriel Wicke 2012-07-25 16:52:35 -07:00
parent c4e7544f60
commit 00e1f84eab

@@ -109,6 +109,12 @@ var textarea = function ( res, content ) {
	res.write('</textarea><br><input type="submit"></form>');
};

/**
 * robots.txt: no indexing.
 */
app.get(/^\/robots.txt$/, function(req, res){
	res.end( "User-agent: *\nDisallow: /\n" );
});
/**
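
For reference, a minimal standalone sketch of the route added here (assuming Express 3 or later; the port is illustrative and not part of this commit):

var express = require('express');
var app = express();

// Same handler as in this commit: answer /robots.txt with a policy that
// disallows all crawlers. The unescaped '.' in the regex matches any
// character, as in the original.
app.get(/^\/robots.txt$/, function (req, res) {
	res.end( "User-agent: *\nDisallow: /\n" );
});

app.listen(8000);

Fetching http://localhost:8000/robots.txt then returns "User-agent: *" followed by "Disallow: /", which tells compliant crawlers not to index any path on the service.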