Updated to add headers that prevent search engines from crawling some unnecessary paths
parent c5f6c2bab3
commit b6ca50072e
1 changed file with 3 additions and 1 deletion
@@ -334,7 +334,8 @@ function actionDownload(req, res, noteId) {
       'Content-Type': 'text/markdown; charset=UTF-8',
       'Cache-Control': 'private',
       'Content-disposition': 'attachment; filename=' + filename + '.md',
-      'Content-Length': body.length
+      'Content-Length': body.length,
+      'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
     });
     res.end(body);
   });
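For reference, actionDownload sets all of its response headers in a single res.writeHead call, so the new X-Robots-Tag entry is just one more key in that object. The standalone sketch below shows the same pattern in a bare Node.js server; the port, filename, and body are illustrative placeholders, not code from this repository.

const http = require('http');

http.createServer((req, res) => {
  const filename = 'note';                          // placeholder name
  const body = Buffer.from('# example\n', 'utf8');  // placeholder markdown body
  res.writeHead(200, {
    'Content-Type': 'text/markdown; charset=UTF-8',
    'Cache-Control': 'private',
    'Content-disposition': 'attachment; filename=' + filename + '.md',
    'Content-Length': body.length,
    'X-Robots-Tag': 'noindex, nofollow' // ask crawlers not to index or follow this response
  });
  res.end(body);
}).listen(3000);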
@@ -367,6 +368,7 @@ function actionPDF(req, res, noteId) {
     res.setHeader('Content-disposition', 'attachment; filename="' + filename + '.pdf"');
     res.setHeader('Cache-Control', 'private');
     res.setHeader('Content-Type', 'application/pdf; charset=UTF-8');
+    res.setHeader('X-Robots-Tag', 'noindex, nofollow'); // prevent crawling
     stream.pipe(res);
     fs.unlink(path);
   });
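Since X-Robots-Tag only has an effect if it actually reaches the client, a quick way to double-check either route is to request it and print the header. The snippet below is a minimal sketch: the host, port, and path are placeholders, not routes confirmed by this commit.

const http = require('http');

http.get('http://localhost:3000/example/download', (res) => {
  console.log('status:', res.statusCode);
  console.log('X-Robots-Tag:', res.headers['x-robots-tag']); // expect 'noindex, nofollow'
  res.resume(); // drain the body so the socket can close
});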