Added deduplicatedHeaderId to avoid multiple headers with the same id

Wu Cheng-Han 2015-09-25 14:27:23 +08:00
parent 44cd5d4b56
commit bc31e9fe3b
3 changed files with 22 additions and 1 deletion


@@ -322,7 +322,26 @@ function autoLinkify(view) {
    for (var level = 1; level <= 6; level++) {
        linkifyAnchors(level, contentBlock);
    }
};
}
function deduplicatedHeaderId(view) {
    var headers = view.find(':header').toArray();
    for (var i = 0; i < headers.length; i++) {
        var id = $(headers[i]).attr('id');
        if (!id) continue;
        var duplicatedHeaders = view.find(':header[id=' + id + ']').toArray();
        for (var j = 0; j < duplicatedHeaders.length; j++) {
            if (duplicatedHeaders[j] != headers[i]) {
                var newId = id + j;
                var $duplicatedHeader = $(duplicatedHeaders[j]);
                $duplicatedHeader.attr('id', newId);
                var $headerLink = $duplicatedHeader.find('> .header-link');
                $headerLink.attr('href', '#' + newId);
                $headerLink.attr('title', newId);
            }
        }
    }
}
function scrollToHash() {
var hash = location.hash;
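
A minimal sketch of the intended effect, assuming jQuery is loaded and that autoLinkify has already appended an <a class="header-link"> anchor to each header; the markup below is illustrative, not taken from the commit:

// Two headers rendered with the same id, as autoLinkify would leave them:
var $view = $('<div>' +
    '<h2 id="setup"><a class="header-link" href="#setup" title="setup"></a>Setup</h2>' +
    '<h2 id="setup"><a class="header-link" href="#setup" title="setup"></a>Setup</h2>' +
    '</div>');
deduplicatedHeaderId($view);
// The first header keeps id="setup"; the second is renamed to id="setup1",
// and its .header-link is re-pointed to href="#setup1" with title="setup1",
// so in-page anchors no longer collide.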


@@ -1725,6 +1725,7 @@ function updateView() {
lastResult = $(result).clone();
finishView(ui.area.view);
autoLinkify(ui.area.view);
deduplicatedHeaderId(ui.area.view);
generateToc('toc');
generateToc('toc-affix');
generateScrollspy();


@@ -5,6 +5,7 @@ markdown.html(result.html());
$(document.body).show();
finishView(markdown);
autoLinkify(markdown);
deduplicatedHeaderId(markdown);
generateToc('toc');
generateToc('toc-affix');
smoothHashScroll();
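
For orientation, a hedged sketch of where the new call sits in both render paths shown above; the container selector is made up, and the surrounding function names are taken from the diff context rather than from this commit:

// Hypothetical post-render hook mirroring the call order in the hunks above.
var $rendered = $('#doc');        // assumed container holding the rendered markdown
finishView($rendered);            // existing post-processing from the surrounding code
autoLinkify($rendered);           // appends a .header-link anchor to every header
deduplicatedHeaderId($rendered);  // must run after autoLinkify so the anchors it rewrites exist
generateToc('toc');               // the TOC is then built from unique header ids
generateToc('toc-affix');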