-- autotype.lua
-- Copyright 2020-2026 Stephan Hennig and Keno Wehr
--[[ This work may be distributed and/or modified under the conditions of the LaTeX Project Public License, either version 1.3 of this license or (at your option) any later version. The latest version of this license is in http://www.latex-project.org/lppl.txt and version 1.3 or later is part of all distributions of LaTeX version 2005/12/01 or later. ]]
-- luacheck: globals font lang luatexbase node tex
luatexbase.provides_module({
   name = "autotype",
   version = "0.6",
   date = "2026-04-27",
   description = "automatic language-specific typography"
})

-- Localize frequently used LuaTeX node library functions
-- (locals are faster than repeated global table lookups).
local Ncopy = node.copy
local NLcopy = node.copy_list
local Ninsert_after = node.insert_after
local Ninsert_before = node.insert_before
local Nremove = node.remove
local Nnew = node.new
local Nhas_attribute = node.has_attribute
local Uchar = utf8.char

local HYPHEN = 0x2D -- ASCII/Unicode codepoint of a hyphen
local CHAR_c = 0x63 -- ASCII/Unicode codepoint of the letter c
local ZWNJ = 0x200C -- Unicode codepoint of ZWNJ

-- Template nodes; fresh nodes are created by copying these templates.
local kern_templ = Nnew('kern', 0) -- subtype 0 means fontkern
local ZWNJ_templ = Nnew('glyph')
ZWNJ_templ.char = ZWNJ

--- Create a fresh font-kern node of the given width.
-- @param dim Kern amount in scaled points.
-- @return A new kern node (copy of the template).
local function get_kern_node(dim)
   local n = Ncopy(kern_templ)
   n.kern = dim
   return n
end

-- Penalty values for primary, secondary, and tertiary hyphenation points
local PENALTY_I = 30
local PENALTY_II = 60
local PENALTY_III = 90

local font_kern_values = {}          -- kern coverage tables, keyed by font id
local long_s_codepoint = {}          -- per-font overrides, keyed by normalized font name
local round_s_codepoint = {}         -- per-font overrides, keyed by normalized font name
local final_round_s_codepoint = {}   -- per-font overrides, keyed by normalized font name

--- Normalize the name of a TeX font.
-- We have to do some normalization of the name of the current font.
-- Examples: The yfrak font at 11pt is called "yfrak at 10.95pt" by TeX, but we need only "yfrak".
-- The yfrak font becomes something like "yfrak+100ls" if it is letterspaced (e.g. with microtype's \textls command),
-- but again, we only need "yfrak".
-- @param TeX_font_name The font name as reported by TeX.
-- @return The bare font name (everything from the first space, colon, or plus sign is stripped).
local function normalize_font(TeX_font_name)
   -- The parentheses truncate gsub's second return value (the substitution
   -- count) so that callers in list context receive exactly one string.
   return (string.gsub(TeX_font_name, "[ :%+].*", ""))
end

--- Register an irregular long-s codepoint for a font.
local function set_long_s_codepoint(font_name, codepoint)
   long_s_codepoint[normalize_font(font_name)] = codepoint
end

--- Register an irregular round-s codepoint for a font.
local function set_round_s_codepoint(font_name, codepoint)
   round_s_codepoint[normalize_font(font_name)] = codepoint
end

--- Register a final round-s codepoint for a font (e.g. wesu14).
local function set_final_round_s_codepoint(font_name, codepoint)
   final_round_s_codepoint[normalize_font(font_name)] = codepoint
end

--- Long-s codepoint for a font id (registered override or U+017F).
local function get_long_s_codepoint(font_id)
   local fname = normalize_font(tex.fontname(font_id))
   local codepoint
   if long_s_codepoint[fname] then
      codepoint = long_s_codepoint[fname] -- for fonts with irregular encodings
   else
      codepoint = 0x17F -- Unicode codepoint of ſ
   end
   return codepoint
end

--- Round-s codepoint for a font id (registered override or ASCII "s").
local function get_round_s_codepoint(font_id)
   local fname = normalize_font(tex.fontname(font_id))
   local codepoint
   if round_s_codepoint[fname] then
      codepoint = round_s_codepoint[fname] -- for fonts with irregular encodings
   else
      codepoint = 0x73 -- ASCII/Unicode codepoint of s
   end
   return codepoint
end

--- Final round-s codepoint for a font id; falls back to the plain round s.
local function get_final_round_s_codepoint(font_id)
   local codepoint
   if final_round_s_codepoint[normalize_font(tex.fontname(font_id))] then
      codepoint = final_round_s_codepoint[normalize_font(tex.fontname(font_id))] -- for fonts like wesu14
   else
      codepoint = get_round_s_codepoint(font_id)
   end
   return codepoint
end

--- Long-s codepoint of the current font.
local function get_current_long_s_codepoint()
   return get_long_s_codepoint(font.current())
end

--- Round-s codepoint of the current font.
local function get_current_round_s_codepoint()
   return get_round_s_codepoint(font.current())
end

--- Insert a discretionary node before `second`.
-- @param head Head of a node list.
-- @param first Glyph node before the break point; supplies attributes and
--        (via copy) font/lang for the pre-break glyph.
-- @param second Glyph node after the break point; the disc is inserted before it.
-- @param penalty Penalty value stored in the discretionary.
-- @param pre_char Optional codepoint for the pre-break text (usually a hyphen);
--        if nil, the disc gets empty pre-break text.
local function insert_discretionary(head, first, second, penalty, pre_char)
   -- Create discretionary node
   local d = Nnew('disc', 0)
   d.attr = first.attr
   d.penalty = penalty
   -- Set pre-break text
   if pre_char then
      local pre = Ncopy(first)
      pre.char = pre_char
      d.pre = pre
   end
   -- Insert discretionary before second node
   Ninsert_before(head, second, d)
end

--- Insert a discretionary for cases like backen -> bak-ken.
-- The node `first` (the "c") is removed from the list; unbroken, the disc's
-- replace text renders it again, while the pre-break text renders
-- "<second.char><pre_char>" (e.g. "k-").
local function insert_ck_discretionary(head, first, second, penalty, pre_char)
   -- Create discretionary node
   local d = Nnew('disc', 0)
   d.attr = first.attr
   d.penalty = penalty
   -- Set pre-break text
   if pre_char then
      local pre1 = Ncopy(first)
      local pre2 = Ncopy(first)
      local rep = Ncopy(first)
      pre1.char = second.char
      pre2.char = pre_char
      rep.char = first.char
      pre1.next = pre2
      pre2.prev = pre1
      d.pre = pre1
      d.replace = rep
   end
   -- Insert discretionary before second node
   Ninsert_before(head, second, d)
   -- Remove first node
   Nremove(head, first)
end

--- Insert a discretionary for cases like Schiffahrt -> Schiff-fahrt.
-- The node `first` (one of the doubled consonants) is removed and kept as
-- replace text; the pre-break text doubles the consonant and appends the
-- hyphen (e.g. "ff-").
local function insert_three_consonant_discretionary(head, first, second, penalty, pre_char)
   -- Create discretionary node
   local d = Nnew('disc', 0)
   d.attr = first.attr
   d.penalty = penalty
   -- Set pre-break text
   if pre_char then
      local pre1 = Ncopy(first)
      local pre2 = Ncopy(first)
      local pre3 = Ncopy(first)
      local rep = Ncopy(first)
      pre1.char = first.char
      pre2.char = first.char
      pre3.char = pre_char
      rep.char = first.char
      pre1.next = pre2
      pre2.prev = pre1
      pre2.next = pre3
      pre3.prev = pre2
      d.pre = pre1
      d.replace = rep
   end
   -- Insert discretionary before second node
   Ninsert_before(head, second, d)
   -- Remove first node
   Nremove(head, first)
end

--- Insert special (ck / triple-consonant) hyphenation points found by the scanner.
-- @param head Head of a node list.
local function insert_special_hyphenation_points(head, scan_node_list, hyphen_penalty)
   -- Do pattern matching.
   local words = scan_node_list(head)
   -- Iterate over words.
   for _, word in ipairs(words) do
      for i, level in ipairs(word.levels) do
         -- Spot with surrounding top-level nodes?
         if (level % 2 == 1) and not word.parents[i-1] and not word.parents[i] then
            if word.nodes[i-1].char == CHAR_c then
               insert_ck_discretionary(head, word.nodes[i-1], word.nodes[i], hyphen_penalty, HYPHEN)
            else
               insert_three_consonant_discretionary(head, word.nodes[i-1], word.nodes[i], hyphen_penalty, HYPHEN)
            end
         end
      end
   end
end

--- Insert only primary hyphenation points.
-- @param head Head of a node list.
local function insert_primary_hyphenation_points(head, scan_node_list_i, scan_node_list_iii)
   -- Do pattern matching.
   local words_i = scan_node_list_i(head)
   local words_iii = scan_node_list_iii(head)
   -- Iterate over words.
   for i, word in ipairs(words_i) do
      if not word.exhyphenchars then -- Process words not containing explicit hyphens
         for k, level_i in ipairs(word.levels) do
            -- Spot with surrounding top-level nodes?
            -- Even for primary hyphenation points we require a regular (tertiary) hyphenation point.
            -- This avoids wrong hyphenations like "Sch-rattenthal" (generalized by patgen from
            -- "Arsch-ratte" and "Busch-ratte") in some cases.
            if level_i % 2 == 1 and words_iii[i].levels[k] % 2 == 1 and not word.parents[k-1] and not word.parents[k] then
               insert_discretionary(head, word.nodes[k-1], word.nodes[k], PENALTY_I, HYPHEN)
            end
         end
      else -- Process words containing explicit hyphens
         local exhyphenchar_num = #word.exhyphenchars -- the number of explicit hyphens in the word
         local char_num = #word.nodes -- the number of characters of the word
         for j, k in ipairs(word.exhyphenchars) do
            -- Primary hyphenation point at the explicit hyphen
            if k > 1 and k < char_num and (j == exhyphenchar_num or word.exhyphenchars[j+1] > k+1) then
               insert_discretionary(head, word.nodes[k], word.nodes[k+1], PENALTY_I)
            end
         end
      end
   end
end

--- Insert tertiary hyphenation points into a slice of a word.
-- The following function is needed for the parts of a word with explicit hyphens.
-- @param head Head of a node list.
local function insert_tertiary_hyphenation_points(head, original_word, start_number, end_number, scan_node_list)
   local node_list = NLcopy(original_word.nodes[start_number], original_word.nodes[end_number].next)
   local words = scan_node_list(node_list)
   -- NOTE(review): node_list is a copy that is never flushed (node.flush_list);
   -- if the scanner does not take ownership of it, this leaks nodes — verify.
   if #words > 0 then
      for i, level in ipairs(words[1].levels) do
         local j = start_number + i - 1
         -- Spot with surrounding top-level nodes?
         if (level % 2 == 1) and not original_word.parents[j-1] and not original_word.parents[j] then
            insert_discretionary(head, original_word.nodes[j-1], original_word.nodes[j], PENALTY_III, HYPHEN)
         end
      end
   end
end
-- @param head Head of a node list.
--- Insert weighted (primary/secondary/tertiary) hyphenation points.
-- For each regular (tertiary) spot, a discretionary is inserted whose penalty
-- reflects the highest weight class that also matches at that position.
-- For words containing explicit hyphens, the parts between hyphens receive
-- tertiary points and the explicit hyphens themselves become primary points.
-- @param head Head of a node list (the comment "@param head" precedes this
--        function in the original source).
-- @param scan_node_list_i Scanner built from the primary patterns.
-- @param scan_node_list_ii Scanner built from the secondary patterns.
-- @param scan_node_list_iii Scanner built from the tertiary patterns.
local function insert_weighted_hyphenation_points(head, scan_node_list_i, scan_node_list_ii, scan_node_list_iii)
   -- Do pattern matching
   local words_i = scan_node_list_i(head)
   local words_ii = scan_node_list_ii(head)
   local words_iii = scan_node_list_iii(head)
   -- Iterate over words
   for i, word in ipairs(words_iii) do
      if not word.exhyphenchars then
         -- Process words not containing explicit hyphens
         for k, level_iii in ipairs(word.levels) do
            -- Surrounding top-level nodes?
            if not word.parents[k-1] and not word.parents[k] then
               -- Even for primary and secondary hyphenation points we require a regular (tertiary) hyphenation point.
               -- This avoids wrong hyphenations like "Sch-rattenthal" (generalized by patgen from "Arsch-ratte" and "Busch-ratte") in some cases.
               if level_iii % 2 == 1 then
                  -- Primary spot?
                  if words_i[i].levels[k] % 2 == 1 then
                     insert_discretionary(head, word.nodes[k-1], word.nodes[k], PENALTY_I, HYPHEN)
                  else
                     -- Secondary spot?
                     if words_ii[i].levels[k] % 2 == 1 then
                        insert_discretionary(head, word.nodes[k-1], word.nodes[k], PENALTY_II, HYPHEN)
                     else
                        -- Tertiary spot
                        insert_discretionary(head, word.nodes[k-1], word.nodes[k], PENALTY_III, HYPHEN)
                     end
                  end
               end
            end
         end
      else
         -- Process words containing explicit hyphens
         local exhyphenchar_num = #word.exhyphenchars -- the number of explicit hyphens in the word
         local char_num = #word.nodes -- the number of characters of the word
         for j, k in ipairs(word.exhyphenchars) do
            -- Tertiary hyphenation points for the word part before the next explicit hyphen
            if j == 1 then
               if k > 1 then -- k == 1 means that the first character is a hyphen.
                  insert_tertiary_hyphenation_points(head, word, 1, k-1, scan_node_list_iii)
               end
            elseif word.exhyphenchars[j-1] + 1 < k then
               insert_tertiary_hyphenation_points(head, word, word.exhyphenchars[j-1]+1, k-1, scan_node_list_iii)
            end
            -- Primary hyphenation point at the explicit hyphen
            -- (no pre_char: the explicit hyphen itself is already in the text)
            if k > 1 and k < char_num and (j == exhyphenchar_num or word.exhyphenchars[j+1] > k+1) then
               insert_discretionary(head, word.nodes[k], word.nodes[k+1], PENALTY_I)
            end
         end
         -- Tertiary hyphenation points for the word part after the last explicit hyphen
         if word.exhyphenchars[exhyphenchar_num] < char_num then
            insert_tertiary_hyphenation_points(head, word, word.exhyphenchars[exhyphenchar_num]+1, char_num, scan_node_list_iii)
         end
      end
   end
end

-- converts scaled points to big points, rounded to one decimal place
-- (math.floor truncates towards zero, i.e. it rounds down, not to nearest;
-- 65782 sp is approximately one big point: 65536 * 72.27/72 ≈ 65781.76)
local function sp_to_bp(num)
   return math.floor(num / 65782 * 10) / 10
end

-- The following function marks hyphenation points by a small coloured bar.
-- The code is based on the showhyphens package written by Patrick Gundlach.
--- Mark hyphenation points with a small coloured bar (debugging aid).
-- Colour encodes the penalty class: green = primary, blue = secondary,
-- orange = tertiary, red = anything else.
-- Based on the showhyphens package written by Patrick Gundlach.
-- @param head Head of a node list (recurses into hlists/vlists).
-- @param lang_name Babel-style language name (used to find the marking attribute).
-- @param lang_num LuaTeX language number; only discs of this language are marked.
local function mark_hyphenation_points(head, lang_name, lang_num)
   -- Loop-invariant lookups hoisted out of the traversal.
   local hlist_id = node.id('hlist')
   local vlist_id = node.id('vlist')
   local glyph_id = node.id('glyph')
   local disc_id = node.id('disc')
   local mark_attr = luatexbase.registernumber("autotype_"..lang_name.."_mark_hyph_attr")
   local current_lang
   local current = head
   while current do
      if current.id == hlist_id or current.id == vlist_id then
         mark_hyphenation_points(current.list, lang_name, lang_num)
      elseif current.id == glyph_id then
         current_lang = current.lang
      elseif current.id == disc_id and Nhas_attribute(current, mark_attr) and current_lang == lang_num then
         local colour = "1 0 0" -- red (fallback for unexpected penalties)
         if current.penalty == PENALTY_I then
            colour = "0 0.6 0" -- dark green
         elseif current.penalty == PENALTY_II then
            colour = "0 0.2 0.8" -- blue
         elseif current.penalty == PENALTY_III then
            colour = "1 0.5 0" -- orange
         end
         if current.replace and current.replace.id == glyph_id and current.replace.components then
            -- Disc with a glyph replace text (e.g. a ligature): draw a bar above it.
            -- Note: the original "sp_to_bp(...) or 0" / "+ 1 or 0" were dead code
            -- (sp_to_bp always returns a number, and "or" binds after "+").
            local wd = sp_to_bp(current.replace.width)
            local ht = sp_to_bp(current.replace.height) + 1
            local r = node.new("whatsit", "pdf_literal")
            r.data = "q "..colour.." RG 0.7 w 0 " .. tostring(ht) .. " m " .. tostring(-wd) .. " " .. tostring(ht) .. " l S Q"
            Ninsert_after(current.replace, current.replace, r)
         else
            -- Plain disc: draw a vertical tick whose extent encodes the class.
            local n = node.new("whatsit", "pdf_literal")
            n.mode = 0
            if current.penalty == PENALTY_I then
               n.data = "q "..colour.." RG 0.7 w 0 -1 m 0 8 l S Q"
            elseif current.penalty == PENALTY_II then
               n.data = "q "..colour.." RG 0.7 w 0 -1 m 0 3 l S Q"
            else
               n.data = "q "..colour.." RG 0.7 w 0 4 m 0 8 l S Q"
            end
            -- Splice the literal in directly after the disc.
            n.next = current.next
            if current.next then -- guard: the disc may be the last node of the list
               current.next.prev = n
            end
            n.prev = current
            current.next = n
            current = n
         end
      end
      current = current.next
   end
end

--- Manipulation that prevents selected ligatures.
-- This manipulation inserts a kern node between glyph nodes at every position
-- indicated by the ligature suppression patterns. The manipulation has to be
-- applied before TeX's ligaturing stage. The pos variable points to the glyph
-- node after a spot. If this glyph node is preceded by a discretionary, the
-- kern node is inserted as replace field of the discretionary, otherwise
-- directly before the glyph.
-- @param head Head of a node list.
local function suppress_ligatures(head, scan_node_list, lang_name)
   -- Do pattern matching.
   local words = scan_node_list(head)
   -- The attribute number is loop-invariant; look it up once.
   local lig_supp_attr = luatexbase.registernumber("autotype_"..lang_name.."_lig_supp_attr")
   -- Iterate over words.
   for _, word in ipairs(words) do
      -- Check all valid spots.
      for pos, level in ipairs(word.levels) do
         -- Valid spot?
         if (level % 2) == 1 then
            -- Apply manipulation to glyph nodes at indices pos-1 and pos.
            -- Only plain top-level glyph nodes are handled currently.
            if not word.parents[pos-1] and not word.parents[pos] then
               local first_glyph = word.nodes[pos-1]
               local second_glyph = word.nodes[pos]
               local font_id = first_glyph.font
               if Nhas_attribute(first_glyph, lig_supp_attr) then
                  local current_font = font.getfont(font_id)
                  if current_font and current_font.name and string.find(current_font.name, "mode=harf;") then
                     -- insert ZWNJ for Harfbuzz renderer
                     local zwnj_node = Ncopy(ZWNJ_templ)
                     zwnj_node.font = first_glyph.font
                     zwnj_node.lang = first_glyph.lang
                     Ninsert_after(head, first_glyph, zwnj_node)
                  else -- insert kern node for Node renderer
                     -- Lazily cache the font's kern coverage table.
                     if not font_kern_values[font_id] then
                        if current_font and current_font.resources and current_font.resources.sequences then
                           for _, t in ipairs(current_font.resources.sequences) do
                              if t.features and t.features.kern and t.steps and t.steps[1] and t.steps[1].coverage then
                                 font_kern_values[font_id] = t.steps[1].coverage
                              end
                           end
                        end
                     end
                     local kern_value = 0
                     local coverage = font_kern_values[font_id]
                     local pair_kern = coverage and coverage[first_glyph.char]
                        and coverage[first_glyph.char][second_glyph.char]
                     -- Bug fix: the original wrote type(x == "number"), i.e. the type
                     -- of a boolean, which is always truthy, so the guard against
                     -- non-numeric (table-valued) kern entries never took effect.
                     if pair_kern and type(pair_kern) == "number"
                        and current_font and current_font.units_per_em and current_font.size then
                        -- Scale the font-unit kern value to the actual font size.
                        kern_value = pair_kern / current_font.units_per_em * current_font.size
                     end
                     local kern_node = get_kern_node(kern_value)
                     if first_glyph.next.id == node.id('disc') then
                        -- Keep an existing replace text; otherwise store the kern there.
                        first_glyph.next.replace = first_glyph.next.replace or kern_node
                     else
                        Ninsert_after(head, first_glyph, kern_node)
                     end
                  end
               end
            end
         end
      end
   end
end

--- Manipulation that inserts long s glyphs.
-- All round s glyphs (char code 0x73, LATIN SMALL LETTER S) not
-- followed by a spot are replaced by a long s glyph (char code 0x017F,
-- LATIN SMALL LETTER LONG S).
--
-- @param head Head of a node list.
local function insert_long_s(head, scan_node_list, lang_name)
   -- Do pattern matching.
   local words = scan_node_list(head)
   -- The attribute number is loop-invariant; look it up once.
   local long_s_attr = luatexbase.registernumber("autotype_"..lang_name.."_long_s_attr")
   -- Iterate over words.
   for _, word in ipairs(words) do
      -- Replace all round s glyphs not followed by a spot by a long s
      -- glyph except for the last character of a word.
      for i, n in ipairs(word.nodes) do
         if n.char == 0x73 and Nhas_attribute(n, long_s_attr) then
            if i == #word.levels - 1 then -- last character of the word
               n.char = get_final_round_s_codepoint(n.font)
            elseif word.levels[i+1] % 2 == 1 then -- spot after the s: round s stays
               n.char = get_round_s_codepoint(n.font)
            else
               n.char = get_long_s_codepoint(n.font)
            end
         end
      end
   end
end

-- Call-back registering.
--
-- Load padrinoma module.
local padrinoma = require('autotype-pdnm_nl_manipulation')

--- Map a language and hyphenation class to a pattern file name.
-- @param lang_name Babel-style language name.
-- @param lang_num LuaTeX language number (used to disambiguate 'german').
-- @param hyph_type One of 'primary', 'secondary', 'tertiary', 'special'.
-- @return The pattern file name, or nil if unsupported for this language.
local function get_hyphenation_pattern_file(lang_name, lang_num, hyph_type)
   local pattern_file
   if (lang_name == 'ngerman' or lang_name == 'naustrian' or lang_name == 'nswissgerman'
       or lang_name == 'german-de' or lang_name == 'german-germany'
       or lang_name == 'german-at' or lang_name == 'german-austria'
       or lang_name == 'german-ch' or lang_name == 'german-switzerland') then
      if hyph_type == 'tertiary' then
         -- The tertiary hyphenation patterns are the default patterns of LuaTeX.
         pattern_file = 'hyph-de-1996.pat.txt'
      elseif hyph_type ~= 'special' then
         pattern_file = 'autotype-hyph-de-1996-'..hyph_type..'.pat.txt'
      end
   elseif lang_name == 'swissgerman' or lang_name == 'german-ch-1901' or lang_name == 'german-switzerland-1901' then
      if hyph_type == 'special' then
         pattern_file = 'autotype-hyph-de-1901-special.pat.txt' -- There are no special hyphenation patterns for Switzerland alone.
      elseif hyph_type == 'tertiary' then
         pattern_file = 'hyph-de-ch-1901.pat.txt' -- hyph-utf8 only uses lower-case letters in file names.
      else
         pattern_file = 'autotype-hyph-de-CH-1901-'..hyph_type..'.pat.txt'
      end
   elseif lang_name == 'austrian' or lang_name == 'german-de-1901' or lang_name == 'german-germany-1901'
      or lang_name == 'german-at-1901' or lang_name == 'german-austria-1901' then
      if hyph_type == 'tertiary' then
         -- The tertiary hyphenation patterns are the default patterns of LuaTeX.
         pattern_file = 'hyph-de-1901.pat.txt'
      else
         pattern_file = 'autotype-hyph-de-1901-'..hyph_type..'.pat.txt'
      end
   elseif lang_name == 'german' then
      if lang_num == luatexbase.registernumber('l@ngerman') or lang_num == luatexbase.registernumber('l@ngerman-x-latest') then
         if hyph_type == 'tertiary' then
            -- The tertiary hyphenation patterns are the default patterns of LuaTeX.
            pattern_file = 'hyph-de-1996.pat.txt'
         elseif hyph_type ~= 'special' then
            pattern_file = 'autotype-hyph-de-1996-'..hyph_type..'.pat.txt'
         end
      elseif hyph_type == 'tertiary' then
         -- The tertiary hyphenation patterns are the default patterns of LuaTeX.
         pattern_file = 'hyph-de-1901.pat.txt'
      else
         pattern_file = 'autotype-hyph-de-1901-'..hyph_type..'.pat.txt'
      end
   end
   return pattern_file
end

-- Original hyphenation patterns per language number, saved before clearing
-- so they can be restored when switching back to default hyphenation.
local original_patterns = {}

--- Save (once) and clear a language's hyphenation patterns.
local function clear_patterns(lang_num)
   local luatex_lang = lang.new(lang_num)
   if not original_patterns[lang_num] then
      original_patterns[lang_num] = lang.patterns(luatex_lang)
   end
   lang.clear_patterns(luatex_lang)
end

--- Restore a language's previously saved hyphenation patterns.
local function restore_patterns(lang_num)
   if original_patterns[lang_num] then
      lang.patterns(lang.new(lang_num), original_patterns[lang_num])
   end
end

--- Switch a language back to TeX's default hyphenation.
local function default_hyph(lang_name, lang_num)
   -- Remove primary, special or weighted hyphenation from hyphenate callback
   if luatexbase.in_callback('hyphenate', 'autotype primary hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype primary hyphenation for '..lang_name)
   elseif luatexbase.in_callback('hyphenate', 'autotype special hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype special hyphenation for '..lang_name)
   elseif luatexbase.in_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name)
   end
   -- Restore original patterns to be used by TeX's default hyphenation algorithm
   restore_patterns(lang_num)
end

--- Activate primary-only hyphenation for a language.
local function primary_hyph(lang_name, lang_num)
   -- Clear the language's patterns to avoid insertion of hyphenation points by TeX
   clear_patterns(lang_num)
   -- Remove special hyphenation from hyphenate callback
   if luatexbase.in_callback('hyphenate', 'autotype special hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype special hyphenation for '..lang_name)
   -- Remove weighted hyphenation from hyphenate callback
   elseif luatexbase.in_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name)
   end
   local pattern_file_i = get_hyphenation_pattern_file(lang_name, lang_num, 'primary')
   -- Even for primary hyphenation points we require a regular (tertiary) hyphenation point.
   -- This avoids wrong hyphenations like "Sch-rattenthal" (generalized by patgen from "Arsch-ratte" and "Busch-ratte") in some cases.
   local pattern_file_iii = get_hyphenation_pattern_file(lang_name, lang_num, 'tertiary')
   if pattern_file_i and pattern_file_iii then
      local scan_node_list_i = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file_i, -1, -1)
      local scan_node_list_iii = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file_iii, -1, -1)
      -- Register callback for primary hyphenation
      if not luatexbase.in_callback('hyphenate', 'autotype primary hyphenation for '..lang_name) then
         luatexbase.add_to_callback('hyphenate', function (head, _)
            -- Apply default hyphenation (this is important for other languages).
            lang.hyphenate(head)
            -- Apply node list manipulation.
            insert_primary_hyphenation_points(head, scan_node_list_i, scan_node_list_iii)
         end, 'autotype primary hyphenation for '..lang_name)
      end
   else
      tex.print("\\UseName{msg_warning:nnnn}{autotype}{unsupported-hyphenation}{Primary}{"..lang_name.."}")
   end
end

--- Activate special (ck / triple-consonant) hyphenation for a language.
local function special_hyph(lang_name, lang_num)
   -- Restore original patterns to be used by TeX's default hyphenation algorithm
   -- in case they have been cleared before (primary or weighted hyphenation)
   restore_patterns(lang_num)
   -- Remove primary hyphenation from hyphenate callback
   if luatexbase.in_callback('hyphenate', 'autotype primary hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype primary hyphenation for '..lang_name)
   -- Remove weighted hyphenation from hyphenate callback
   elseif luatexbase.in_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name)
   end
   local pattern_file = get_hyphenation_pattern_file(lang_name, lang_num, 'special')
   if pattern_file then
      local scan_node_list = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file, -1, -1)
      -- Register callback for special hyphenation
      if not luatexbase.in_callback('hyphenate', 'autotype special hyphenation for '..lang_name) then
         luatexbase.add_to_callback('hyphenate', function (head, _)
            -- Apply default hyphenation (this is important for other languages).
            lang.hyphenate(head)
            -- Apply node list manipulation.
            insert_special_hyphenation_points(head, scan_node_list, tex.hyphenpenalty)
         end, 'autotype special hyphenation for '..lang_name)
      end
   else
      tex.print("\\UseName{msg_warning:nnnn}{autotype}{unsupported-hyphenation}{Special}{"..lang_name.."}")
   end
end

--- Activate weighted hyphenation (optionally including special hyphenation).
local function weighted_hyph(lang_name, lang_num)
   -- Clear the language's patterns to avoid insertion of hyphenation points by TeX
   clear_patterns(lang_num)
   -- Remove primary hyphenation from hyphenate callback
   if luatexbase.in_callback('hyphenate', 'autotype primary hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype primary hyphenation for '..lang_name)
   -- Remove special hyphenation from hyphenate callback
   elseif luatexbase.in_callback('hyphenate', 'autotype special hyphenation for '..lang_name) then
      luatexbase.remove_from_callback('hyphenate', 'autotype special hyphenation for '..lang_name)
   end
   local pattern_file_i = get_hyphenation_pattern_file(lang_name, lang_num, 'primary')
   local pattern_file_ii = get_hyphenation_pattern_file(lang_name, lang_num, 'secondary')
   local pattern_file_iii = get_hyphenation_pattern_file(lang_name, lang_num, 'tertiary')
   local pattern_file_special = get_hyphenation_pattern_file(lang_name, lang_num, 'special')
   if pattern_file_i and pattern_file_ii and pattern_file_iii then
      local scan_node_list_i = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file_i, -1, -1)
      local scan_node_list_ii = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file_ii, -1, -1)
      local scan_node_list_iii = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file_iii, -1, -1)
      if pattern_file_special then
         local scan_node_list_special = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file_special, -1, -1)
         -- Register callback for weighted hyphenation including special hyphenation
         if not luatexbase.in_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name) then
            luatexbase.add_to_callback('hyphenate', function (head, _)
               -- Apply default hyphenation (this is important for other languages).
               lang.hyphenate(head)
               -- Apply node list manipulation.
               insert_weighted_hyphenation_points(head, scan_node_list_i, scan_node_list_ii, scan_node_list_iii)
               insert_special_hyphenation_points(head, scan_node_list_special, PENALTY_III)
            end, 'autotype weighted hyphenation for '..lang_name)
         end
      else
         -- Register callback for weighted hyphenation
         if not luatexbase.in_callback('hyphenate', 'autotype weighted hyphenation for '..lang_name) then
            luatexbase.add_to_callback('hyphenate', function (head, _)
               -- Apply default hyphenation (this is important for other languages).
               lang.hyphenate(head)
               -- Apply node list manipulation.
               insert_weighted_hyphenation_points(head, scan_node_list_i, scan_node_list_ii, scan_node_list_iii)
            end, 'autotype weighted hyphenation for '..lang_name)
         end
      end
   else
      tex.print("\\UseName{msg_warning:nnnn}{autotype}{unsupported-hyphenation}{Weighted}{"..lang_name.."}")
   end
end

--- Register the post-linebreak filter that draws hyphenation-point markers.
local function mark_hyph(lang_name, lang_num)
   luatexbase.add_to_callback('post_linebreak_filter', function(head, _)
      mark_hyphenation_points(head, lang_name, lang_num)
      return true
   end, 'autotype marking of hyphenation points for '..lang_name)
end

--- Register ligature suppression, keeping TeX's regular ligaturing afterwards.
local function lig_supp(lang_name, lang_num)
   local pattern_file = 'autotype-liga-de.pat.txt' -- TO DO: adapt to language
   local scan_node_list = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file, 2, 2)
   -- Remove callback for regular ligaturing if present
   if luatexbase.in_callback('ligaturing', 'TeX ligaturing') then
      luatexbase.remove_from_callback('ligaturing', 'TeX ligaturing')
   end
   -- Register callback for preventing ligatures
   luatexbase.add_to_callback('ligaturing', function (head, _)
      -- Apply node list manipulation.
      suppress_ligatures(head, scan_node_list, lang_name)
   end, 'autotype ligaturing for '..lang_name)
   -- Register callback for regular ligaturing (runs after the suppression step)
   luatexbase.add_to_callback('ligaturing', function (head, _)
      node.ligaturing(head)
   end, 'TeX ligaturing')
end

--- Register long-s replacement, keeping TeX's regular ligaturing afterwards.
local function long_s(lang_name, lang_num)
   local pattern_file = 'autotype-round-s-de.pat.txt' -- TO DO: adapt to language
   local scan_node_list = padrinoma.create_node_list_scanner(lang_num or lang_name, pattern_file, 0, 0)
   -- Remove callback for regular ligaturing if present
   if luatexbase.in_callback('ligaturing', 'TeX ligaturing') then
      luatexbase.remove_from_callback('ligaturing', 'TeX ligaturing')
   end
   -- Register callback for long s insertion
   luatexbase.add_to_callback('ligaturing', function (head, _)
      -- Apply node list manipulation.
      insert_long_s(head, scan_node_list, lang_name)
   end, 'autotype long s replacement for '..lang_name)
   -- Register callback for regular ligaturing (runs after the replacement step)
   luatexbase.add_to_callback('ligaturing', function (head, _)
      node.ligaturing(head)
   end, 'TeX ligaturing')
end

--- Penalty value for a hyphenation class (1 = primary, 2 = secondary, 3 = tertiary).
-- @return The penalty, or nil for any other argument.
local function get_penalty(i)
   if i==1 then
      return PENALTY_I
   elseif i==2 then
      return PENALTY_II
   elseif i==3 then
      return PENALTY_III
   end
end

-- Public module interface.
local autotype = {}
autotype.default_hyph = default_hyph
autotype.primary_hyph = primary_hyph
autotype.special_hyph = special_hyph
autotype.weighted_hyph = weighted_hyph
autotype.mark_hyph = mark_hyph
autotype.lig_supp = lig_supp
autotype.long_s = long_s
autotype.set_long_s_codepoint = set_long_s_codepoint
autotype.set_round_s_codepoint = set_round_s_codepoint
autotype.set_final_round_s_codepoint = set_final_round_s_codepoint
autotype.get_current_long_s_codepoint = get_current_long_s_codepoint
autotype.get_current_round_s_codepoint = get_current_round_s_codepoint
autotype.get_penalty = get_penalty
return autotype