| Metric | Value |
| --- | --- |
| Total Complexity | 105 |
| Total Lines | 767 |
| Duplicated Lines | 7.3 % |
| Changes | 0 |
Duplicate code is one of the most pungent code smells. A common rule of thumb is to restructure code once it is duplicated in three or more places.
Common duplication problems, and their corresponding solutions, are:
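As one concrete illustration of such a solution (a sketch for this file, not output from the analysis itself): `format_ref` and `format_references` below both build the same `\begin{quote} \verb|...|\end{quote}` string, and that duplication could be collapsed into a single shared helper. The helper name `_verb_quote` is hypothetical.

```python
# Hypothetical sketch: both format_ref and format_references in this file build
# the same "\begin{quote} \verb|...|\end{quote}" string. A single module-level
# helper removes that duplication. The name _verb_quote is illustrative only.
from typing import Optional


def _verb_quote(path: str, line: Optional[int] = None) -> str:
    """Wrap a file reference in a LaTeX quote/verb block."""
    if line:
        return "\\begin{{quote}} \\verb|{p}| (line {line})\\end{{quote}}".format(
            p=path, line=line
        )
    return "\\begin{{quote}} \\verb|{p}|\\end{{quote}}".format(p=path)


if __name__ == "__main__":
    # Both call sites would then reduce to one-liners such as:
    print(_verb_quote("src/module.py", 42))
    print(_verb_quote("docs/spec.md"))
```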
Complex classes like doorstop.core.publishers.latex often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields and methods that share the same prefixes or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often faster. A sketch of Extract Class applied to this file follows.
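For illustration, here is a minimal sketch of what Extract Class could look like for the compile-script bookkeeping in LaTeXPublisher (compile_files, compile_path, and the compile.sh writing form a cohesive group). The class and method names (CompileScriptBuilder, add_document, write) are hypothetical and not part of doorstop.

```python
# Hypothetical "Extract Class" sketch: pull the compile.sh bookkeeping out of
# LaTeXPublisher into its own object. Names are illustrative, not doorstop API.
import os
from typing import List


class CompileScriptBuilder:
    """Collect pdflatex commands and write them out as a compile.sh script."""

    def __init__(self, output_dir: str):
        self.path = os.path.join(output_dir, "compile.sh")
        self.commands: List[str] = []

    def add_document(self, name: str) -> None:
        # One pdflatex invocation per published document.
        self.commands.append(
            "pdflatex -halt-on-error -shell-escape {n}.tex".format(n=name)
        )

    def write(self) -> None:
        # Write the collected commands and mark the script executable.
        with open(self.path, "w", encoding="utf-8") as script:
            script.write("\n".join(self.commands) + "\n")
        os.chmod(self.path, 0o755)


if __name__ == "__main__":
    builder = CompileScriptBuilder(".")
    builder.add_document("REQ")
    builder.add_document("TST")
    builder.write()
```

The publisher would then hold one such builder and delegate to it in preparePublish, publishAction, and concludePublish, shrinking its own surface.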
```python
# SPDX-License-Identifier: LGPL-3.0-only

"""Functions to publish LaTeX documents."""

import os
import re
from typing import List

from doorstop import common, settings
from doorstop.cli import utilities
from doorstop.common import DoorstopError
from doorstop.core.publishers._latex_functions import (
    _add_comment,
    _check_for_new_table,
    _fix_table_line,
    _get_document_attributes,
    _latex_convert,
    _typeset_latex_image,
)
from doorstop.core.publishers.base import BasePublisher, extract_prefix
from doorstop.core.template import check_latex_template_data, read_template_data
from doorstop.core.types import is_item, iter_documents, iter_items

log = common.logger(__name__)


class LaTeXPublisher(BasePublisher):
    """LaTeX publisher."""

    def __init__(self, obj, ext):
        super().__init__(obj, ext)
        self.END_LONGTABLE = "\\end{longtable}"
        self.HLINE = "\\hline"
        self.compile_files = []
        self.compile_path = ""
        # Define lists.
        self.list["start"] = {
            "itemize": r"\begin{itemizeDeep}",
            "enumerate": r"\begin{enumerateDeep}",
        }
        self.list["end"] = {
            "itemize": r"\end{itemizeDeep}",
            "enumerate": r"\end{enumerateDeep}",
        }
        self.list["start_item"] = {"itemize": r"\\item ", "enumerate": r"\\item "}
        self.list["end_item"] = {"itemize": "", "enumerate": ""}

    def preparePublish(self):
        """Publish wrapper files for LaTeX."""
        log.debug("Generating compile script for LaTeX from %s", self.path)
        self.compile_path = self._get_compile_path()

    def publishAction(self, document, path):
        """Add file to compile.sh script."""
        self.document = document
        self.documentPath = path

        log.debug("Generating compile script for LaTeX from %s", self.documentPath)
        file_to_compile = self._generate_latex_wrapper()
        self.compile_files.append(file_to_compile)

    def concludePublish(self):
        """Write out the compile.sh file."""
        common.write_lines(
            self.compile_files,
            self.compile_path,
            end=settings.WRITE_LINESEPERATOR,
            executable=True,
        )
        msg = "You can now execute the file 'compile.sh' twice in the exported folder to produce the PDFs!"
        utilities.show(msg, flush=True)

    def create_index(self, directory, index=None, extensions=(".html",), tree=None):
        """No index for LaTeX."""

    def lines(self, obj, **kwargs):
        """Yield lines for a LaTeX report.

        :param obj: Item, list of Items, or Document to publish
        :param linkify: turn links into hyperlinks

        :return: iterator of lines of text

        """
        linkify = kwargs.get("linkify", False)
        for item in iter_items(obj):
            heading = "\\" + "sub" * (item.depth - 1) + "section*{"
            heading_level = "\\" + "sub" * (item.depth - 1) + "section{"

            if item.heading:
                text_lines = item.text.splitlines()
                if item.header:
                    text_lines.insert(0, item.header)
                # Level and Text
                if settings.PUBLISH_HEADING_LEVELS:
                    standard = "{h}{t}{he}".format(
                        h=heading_level,
                        t=_latex_convert(text_lines[0]) if text_lines else "",
                        he="}",
                    )
                else:
                    standard = "{h}{t}{he}".format(
                        h=heading,
                        t=_latex_convert(text_lines[0]) if text_lines else "",
                        he="}",
                    )
                attr_list = self.format_attr_list(item, True)
                yield standard + attr_list
                yield from self._format_latex_text(text_lines[1:])
            else:
                uid = item.uid
                if settings.ENABLE_HEADERS:
                    if item.header:
                        uid = "{h}{{ - \\small{{}}\\texttt{{}}{u}}}".format(
                            h=_latex_convert(item.header), u=item.uid
                        )
                    else:
                        uid = "{u}".format(u=item.uid)

                # Level and UID
                if settings.PUBLISH_BODY_LEVELS:
                    standard = "{h}{u}{he}".format(h=heading_level, u=uid, he="}")
                else:
                    standard = "{h}{u}{he}".format(h=heading, u=uid, he="}")

                attr_list = self.format_attr_list(item, True)
                yield standard + attr_list

                # Text
                if item.text:
                    yield ""  # break before text
                    yield from self._format_latex_text(item.text.splitlines())

                # Reference
                if item.ref:
                    yield ""  # break before reference
                    yield self.format_ref(item)

                # Reference
                if item.references:
                    yield ""  # break before reference
                    yield self.format_references(item)

                # Parent links
                if item.links:
                    yield ""  # break before links
                    items2 = item.parent_items
                    if settings.PUBLISH_CHILD_LINKS:
                        label = "Parent links:"
                    else:
                        label = "Links:"
                    links = self.format_links(items2, linkify)
                    label_links = self.format_label_links(label, links, linkify)
                    yield label_links

                # Child links
                if settings.PUBLISH_CHILD_LINKS:
                    items2 = item.find_child_items()
                    if items2:
                        yield ""  # break before links
                        label = "Child links:"
                        links = self.format_links(items2, linkify)
                        label_links = self.format_label_links(label, links, linkify)
                        yield label_links

                # Add custom publish attributes
                if item.document and item.document.publish:
                    header_printed = False
                    for attr in item.document.publish:
                        if not item.attribute(attr):
                            continue
                        if not header_printed:
                            header_printed = True
                            yield "\\begin{longtable}{|l|l|}"
                            yield "Attribute & Value\\\\"
                            yield self.HLINE
                        yield "{} & {}".format(attr, item.attribute(attr))
                    if header_printed:
                        yield self.END_LONGTABLE
                    else:
                        yield ""

            yield ""  # break between items

    def format_attr_list(self, item, linkify):
        """Create a LaTeX attribute list for a heading."""
        return (
            "{l}{u}{le}{zl}{u}{le}".format(
                l="\\label{", zl="\\zlabel{", u=item.uid, le="}"
            )
            if linkify
            else ""
        )

    def format_ref(self, item):
        """Format an external reference in LaTeX."""
        if settings.CHECK_REF:
            path, line = item.find_ref()
            path = path.replace("\\", "/")  # always use unix-style paths
            if line:
                return (
                    "\\begin{{quote}} \\verb|{p}| (line {line})\\end{{quote}}".format(
                        p=path, line=line
                    )
                )
            else:
                return "\\begin{{quote}} \\verb|{p}|\\end{{quote}}".format(p=path)
        else:
            return "\\begin{{quote}} \\verb|{r}|\\end{{quote}}".format(r=item.ref)

    def format_references(self, item):
        """Format an external reference in LaTeX."""
        if settings.CHECK_REF:
            references = item.find_references()
            text_refs = []
            for ref_item in references:
                path, line = ref_item
                path = path.replace("\\", "/")  # always use unix-style paths

                if line:
                    text_refs.append(
                        "\\begin{{quote}} \\verb|{p}| (line {line})\\end{{quote}}".format(
                            p=path, line=line
                        )
                    )
                else:
                    text_refs.append(
                        "\\begin{{quote}} \\verb|{p}|\\end{{quote}}".format(p=path)
                    )

            return "\n".join(ref for ref in text_refs)
        else:
            references = item.references
            text_refs = []
            for ref_item in references:
                path = ref_item["path"]
                path = path.replace("\\", "/")  # always use unix-style paths
                text_refs.append(
                    "\\begin{{quote}} \\verb|{r}|\\end{{quote}}".format(r=path)
                )
            return "\n".join(ref for ref in text_refs)

    def format_links(self, items, linkify, to_html=False):
        """Format a list of linked items in LaTeX."""
        links = []
        for item in items:
            link = self.format_item_link(item, linkify=linkify)
            links.append(link)
        return ", ".join(links)

    def format_item_link(self, item, linkify=True):
        """Format an item link in LaTeX."""
        if linkify and is_item(item):
            if item.header:
                return "\\hyperref[{u}]{{{u}}}".format(u=item.uid)
            return "\\hyperref[{u}]{{{u}}}".format(u=item.uid)
        else:
            return str(item.uid)  # if not `Item`, assume this is an `UnknownItem`

    def format_label_links(self, label, links, linkify):
        """Join a string of label and links with formatting."""
        if linkify:
            return "\\textbf{{{lb}}} {ls}".format(lb=label, ls=links)
        else:
            return "\\textbf{{{lb} {ls}}}".format(lb=label, ls=links)

    def _typeset_latex_table(
        self, table_match, text, i, line, block, table_found, header_done, end_pipes
    ):
        """Typeset tables."""
        if not table_found:
            table_found, header_done, line, end_pipes = _check_for_new_table(
                table_match, text, i, line, block, table_found, header_done, end_pipes
            )
        else:
            if not header_done:
                line = self.HLINE
                header_done = True
            else:
                # Fix the line.
                line = _fix_table_line(line, end_pipes)
        return table_found, header_done, line, end_pipes

    def _format_latex_text(self, text):
        """Fix all general text formatting to use LaTeX-macros."""
        block: List[str]
        block = []
        environment_data = {}
        environment_data["table_found"] = False
        header_done = False
        environment_data["code_found"] = False
        math_found = False
        environment_data["plantuml_found"] = False
        plantuml_file = ""
        plantuml_name = ""
        plantuml_count = 0
        end_pipes = False
        for i, line in enumerate(text):
            no_paragraph = False
            #############################
            ## Fix plantuml.
            #############################
            if environment_data["plantuml_found"]:
                no_paragraph = True
            if re.findall("^`*plantuml\\s", line):
                plantuml_count = plantuml_count + 1
                plantuml_title = re.search('title="(.*)"', line)
                if plantuml_title:
                    plantuml_name = str(plantuml_title.groups(0)[0])
                else:
                    raise DoorstopError(
                        "'title' is required for plantUML processing in LaTeX."
                    )
                plantuml_file = re.sub("\\s", "-", plantuml_name)
                block.append(
                    r"\hyperref[fig:plant"
                    + str(plantuml_count)
                    + "]{"
                    + plantuml_name
                    + "}"
                )
                line = "\\begin{plantuml}{" + plantuml_file + "}"
                environment_data["plantuml_found"] = True
            if re.findall("@enduml", line):
                block.append(line)
                block.append("\\end{plantuml}")
                line = (
                    "\\process{"
                    + plantuml_file
                    + "}{0.8\\textwidth}{"
                    + plantuml_name
                    + "}"
                    + "{"
                    + str(plantuml_count)
                    + "}"
                )
                environment_data["plantuml_found"] = False
            # Skip the rest since we are in a plantuml block!
            if environment_data["plantuml_found"]:
                block.append(line)
                # Check for end of file and end all environments.
                self._check_for_eof(
                    i,
                    block,
                    text,
                    environment_data,
                    plantuml_name,
                    plantuml_file,
                )
                continue

            #############################
            ## Fix code blocks.
            #############################
            code_match = re.findall("```", line)
            if environment_data["code_found"]:
                no_paragraph = True
            if code_match:
                # Check previous line of @enduml.
                if i > 0:
                    previous_line = text[i - 1]
                    if re.findall("@enduml", previous_line):
                        continue
                if environment_data["code_found"]:
                    line = "\\end{lstlisting}"
                    environment_data["code_found"] = False
                else:
                    # Check for language.
                    language = re.search("```(.*)", line)
                    if language and str(language.groups(0)[0]) != "":
                        line = (
                            "\\begin{lstlisting}[language="
                            + str(language.groups(0)[0])
                            + "]"
                        )
                    else:
                        line = "\\begin{lstlisting}"
                    environment_data["code_found"] = True
            # Skip the rest since we are in a code block!
            if environment_data["code_found"]:
                block.append(line)
                # Check for end of file and end all environments.
                self._check_for_eof(
                    i,
                    block,
                    text,
                    environment_data,
                    plantuml_name,
                    plantuml_file,
                )
                continue
            # Replace ` for inline code, but not if it is already escaped.
            # First replace escaped inline code.
            line = re.sub("\\\\`", "##!!TEMPINLINE!!##", line)
            # Then replace inline code.
            line = re.sub("`(.+?)`", "\\\\lstinline`\\1`", line)
            # Then replace escaped inline code back.
            line = re.sub("##!!TEMPINLINE!!##", "\\\\`{}", line)

            #############################
            ## Fix images.
            #############################
            image_match = re.findall(r"!\[(.*)\]\((.*)\)", line)
            if image_match:
                line = _typeset_latex_image(image_match, line, block)
            #############################
            ## Fix $ and MATH.
            #############################
            math_match = re.split("\\$\\$", line)
            if len(math_match) > 1:
                if math_found and len(math_match) == 2:
                    math_found = False
                    line = math_match[0] + "$" + _latex_convert(math_match[1])
                elif len(math_match) == 2:
                    math_found = True
                    line = _latex_convert(math_match[0]) + "$" + math_match[1]
                elif len(math_match) == 3:
                    line = (
                        _latex_convert(math_match[0])
                        + "$"
                        + math_match[1]
                        + "$"
                        + _latex_convert(math_match[2])
                    )
                else:
                    raise DoorstopError(
                        "Cannot handle multiple math environments on one row."
                    )
            else:
                line = _latex_convert(line)
            # Skip all other changes if in MATH!
            if math_found:
                line = line + "\\\\"
                block.append(line)
                continue
            #############################
            ## Fix lists.
            #############################
            # Check if we are at the end of the data.
            if i == len(text) - 1:
                next_line = ""
            else:
                next_line = text[i + 1]
            (no_paragraph, processed_block, line) = self.process_lists(line, next_line)
            if processed_block != "":
                block.append(processed_block)
            #############################
            ## Fix tables.
            #############################
            # Check if line is part of table.
            table_match = re.findall("\\|", line)
            if table_match:
                (
                    environment_data["table_found"],
                    header_done,
                    line,
                    end_pipes,
                ) = self._typeset_latex_table(
                    table_match,
                    text,
                    i,
                    line,
                    block,
                    environment_data["table_found"],
                    header_done,
                    end_pipes,
                )
            else:
                if environment_data["table_found"]:
                    block.append(self.END_LONGTABLE)
                environment_data["table_found"] = False
                header_done = False

            # Look ahead for empty line and add paragraph.
            if i < len(text) - 1:
                next_line = text[i + 1]
                if next_line == "" and not re.search("\\\\", line) and not no_paragraph:
                    line = line + "\\\\"

            #############################
            ## All done. Add the line.
            #############################
            block.append(line)

            # Check for end of file and end all environments.
            self._check_for_eof(
                i,
                block,
                text,
                environment_data,
                plantuml_name,
                plantuml_file,
            )
        return block

    def _check_for_eof(
        self,
        index,
        block,
        text,
        environment_data,
        plantuml_name,
        plantuml_file,
    ):
        """Check for end of file and end all unended environments."""
        if index == len(text) - 1:
            if environment_data["code_found"]:
                block.append("\\end{lstlisting}")
            if environment_data["plantuml_found"]:
                block.append("\\end{plantuml}")
                block.append(
                    "\\process{"
                    + plantuml_file
                    + "}{0.8\\textwidth}{"
                    + plantuml_name
                    + "}"
                )
            if environment_data["table_found"]:
                block.append(self.END_LONGTABLE)

    def create_matrix(self, directory):
        """Create a traceability table for LaTeX."""
        # Setup.
        table = self.object.get_traceability().__iter__()
        traceability = []
        file = os.path.join(directory, "traceability.tex")
        count = 0
        # Start the table.
        table_start = "\\begin{longtable}{"
        table_head = ""
        header_data = table.__next__()
        for column in header_data:
            count = count + 1
            table_start = table_start + "|l"
            if len(table_head) > 0:
                table_head = table_head + " & "
            table_head = table_head + "\\textbf{" + str(column) + "}"
        table_start = table_start + "|}"
        table_head = table_head + "\\\\"
        traceability.append(table_start)
        traceability.append(
            "\\caption{Traceability matrix.}\\label{tbl:trace}\\zlabel{tbl:trace}\\\\"
        )
        traceability.append(self.HLINE)
        traceability.append(table_head)
        traceability.append(self.HLINE)
        traceability.append("\\endfirsthead")
        traceability.append("\\caption{\\textit{(Continued)} Traceability matrix.}\\\\")
        traceability.append(self.HLINE)
        traceability.append(table_head)
        traceability.append(self.HLINE)
        traceability.append("\\endhead")
        traceability.append(self.HLINE)
        traceability.append(
            "\\multicolumn{{{n}}}{{r}}{{\\textit{{Continued on next page.}}}}\\\\".format(
                n=count
            )
        )
        traceability.append("\\endfoot")
        traceability.append(self.HLINE)
        traceability.append("\\endlastfoot")
        # Add rows.
        for row in table:
            row_text = ""
            for column in row:
                if len(row_text) > 0:
                    row_text = row_text + " & "
                if column:
                    row_text = row_text + "\\hyperref[{u}]{{{u}}}".format(u=str(column))
                else:
                    row_text = row_text + " "
            row_text = row_text + "\\\\"
            traceability.append(row_text)
            traceability.append(self.HLINE)
        # End the table.
        traceability.append(self.END_LONGTABLE)
        common.write_lines(traceability, file, end=settings.WRITE_LINESEPERATOR)

    def _get_compile_path(self):
        """Return the path to the compile script."""
        head, tail = os.path.split(self.path)
        # If tail ends with .tex, replace it with compile.sh.
        if tail.endswith(".tex"):
            return os.path.join(head, "compile.sh")
        return os.path.join(self.path, "compile.sh")

    def _generate_latex_wrapper(self):
        """Generate all wrapper scripts required for typesetting in LaTeX."""
        # Check for defined document attributes.
        doc_attributes = _get_document_attributes(self.document)
        # Create the wrapper file.
        head, tail = os.path.split(self.documentPath)
        if tail != extract_prefix(self.document) + ".tex":
            log.warning(
                "LaTeX export does not support custom file names. Change in .doorstop.yml instead."
            )
            tail = doc_attributes["name"] + ".tex"
        self.documentPath = os.path.join(head, extract_prefix(self.document) + ".tex")
        wrapperPath = os.path.join(head, tail)
        # Load template data.
        templatePath = os.path.abspath(os.path.join(self.assetsPath, "..", "template"))
        log.info(
            "Loading template data from {}/{}.yml".format(templatePath, self.template)
        )
        template_data = read_template_data(self.assetsPath, self.template)
        check_latex_template_data(
            template_data, "{}/{}.yml".format(templatePath, self.template)
        )
        wrapper = []
        wrapper.append(
            "\\documentclass[%s]{template/%s}"
            % (", ".join(template_data["documentclass"]), self.template)
        )
        # Add required packages.
        wrapper = _add_comment(
            wrapper,
            "These packages are required.",
        )
        wrapper.append("\\usepackage{enumitem}")
        wrapper = _add_comment(wrapper, "END required packages.")
        wrapper.append("")

        # Add required packages from template data.
        wrapper = _add_comment(
            wrapper,
            "These packages were automatically added from the template configuration file.",
        )
        for package, options in template_data["usepackage"].items():
            package_line = "\\usepackage"
            if options:
                package_line += "[%s]" % ", ".join(options)
            package_line += "{%s}" % package
            wrapper.append(package_line)
        wrapper = _add_comment(
            wrapper, "END data from the template configuration file."
        )
        wrapper.append("")
        wrapper = _add_comment(
            wrapper,
            "These fields are generated from the default doc attribute in the .doorstop.yml file.",
        )
        wrapper.append(
            "\\def\\doccopyright{{{n}}}".format(
                n=_latex_convert(doc_attributes["copyright"])
            )
        )
        wrapper.append(
            "\\def\\doccategory{{{t}}}".format(
                t=_latex_convert(extract_prefix(self.document))
            )
        )
        wrapper.append(
            "\\def\\doctitle{{{n}}}".format(n=_latex_convert(doc_attributes["title"]))
        )
        wrapper.append(
            "\\def\\docref{{{n}}}".format(n=_latex_convert(doc_attributes["ref"]))
        )
        wrapper.append(
            "\\def\\docby{{{n}}}".format(n=_latex_convert(doc_attributes["by"]))
        )
        wrapper.append(
            "\\def\\docissuemajor{{{n}}}".format(
                n=_latex_convert(doc_attributes["major"])
            )
        )
        wrapper.append(
            "\\def\\docissueminor{{{n}}}".format(
                n=_latex_convert(doc_attributes["minor"])
            )
        )
        wrapper = _add_comment(wrapper, "END data from the .doorstop.yml file.")
        wrapper.append("")

        wrapper = _add_comment(
            wrapper,
            "LaTex is limited to four (4) levels of lists. The following code extends this to nine (9) levels.",
        )
        wrapper.append("% ******************************************************")
        wrapper.append("% Increase nesting level for lists")
        wrapper.append("% ******************************************************")
        wrapper.append("\\setlistdepth{9}")
        wrapper.append("\\newlist{itemizeDeep}{enumerate}{9}")
        wrapper.append("\\setlist[itemizeDeep,1]{label=\\textbullet}")
        wrapper.append(
            "\\setlist[itemizeDeep,2]{label=\\normalfont\\bfseries \\textendash}"
        )
        wrapper.append("\\setlist[itemizeDeep,3]{label=\\textasteriskcentered}")
        wrapper.append("\\setlist[itemizeDeep,4]{label=\\textperiodcentered}")
        wrapper.append("\\setlist[itemizeDeep,5]{label=\\textopenbullet}")
        wrapper.append("\\setlist[itemizeDeep,6]{label=\\textbullet}")
        wrapper.append(
            "\\setlist[itemizeDeep,7]{label=\\normalfont\\bfseries \\textendash}"
        )
        wrapper.append("\\setlist[itemizeDeep,8]{label=\\textasteriskcentered}")
        wrapper.append("\\setlist[itemizeDeep,9]{label=\\textperiodcentered}")
        wrapper.append("\\newlist{enumerateDeep}{enumerate}{9}")
        wrapper.append("\\setlist[enumerateDeep]{label*=\\arabic*.}")
        wrapper = _add_comment(wrapper, "END list depth fix.")
        wrapper.append("")

        info_text_set = False
        for external, _ in iter_documents(self.object, self.path, ".tex"):
            # Check for defined document attributes.
            external_doc_attributes = _get_document_attributes(external)
            # Don't add self.
            if external_doc_attributes["name"] != doc_attributes["name"]:
                if not info_text_set:
                    wrapper = _add_comment(
                        wrapper,
                        "These are automatically added external references to make cross-references work between the PDFs.",
                    )
                    info_text_set = True
                wrapper.append(
                    "\\zexternaldocument{{{n}}}".format(
                        n=external_doc_attributes["name"]
                    )
                )
                wrapper.append(
                    "\\externaldocument{{{n}}}".format(
                        n=external_doc_attributes["name"]
                    )
                )
        if info_text_set:
            wrapper = _add_comment(wrapper, "END external references.")
            wrapper.append("")
        wrapper = _add_comment(
            wrapper,
            "These lines were automatically added from the template configuration file to allow full customization of the template _before_ \\begin{document}.",
        )
        for line in template_data["before_begin_document"]:
            wrapper.append(line)
        wrapper = _add_comment(
            wrapper, "END custom data from the template configuration file."
        )
        wrapper.append("")
        wrapper.append("\\begin{document}")
        wrapper = _add_comment(
            wrapper,
            "These lines were automatically added from the template configuration file to allow full customization of the template _after_ \\begin{document}.",
        )
        for line in template_data["after_begin_document"]:
            wrapper.append(line)
        wrapper = _add_comment(
            wrapper, "END custom data from the template configuration file."
        )
        wrapper.append("")
        wrapper = _add_comment(wrapper, "Load the doorstop data file.")
        wrapper.append("\\input{{{n}.tex}}".format(n=extract_prefix(self.document)))
        wrapper = _add_comment(wrapper, "END doorstop data file.")
        wrapper.append("")
        # Include traceability matrix
        if self.matrix:
            wrapper = _add_comment(wrapper, "Add traceability matrix.")
            if settings.PUBLISH_HEADING_LEVELS:
                wrapper.append("\\section{Traceability}")
            else:
                wrapper.append("\\section*{Traceability}")
            wrapper.append("\\input{traceability.tex}")
            wrapper = _add_comment(wrapper, "END traceability matrix.")
            wrapper.append("")
        wrapper.append("\\end{document}")
        common.write_lines(wrapper, wrapperPath, end=settings.WRITE_LINESEPERATOR)

        # Add to compile.sh as return value.
        return "pdflatex -halt-on-error -shell-escape {n}.tex".format(
            n=doc_attributes["name"]
        )
```