[3] | 1 | # Authors: David Goodger, David Priest |
---|
| 2 | # Contact: goodger@python.org |
---|
| 3 | # Revision: $Revision: 3900 $ |
---|
| 4 | # Date: $Date: 2005-09-24 17:11:20 +0200 (Sat, 24 Sep 2005) $ |
---|
| 5 | # Copyright: This module has been placed in the public domain. |
---|
| 6 | |
---|
| 7 | """ |
---|
| 8 | Directives for table elements. |
---|
| 9 | """ |
---|
| 10 | |
---|
| 11 | __docformat__ = 'reStructuredText' |
---|
| 12 | |
---|
| 13 | |
---|
| 14 | import sys |
---|
| 15 | import os.path |
---|
| 16 | from docutils import io, nodes, statemachine, utils |
---|
| 17 | from docutils.utils import SystemMessagePropagation |
---|
| 18 | from docutils.parsers.rst import directives |
---|
| 19 | |
---|
| 20 | try: |
---|
| 21 | import csv # new in Python 2.3 |
---|
| 22 | except ImportError: |
---|
| 23 | csv = None |
---|
| 24 | |
---|
| 25 | try: |
---|
| 26 | import urllib2 |
---|
| 27 | except ImportError: |
---|
| 28 | urllib2 = None |
---|
| 29 | |
---|
| 30 | try: |
---|
| 31 | True |
---|
| 32 | except NameError: # Python 2.2 & 2.1 compatibility |
---|
| 33 | True = not 0 |
---|
| 34 | False = not 1 |
---|
| 35 | |
---|
| 36 | |
---|
def table(name, arguments, options, content, lineno,
          content_offset, block_text, state, state_machine):
    """
    Implement the "table" directive: parse the directive content, which
    must contain exactly one table, and attach an optional title and
    "class" attribute values to it.
    """
    if not content:
        # Nothing to parse: report and bail out with the warning node.
        return [state_machine.reporter.warning(
            'Content block expected for the "%s" directive; none found.'
            % name, nodes.literal_block(block_text, block_text),
            line=lineno)]
    title, messages = make_title(arguments, state, lineno)
    container = nodes.Element()     # anonymous container for parsing
    state.nested_parse(content, content_offset, container)
    parsed_one_table = (len(container) == 1
                        and isinstance(container[0], nodes.table))
    if not parsed_one_table:
        return [state_machine.reporter.error(
            'Error parsing content block for the "%s" directive: '
            'exactly one table expected.'
            % name, nodes.literal_block(block_text, block_text),
            line=lineno)]
    table_node = container[0]
    table_node['classes'] += options.get('class', [])
    if title:
        table_node.insert(0, title)
    return [table_node] + messages

table.arguments = (0, 1, 1)
table.options = {'class': directives.class_option}
table.content = 1
| 64 | |
---|
def make_title(arguments, state, lineno):
    """
    Build a table title node from the directive argument.

    Returns ``(title, messages)``: ``title`` is a `nodes.title` (or
    ``None`` when no argument was supplied) and ``messages`` is a list
    of system messages from inline parsing of the title text.
    """
    if not arguments:
        return None, []
    title_text = arguments[0]
    text_nodes, messages = state.inline_text(title_text, lineno)
    return nodes.title(title_text, '', *text_nodes), messages
| 74 | |
---|
| 75 | |
---|
if csv:
    # The dialect classes are only defined when the csv module is
    # available (it is new in Python 2.3; see check_requirements()).
    class DocutilsDialect(csv.Dialect):

        """CSV dialect for `csv_table` directive function."""

        delimiter = ','
        quotechar = '"'
        doublequote = True
        skipinitialspace = True
        lineterminator = '\n'
        quoting = csv.QUOTE_MINIMAL

        def __init__(self, options):
            # Directive options override the class-level defaults above.
            if options.has_key('delim'):
                self.delimiter = str(options['delim'])
            if options.has_key('keepspace'):
                # "keepspace" makes whitespace after the delimiter
                # significant, i.e. it is *not* skipped.
                self.skipinitialspace = False
            if options.has_key('quote'):
                self.quotechar = str(options['quote'])
            if options.has_key('escape'):
                # An explicit escape character replaces quote doubling.
                self.doublequote = False
                self.escapechar = str(options['escape'])
            # Let the base class validate the assembled dialect.
            csv.Dialect.__init__(self)


    class HeaderDialect(csv.Dialect):

        """CSV dialect to use for the "header" option data."""

        delimiter = ','
        quotechar = '"'
        escapechar = '\\'
        doublequote = False
        skipinitialspace = True
        lineterminator = '\n'
        quoting = csv.QUOTE_MINIMAL
| 112 | |
---|
| 113 | |
---|
def csv_table(name, arguments, options, content, lineno,
              content_offset, block_text, state, state_machine):
    """
    Implement the "csv-table" directive: build a table from CSV data.

    The data may come from the directive content, an external file
    (the "file" option), or a URL (the "url" option); see
    `get_csv_data`.  Returns a list containing either the table node
    (plus any title-parsing messages) or a single system message.
    """
    try:
        # Refuse external data sources when file insertion has been
        # disabled in the runtime settings (e.g. for security).
        if ( not state.document.settings.file_insertion_enabled
             and (options.has_key('file') or options.has_key('url')) ):
            warning = state_machine.reporter.warning(
                'File and URL access deactivated; ignoring "%s" directive.' %
                name, nodes.literal_block(block_text,block_text), line=lineno)
            return [warning]
        check_requirements(name, lineno, block_text, state_machine)
        title, messages = make_title(arguments, state, lineno)
        csv_data, source = get_csv_data(
            name, options, content, lineno, block_text, state, state_machine)
        # Header rows from the "header" option precede any rows taken
        # from the data via the "header-rows" option.
        table_head, max_header_cols = process_header_option(
            options, state_machine, lineno)
        rows, max_cols = parse_csv_data_into_rows(
            csv_data, DocutilsDialect(options), source, options)
        max_cols = max(max_cols, max_header_cols)
        header_rows = options.get('header-rows', 0) # default 0
        stub_columns = options.get('stub-columns', 0) # default 0
        check_table_dimensions(
            rows, header_rows, stub_columns, name, lineno,
            block_text, state_machine)
        # Split the parsed data into header and body sections.
        table_head.extend(rows[:header_rows])
        table_body = rows[header_rows:]
        col_widths = get_column_widths(
            max_cols, name, options, lineno, block_text, state_machine)
        # Pad short rows so that every row has max_cols cells.
        extend_short_rows_with_empty_cells(max_cols, (table_head, table_body))
    except SystemMessagePropagation, detail:
        # Helpers signal fatal problems by raising the system message
        # wrapped in SystemMessagePropagation; unwrap and return it.
        return [detail.args[0]]
    except csv.Error, detail:
        error = state_machine.reporter.error(
            'Error with CSV data in "%s" directive:\n%s' % (name, detail),
            nodes.literal_block(block_text, block_text), line=lineno)
        return [error]
    table = (col_widths, table_head, table_body)
    table_node = state.build_table(table, content_offset, stub_columns)
    table_node['classes'] += options.get('class', [])
    if title:
        table_node.insert(0, title)
    return [table_node] + messages

csv_table.arguments = (0, 1, 1)
csv_table.options = {'header-rows': directives.nonnegative_int,
                     'stub-columns': directives.nonnegative_int,
                     'header': directives.unchanged,
                     'widths': directives.positive_int_list,
                     'file': directives.path,
                     'url': directives.uri,
                     'encoding': directives.encoding,
                     'class': directives.class_option,
                     # field delimiter char
                     'delim': directives.single_char_or_whitespace_or_unicode,
                     # treat whitespace after delimiter as significant
                     'keepspace': directives.flag,
                     # text field quote/unquote char:
                     'quote': directives.single_char_or_unicode,
                     # char used to escape delim & quote as-needed:
                     'escape': directives.single_char_or_unicode,}
csv_table.content = 1
| 174 | |
---|
def check_requirements(name, lineno, block_text, state_machine):
    """
    Raise `SystemMessagePropagation` when the `csv` module is missing.

    The module-level ``csv`` name is ``None`` on Python versions older
    than 2.3; in that case the directive cannot run.
    """
    if csv:
        return
    error = state_machine.reporter.error(
        'The "%s" directive is not compatible with this version of '
        'Python (%s). Requires the "csv" module, new in Python 2.3.'
        % (name, sys.version.split()[0]),
        nodes.literal_block(block_text, block_text), line=lineno)
    raise SystemMessagePropagation(error)
| 183 | |
---|
def get_csv_data(name, options, content, lineno, block_text,
                 state, state_machine):
    """
    CSV data can come from the directive content, from an external file, or
    from a URL reference.

    Returns ``(csv_data, source)``: a sequence of text lines and an
    identifier for where they came from.  Fatal problems are reported
    by raising `SystemMessagePropagation` around a system message.
    """
    encoding = options.get('encoding', state.document.settings.input_encoding)
    if content:                        # CSV data is from directive content
        # Directive content and external sources are mutually exclusive.
        if options.has_key('file') or options.has_key('url'):
            error = state_machine.reporter.error(
                '"%s" directive may not both specify an external file and '
                'have content.' % name,
                nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(error)
        source = content.source(0)
        csv_data = content
    elif options.has_key('file'):      # CSV data is from an external file
        if options.has_key('url'):
            error = state_machine.reporter.error(
                'The "file" and "url" options may not be simultaneously '
                'specified for the "%s" directive.' % name,
                nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(error)
        # Resolve the path relative to the document being processed,
        # then normalize it for use as a dependency record.
        source_dir = os.path.dirname(
            os.path.abspath(state.document.current_source))
        source = os.path.normpath(os.path.join(source_dir, options['file']))
        source = utils.relative_path(None, source)
        try:
            # Record the file as a dependency of the document.
            state.document.settings.record_dependencies.add(source)
            csv_file = io.FileInput(
                source_path=source, encoding=encoding,
                error_handler
                    =state.document.settings.input_encoding_error_handler,
                handle_io_errors=None)
            csv_data = csv_file.read().splitlines()
        except IOError, error:
            severe = state_machine.reporter.severe(
                'Problems with "%s" directive path:\n%s.' % (name, error),
                nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(severe)
    elif options.has_key('url'):       # CSV data is from a URL
        # urllib2 is None when its import failed at module load time.
        if not urllib2:
            severe = state_machine.reporter.severe(
                'Problems with the "%s" directive and its "url" option: '
                'unable to access the required functionality (from the '
                '"urllib2" module).' % name,
                nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(severe)
        source = options['url']
        try:
            csv_text = urllib2.urlopen(source).read()
        except (urllib2.URLError, IOError, OSError, ValueError), error:
            severe = state_machine.reporter.severe(
                'Problems with "%s" directive URL "%s":\n%s.'
                % (name, options['url'], error),
                nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(severe)
        # Decode the downloaded text with the requested encoding.
        csv_file = io.StringInput(
            source=csv_text, source_path=source, encoding=encoding,
            error_handler=state.document.settings.input_encoding_error_handler)
        csv_data = csv_file.read().splitlines()
    else:
        # No content, no file, no URL: nothing to build a table from.
        error = state_machine.reporter.warning(
            'The "%s" directive requires content; none supplied.' % (name),
            nodes.literal_block(block_text, block_text), line=lineno)
        raise SystemMessagePropagation(error)
    return csv_data, source
| 251 | |
---|
def process_header_option(options, state_machine, lineno):
    """
    Parse the "header" directive option into table header rows.

    Returns ``(table_head, max_header_cols)``; both are empty/zero when
    no "header" option was supplied.
    """
    source = state_machine.get_source(lineno - 1)
    table_head = []
    max_header_cols = 0
    if options.has_key('header'):   # separate table header in option
        # The option value is parsed with the stricter HeaderDialect.
        rows, max_header_cols = parse_csv_data_into_rows(
            options['header'].split('\n'), HeaderDialect(), source, options)
        table_head.extend(rows)
    return table_head, max_header_cols
| 261 | |
---|
def parse_csv_data_into_rows(csv_data, dialect, source, options):
    """
    Parse lines of CSV text into table rows.

    Returns ``(rows, max_cols)``: each row is a list of cell tuples
    ``(morecols, morerows, offset, cell_block)`` as expected by the
    table builder, and ``max_cols`` is the widest row encountered.
    """
    # csv.py doesn't do Unicode; encode temporarily as UTF-8
    csv_reader = csv.reader([line.encode('utf-8') for line in csv_data],
                            dialect=dialect)
    rows = []
    max_cols = 0
    for row in csv_reader:
        row_data = []
        for cell in row:
            # decode UTF-8 back to Unicode
            cell_text = unicode(cell, 'utf-8')
            # Cells never span rows/columns in CSV input, hence (0, 0);
            # the third element is the line offset within the cell.
            cell_data = (0, 0, 0, statemachine.StringList(
                cell_text.splitlines(), source=source))
            row_data.append(cell_data)
        rows.append(row_data)
        max_cols = max(max_cols, len(row))
    return rows, max_cols
| 279 | |
---|
def check_table_dimensions(rows, header_rows, stub_columns, name, lineno,
                           block_text, state_machine):
    """
    Validate the table data against the requested header rows and stub
    columns.

    Raises `SystemMessagePropagation` (wrapping a reporter error) when
    there are too few rows for "header-rows", too few columns in any
    row for "stub-columns", or when no body rows/columns would remain.
    """
    def fail(message):
        # Report *message* and abort via SystemMessagePropagation.
        error = state_machine.reporter.error(
            message, nodes.literal_block(block_text, block_text),
            line=lineno)
        raise SystemMessagePropagation(error)

    if len(rows) < header_rows:
        fail('%s header row(s) specified but only %s row(s) of data '
             'supplied ("%s" directive).' % (header_rows, len(rows), name))
    if len(rows) == header_rows > 0:
        # All rows consumed by the header: nothing left for the body.
        fail('Insufficient data supplied (%s row(s)); no data remaining '
             'for table body, required by "%s" directive.'
             % (len(rows), name))
    for row in rows:
        if len(row) < stub_columns:
            fail('%s stub column(s) specified but only %s columns(s) of '
                 'data supplied ("%s" directive).'
                 % (stub_columns, len(row), name))
        if len(row) == stub_columns > 0:
            # All columns consumed by stubs: nothing left for the body.
            fail('Insufficient data supplied (%s columns(s)); no data '
                 'remaining for table body, required by "%s" directive.'
                 % (len(row), name))
| 308 | |
---|
def get_column_widths(max_cols, name, options, lineno, block_text,
                      state_machine):
    """
    Return a list of relative column widths for *max_cols* columns.

    Uses the "widths" option when given (its length must equal
    *max_cols*); otherwise distributes 100 evenly.  Raises
    `SystemMessagePropagation` on a length mismatch or when there are
    no columns at all.
    """
    if options.has_key('widths'):
        col_widths = options['widths']
        if len(col_widths) != max_cols:
            error = state_machine.reporter.error(
              '"%s" widths do not match the number of columns in table (%s).'
              % (name, max_cols),
              nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(error)
    elif max_cols:
        # Integer division (Python 2 semantics): equal integer shares
        # of 100; any remainder is simply dropped.
        col_widths = [100 / max_cols] * max_cols
    else:
        error = state_machine.reporter.error(
            'No table data detected in CSV file.',
            nodes.literal_block(block_text, block_text), line=lineno)
        raise SystemMessagePropagation(error)
    return col_widths
| 327 | |
---|
def extend_short_rows_with_empty_cells(columns, parts):
    """
    Pad every row of every part in *parts* to *columns* cells, in place.

    Padding cells use the standard 4-tuple cell format with an empty
    body: ``(0, 0, 0, [])``.
    """
    for part in parts:
        for row in part:
            missing = columns - len(row)
            if missing > 0:
                row.extend([(0, 0, 0, [])] * missing)
| 333 | |
---|
def list_table(name, arguments, options, content, lineno,
               content_offset, block_text, state, state_machine):
    """
    Implement tables whose data is encoded as a uniform two-level bullet list.
    For further ideas, see
    http://docutils.sf.net/docs/dev/rst/alternatives.html#list-driven-tables

    Returns a list containing either the table node (plus any
    title-parsing messages) or a single system message.
    """
    if not content:
        error = state_machine.reporter.error(
            'The "%s" directive is empty; content required.' % name,
            nodes.literal_block(block_text, block_text), line=lineno)
        return [error]
    title, messages = make_title(arguments, state, lineno)
    node = nodes.Element()          # anonymous container for parsing
    state.nested_parse(content, content_offset, node)
    try:
        num_cols, col_widths = check_list_content(
            node, name, options, content, lineno, block_text, state_machine)
        # node[0] is the top-level bullet list; each first-level item
        # wraps one nested bullet list whose items are the row's cells.
        table_data = [[item.children for item in row_list[0]]
                      for row_list in node[0]]
        header_rows = options.get('header-rows', 0) # default 0
        stub_columns = options.get('stub-columns', 0) # default 0
        check_table_dimensions(
            table_data, header_rows, stub_columns, name, lineno,
            block_text, state_machine)
    except SystemMessagePropagation, detail:
        # Helpers raise the system message wrapped for propagation.
        return [detail.args[0]]
    table_node = build_table_from_list(table_data, col_widths,
                                       header_rows, stub_columns)
    table_node['classes'] += options.get('class', [])
    if title:
        table_node.insert(0, title)
    return [table_node] + messages

list_table.arguments = (0, 1, 1)
list_table.options = {'header-rows': directives.nonnegative_int,
                      'stub-columns': directives.nonnegative_int,
                      'widths': directives.positive_int_list,
                      'class': directives.class_option}
list_table.content = 1
| 374 | |
---|
def check_list_content(node, name, options, content, lineno, block_text,
                       state_machine):
    """
    Validate parsed "list-table" content as a uniform two-level bullet
    list.

    *node* must contain exactly one bullet list; each of its items must
    wrap exactly one second-level bullet list, and all second-level
    lists must be the same length (the column count fixed by row 1).
    Returns ``(num_cols, col_widths)``; raises
    `SystemMessagePropagation` on any violation.
    """
    if len(node) != 1 or not isinstance(node[0], nodes.bullet_list):
        error = state_machine.reporter.error(
            'Error parsing content block for the "%s" directive: '
            'exactly one bullet list expected.' % name,
            nodes.literal_block(block_text, block_text), line=lineno)
        raise SystemMessagePropagation(error)
    list_node = node[0]
    for item_index, item in enumerate(list_node):
        if len(item) != 1 or not isinstance(item[0], nodes.bullet_list):
            error = state_machine.reporter.error(
                'Error parsing content block for the "%s" directive: '
                'two-level bullet list expected, but row %s does not contain '
                'a second-level bullet list.' % (name, item_index + 1),
                nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(error)
        if not item_index:
            # Row 1 fixes the expected number of columns.
            num_cols = len(item[0])
        elif len(item[0]) != num_cols:
            error = state_machine.reporter.error(
                'Error parsing content block for the "%s" directive: '
                'uniform two-level bullet list expected, but row %s does '
                'not contain the same number of items as row 1 (%s vs %s).'
                % (name, item_index + 1, len(item[0]), num_cols),
                nodes.literal_block(block_text, block_text), line=lineno)
            raise SystemMessagePropagation(error)
    col_widths = get_column_widths(
        num_cols, name, options, lineno, block_text, state_machine)
    if len(col_widths) != num_cols:
        error = state_machine.reporter.error(
            'Error parsing "widths" option of the "%s" directive: '
            'number of columns does not match the table data (%s vs %s).'
            % (name, len(col_widths), num_cols),
            nodes.literal_block(block_text, block_text), line=lineno)
        raise SystemMessagePropagation(error)
    return num_cols, col_widths
| 418 | |
---|
def build_table_from_list(table_data, col_widths, header_rows, stub_columns):
    """
    Assemble a docutils table node from list-table data.

    *table_data* is a list of rows, each a list of cell bodies (node
    lists).  The first *header_rows* rows go into a ``thead``, the rest
    into a ``tbody``; the first *stub_columns* colspecs are flagged as
    stubs.
    """
    table = nodes.table()
    tgroup = nodes.tgroup(cols=len(col_widths))
    table += tgroup
    for index, col_width in enumerate(col_widths):
        colspec = nodes.colspec(colwidth=col_width)
        if index < stub_columns:
            # Mark the leading stub columns.
            colspec.attributes['stub'] = 1
        tgroup += colspec
    row_nodes = []
    for row_cells in table_data:
        row_node = nodes.row()
        for cell_body in row_cells:
            entry_node = nodes.entry()
            entry_node += cell_body
            row_node += entry_node
        row_nodes.append(row_node)
    if header_rows:
        thead = nodes.thead()
        thead.extend(row_nodes[:header_rows])
        tgroup += thead
    tbody = nodes.tbody()
    tbody.extend(row_nodes[header_rows:])
    tgroup += tbody
    return table