"Das U-Boot" Source Tree
1# SPDX-License-Identifier: GPL-2.0+
2# Copyright (c) 2016 Google, Inc
3# Written by Simon Glass <sjg@chromium.org>
4#
5# Creates binary images from input files controlled by a description
6#
7
8from collections import OrderedDict
9import glob
10try:
11 import importlib.resources
12except ImportError: # pragma: no cover
13 # for Python 3.6
14 import importlib_resources
15import os
16import pkg_resources
17import re
18
19import sys
20
21from binman import bintool
22from binman import cbfs_util
23from binman import elf
24from binman import entry
25from dtoc import fdt_util
26from u_boot_pylib import command
27from u_boot_pylib import tools
28from u_boot_pylib import tout
29
# These are imported if needed since they import libfdt
# state: the binman.state module; Image: the binman.image.Image class.
# Both are bound by Binman() (or ReadEntry()) once libfdt is available.
state = None
Image = None

# List of images we plan to create
# Make this global so that it can be referenced from tests
images = OrderedDict()

# Help text for each type of missing blob, dict:
#    key: Value of the entry's 'missing-msg' or entry name
#    value: Text for the help
missing_blob_help = {}
42
def _ReadImageDesc(binman_node, use_expanded):
    """Read the image descriptions from the /binman node

    This normally produces a single Image object called 'image'. But if
    multiple images are present, they will all be returned.

    Args:
        binman_node: Node object of the /binman node
        use_expanded: True if the FDT will be updated with the entry information
    Returns:
        OrderedDict of Image objects, each of which describes an image
    """
    # Image is imported lazily at module level, so pylint cannot tell that it
    # is callable here
    # pylint: disable=E1102
    found = OrderedDict()
    if 'multiple-images' not in binman_node.props:
        # Single-image case: the /binman node itself describes the image
        found['image'] = Image('image', binman_node, use_expanded=use_expanded)
        return found
    for subnode in binman_node.subnodes:
        # Template nodes are not images in their own right
        if subnode.name.startswith('template'):
            continue
        found[subnode.name] = Image(subnode.name, subnode,
                                    use_expanded=use_expanded)
    return found
66
67def _FindBinmanNode(dtb):
68 """Find the 'binman' node in the device tree
69
70 Args:
71 dtb: Fdt object to scan
72 Returns:
73 Node object of /binman node, or None if not found
74 """
75 for node in dtb.GetRoot().subnodes:
76 if node.name == 'binman':
77 return node
78 return None
79
def _ReadMissingBlobHelp():
    """Read the missing-blob-help file

    This file contains help messages explaining what to do when external blobs
    are missing.

    Returns:
        Dict:
            key: Message tag (str)
            value: Message text (str)
    """

    def _commit(tag, lines, result):
        # Record the message collected so far (if any) under its tag
        if tag:
            result[tag] = '\n'.join(lines).rstrip()

    raw = pkg_resources.resource_string(__name__, 'missing-blob-help')
    re_tag = re.compile('^([-a-z0-9]+):$')
    result = {}
    tag = None
    lines = []
    for line in raw.decode('utf-8').splitlines():
        # Lines starting with '#' are comments in the help file
        if line.startswith('#'):
            continue
        m_tag = re_tag.match(line)
        if m_tag:
            # A new tag starts: finish the previous message first
            _commit(tag, lines, result)
            tag = m_tag.group(1)
            lines = []
        elif tag:
            # Text before the first tag is ignored
            lines.append(line)
    _commit(tag, lines, result)
    return result
114
def _ShowBlobHelp(level, path, text, fname):
    """Show help text for a single missing/faked blob

    Args:
        level: Output level to use (e.g. tout.ERROR)
        path: Path to the entry within the image (e.g. '/binman/intel-fit')
        text: Help text to show, possibly multi-line
        fname: Filename of the blob the help relates to
    """
    tout.do_output(level, '%s (%s):' % (path, fname))
    for line in text.splitlines():
        # Indent each line of the help text under the heading
        tout.do_output(level, ' %s' % line)
    # Blank line to separate messages for different blobs
    tout.do_output(level, '')
120
def _ShowHelpForMissingBlobs(level, missing_list):
    """Show help for each missing blob to help the user take action

    Args:
        level: Output level to use for the help (e.g. tout.ERROR)
        missing_list: List of Entry objects to show help for
    """
    global missing_blob_help

    if not missing_blob_help:
        # Load the help database on first use
        missing_blob_help = _ReadMissingBlobHelp()

    for entry in missing_list:
        # Show the help for the first tag which has a message; if none of the
        # entry's tags have one, fall back to a generic message
        for tag in entry.GetHelpTags():
            if tag in missing_blob_help:
                _ShowBlobHelp(level, entry._node.path, missing_blob_help[tag],
                              entry.GetDefaultFilename())
                break
        else:
            _ShowBlobHelp(level, entry._node.path, "Missing blob",
                          entry.GetDefaultFilename())
147
def GetEntryModules(include_testing=True):
    """Get a set of entry class implementations

    Args:
        include_testing: True to include modules used only for testing (those
            with '_testing' in their name), False to exclude them

    Returns:
        set of str: Module names of the available entry classes, i.e. the
            filenames in the 'etype' package directory without their '.py'
            extension (e.g. 'u_boot', 'section')
    """
    fnames = pkg_resources.resource_listdir(__name__, 'etype')
    return {os.path.splitext(os.path.basename(fname))[0]
            for fname in fnames
            if fname.endswith('.py') and
               (include_testing or '_testing' not in fname)}
159
def WriteEntryDocs(modules, test_missing=None):
    """Write out documentation for all entries

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only; forces an entry's documentation
            to show as missing even if it is present. Should be None in
            normal use.
    """
    # Import here so this module can be loaded without libfdt being present
    from binman.entry import Entry

    Entry.WriteDocs(modules, test_missing)
171
172
def write_bintool_docs(modules, test_missing=None):
    """Write out documentation for all bintools

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only; forces a bintool's documentation
            to show as missing even if it is present. Should be None in
            normal use.
    """
    docs_writer = bintool.Bintool.WriteDocs
    docs_writer(modules, test_missing)
183
184
def ListEntries(image_fname, entry_paths):
    """List the entries in an image

    This decodes the supplied image and displays a table of entries from that
    image, preceded by a header.

    Args:
        image_fname: Image filename to process
        entry_paths: List of wildcarded paths (e.g. ['*dtb*', 'u-boot*',
            'section/u-boot'])
    """
    image = Image.FromFile(image_fname)

    # entries itself is unused here; we only need the pre-formatted lines and
    # the column widths
    entries, lines, widths = image.GetListEntries(entry_paths)

    num_columns = len(widths)
    for linenum, line in enumerate(lines):
        if linenum == 1:
            # Print header line
            print('-' * (sum(widths) + num_columns * 2))
        out = ''
        for i, item in enumerate(line):
            # Negative width left-aligns the column; a '>' prefix on the item
            # requests right alignment instead
            width = -widths[i]
            if item.startswith('>'):
                width = -width
                item = item[1:]
            txt = '%*s ' % (width, item)
            out += txt
        # Drop trailing padding from the last column
        print(out.rstrip())
214
215
def ReadEntry(image_fname, entry_path, decomp=True):
    """Extract an entry from an image

    This extracts the data from a particular entry in an image

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to extract
        decomp: True to return uncompressed data if the data is compressed,
            False to return the raw data

    Returns:
        data extracted from the entry
    """
    # Import Image lazily so this module can be loaded without libfdt
    global Image
    from binman.image import Image

    image = Image.FromFile(image_fname)
    image.CollectBintools()
    return image.FindEntryPath(entry_path).ReadData(decomp)
237
238
def ShowAltFormats(image):
    """Show alternative formats available for entries in the image

    This shows a list of formats available.

    Args:
        image (Image): Image to check
    """
    # Each value is a (entry, help-text) pair, keyed by format name
    alt_formats = {}
    image.CheckAltFormats(alt_formats)
    print('%-10s %-20s %s' % ('Flag (-F)', 'Entry type', 'Description'))
    for name, (entry, helptext) in alt_formats.items():
        print('%-10s %-20s %s' % (name, entry.etype, helptext))
253
254
def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
                   decomp=True, alt_format=None):
    """Extract the data from one or more entries and write it to files

    Args:
        image_fname: Image filename to process
        output_fname: Single output filename to use if extracting one file, None
            otherwise
        outdir: Output directory to use (for any number of files), else None
        entry_paths: List of entry paths to extract
        decomp: True to decompress the entry data
        alt_format: Alternative format to use for the data, or None for the
            default; pass 'list' to show the available formats instead of
            extracting anything

    Returns:
        List of EntryInfo records that were written, or None if a single file
            was written or the format list was shown
    """
    image = Image.FromFile(image_fname)
    image.CollectBintools()

    if alt_format == 'list':
        ShowAltFormats(image)
        return

    # Output an entry to a single file, as a special case
    if output_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to write with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = entry.ReadData(decomp, alt_format)
        tools.write_file(output_fname, data)
        tout.notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
        return

    # Otherwise we will output to a path given by the entry path of each entry.
    # This means that entries will appear in subdirectories if they are part of
    # a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice('%d entries match and will be written' % len(einfos))
    for einfo in einfos:
        entry = einfo.entry
        data = entry.ReadData(decomp, alt_format)
        path = entry.GetPath()[1:]
        fname = os.path.join(outdir, path)

        # If this entry has children, create a directory for it and put its
        # data in a file called 'root' in that directory
        if entry.GetEntries():
            if fname:
                # exist_ok avoids failing if the directory was already created
                # for an earlier entry
                os.makedirs(fname, exist_ok=True)
            fname = os.path.join(fname, 'root')
        tout.notice("Write entry '%s' size %x to '%s'" %
                    (entry.GetPath(), len(data), fname))
        tools.write_file(fname, data)
    return einfos
310
311
def BeforeReplace(image, allow_resize):
    """Handle getting an image ready for replacing entries in it

    Args:
        image: Image to prepare
        allow_resize: True if entries will be allowed to change size (in which
            case existing offset/size values are dropped so the image can be
            fully re-packed), False to keep the current packing
    """
    state.PrepareFromLoadedData(image)
    image.CollectBintools()
    # Load the raw (still-compressed) data so it can be replaced in place
    image.LoadData(decomp=False)

    # If repacking, drop the old offset/size values except for the original
    # ones, so we are only left with the constraints.
    if image.allow_repack and allow_resize:
        image.ResetForPack()
326
327
def ReplaceOneEntry(image, entry, data, do_compress, allow_resize):
    """Handle replacing a single entry in an image

    Args:
        image: Image to update
        entry: Entry to write
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
    """
    wrote = entry.WriteData(data, do_compress)
    if wrote:
        return
    # The new data did not fit in the existing space, so a re-pack would be
    # needed; check that this is actually permitted
    if not image.allow_repack:
        entry.Raise('Entry data size does not match, but allow-repack is not present for this image')
    if not allow_resize:
        entry.Raise('Entry data size does not match, but resize is disabled')
345
346
def AfterReplace(image, allow_resize, write_map):
    """Write out an image after entries have been replaced in it

    Args:
        image: Image to write
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file
    """
    tout.info('Processing image')
    # Contents are already loaded, so only re-pack, update the FDT and write
    ProcessImage(image, update_fdt=True, get_contents=False,
                 allow_resize=allow_resize, write_map=write_map)
358 get_contents=False, allow_resize=allow_resize)
359
360
def WriteEntryToImage(image, entry, data, do_compress=True, allow_resize=True,
                      write_map=False):
    """Replace the data in a single entry of an already-loaded image

    This prepares the image, writes the new entry data and then re-packs and
    writes out the image.

    Args:
        image: Image to update
        entry: Entry to write
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file
    """
    BeforeReplace(image, allow_resize)
    tout.info('Writing data to %s' % entry.GetPath())
    ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
366 AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
367
368
def WriteEntry(image_fname, entry_path, data, do_compress=True,
               allow_resize=True, write_map=False):
    """Replace an entry in an image

    This replaces the data in a particular entry in an image. The size of the
    new data must match the size of the old data unless allow_resize is True.

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to extract
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated
    """
    tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname))
    image = Image.FromFile(image_fname)
    image.CollectBintools()
    target = image.FindEntryPath(entry_path)
    WriteEntryToImage(image, target, data, do_compress=do_compress,
                      allow_resize=allow_resize, write_map=write_map)
    return image
397
398
def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
                   do_compress=True, allow_resize=True, write_map=False):
    """Replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        indir: Input directory to use (for any number of files), else None
        entry_paths: List of entry paths to replace
        do_compress: True if the input data is uncompressed and may need to be
            compressed if the entry requires it, False if the data is already
            compressed.
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated, or None if a single file was replaced
            using input_fname
    """
    image_fname = os.path.abspath(image_fname)
    image = Image.FromFile(image_fname)

    # The image was already built; we are only updating it in place
    image.mark_build_done()

    # Replace an entry from a single file, as a special case
    if input_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to read with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = tools.read_file(input_fname)
        tout.notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
        WriteEntryToImage(image, entry, data, do_compress=do_compress,
                          allow_resize=allow_resize, write_map=write_map)
        return

    # Otherwise we will input from a path given by the entry path of each entry.
    # This means that files must appear in subdirectories if they are part of
    # a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice("Replacing %d matching entries in image '%s'" %
                (len(einfos), image_fname))

    BeforeReplace(image, allow_resize)

    for einfo in einfos:
        entry = einfo.entry
        if entry.GetEntries():
            # Sections are built from their children, so are not replaced
            # directly
            tout.info("Skipping section entry '%s'" % entry.GetPath())
            continue

        # Strip the leading '/' so the path can be used as a relative filename
        path = entry.GetPath()[1:]
        fname = os.path.join(indir, path)

        if os.path.exists(fname):
            tout.notice("Write entry '%s' from file '%s'" %
                        (entry.GetPath(), fname))
            data = tools.read_file(fname)
            ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
        else:
            tout.warning("Skipping entry '%s' from missing file '%s'" %
                         (entry.GetPath(), fname))

    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
    return image
464
def SignEntries(image_fname, input_fname, privatekey_fname, algo, entry_paths,
                write_map=False):
    """Sign and replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        privatekey_fname: Private key filename
        algo: Hashing algorithm
        entry_paths: List of entry paths to sign
        write_map (bool): True to write the map file
    """
    image = Image.FromFile(os.path.abspath(image_fname))

    # The image was already built; we are only updating signatures in place
    image.mark_build_done()

    BeforeReplace(image, allow_resize=True)

    for entry_path in entry_paths:
        image.FindEntryPath(entry_path).UpdateSignatures(privatekey_fname,
                                                         algo, input_fname)

    AfterReplace(image, allow_resize=True, write_map=write_map)
490
def _ProcessTemplates(parent):
    """Handle any templates in the binman description

    Args:
        parent: Binman node to process (typically /binman)

    Returns:
        bool: True if any templates were processed

    Search through each target node looking for those with an
    'insert-template' property. Use that as a list of references to template
    nodes to use to adjust the target node.

    Processing involves copying each subnode of the template node into the
    target node.

    This is done recursively, so templates can be at any level of the binman
    image, e.g. inside a section.

    See 'Templates' in the Binman documentation for details.
    """
    found = False
    for node in parent.subnodes:
        phandles = fdt_util.GetPhandleList(node, 'insert-template')
        if phandles:
            node.copy_subnodes_from_phandles(phandles)
            found = True

        # Recurse so that templates inside sections are handled too
        if _ProcessTemplates(node):
            found = True
    return found
521
522def _RemoveTemplates(parent):
523 """Remove any templates in the binman description
524 """
525 for node in parent.subnodes:
526 if node.name.startswith('template'):
527 node.Delete()
528
def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded, indir):
    """Prepare the images to be processed and select the device tree

    This function:
    - reads in the device tree
    - finds and scans the binman node to create all entries
    - selects which images to build
    - Updates the device trees with placeholder properties for offset,
        image-pos, etc.

    Args:
        dtb_fname: Filename of the device tree file to use (.dts or .dtb)
        select_images: List of images to output, or None for all
        update_fdt: True to update the FDT with entry offsets, etc.
        use_expanded: True to use expanded versions of entries, if available.
            So if 'u-boot' is called for, we use 'u-boot-expanded' instead. This
            is needed if update_fdt is True (although tests may disable it)
        indir: List of directories where input files can be found

    Returns:
        OrderedDict of images:
            key: Image name (str)
            value: Image object
    """
    # Import these here in case libfdt.py is not available, in which case
    # the above help option still works.
    from dtoc import fdt
    from dtoc import fdt_util
    global images

    # Get the device tree ready by compiling it and copying the compiled
    # output into a file in our output directory. Then scan it for use
    # in binman.
    if indir is None:
        indir = []
    dtb_fname = fdt_util.EnsureCompiled(dtb_fname, indir=indir)
    fname = tools.get_output_filename('u-boot.dtb.out')
    tools.write_file(fname, tools.read_file(dtb_fname))
    dtb = fdt.FdtScan(fname)

    node = _FindBinmanNode(dtb)
    if not node:
        raise ValueError("Device tree '%s' does not have a 'binman' "
                         "node" % dtb_fname)

    if _ProcessTemplates(node):
        # Templates were inserted: sync and save an intermediate copy for
        # debugging
        dtb.Sync(True)
        fname = tools.get_output_filename('u-boot.dtb.tmpl1')
        tools.write_file(fname, dtb.GetContents())

        _RemoveTemplates(node)
        dtb.Sync(True)

        # Rescan the dtb to pick up the new phandles
        dtb.Scan()
        node = _FindBinmanNode(dtb)
        fname = tools.get_output_filename('u-boot.dtb.tmpl2')
        tools.write_file(fname, dtb.GetContents())

    images = _ReadImageDesc(node, use_expanded)

    if select_images:
        # Keep only the requested images, reporting the ones skipped
        skip = []
        new_images = OrderedDict()
        for name, image in images.items():
            if name in select_images:
                new_images[name] = image
            else:
                skip.append(name)
        images = new_images
        tout.notice('Skipping images: %s' % ', '.join(skip))

    state.Prepare(images, dtb)

    # Prepare the device tree by making sure that any missing
    # properties are added (e.g. 'pos' and 'size'). The values of these
    # may not be correct yet, but we add placeholders so that the
    # size of the device tree is correct. Later, in
    # SetCalculatedProperties() we will insert the correct values
    # without changing the device-tree size, thus ensuring that our
    # entry offsets remain the same.
    for image in images.values():
        image.gen_entries()
        image.CollectBintools()
        if update_fdt:
            image.AddMissingProperties(True)
        image.ProcessFdt(dtb)

    # Sync all device trees, allowing them to grow to fit the placeholders
    for dtb_item in state.GetAllFdts():
        dtb_item.Sync(auto_resize=True)
        dtb_item.Pack()
        dtb_item.Flush()
    return images
622
def CheckForProblems(image):
    """Check for problems with image generation

    Shows warnings about missing, faked or optional external blobs, as well as
    missing bintools.

    Args:
        image (Image): Image to process

    Returns:
        bool: True if there are any problems which result in a non-functional
            image
    """
    missing_list = []
    image.CheckMissing(missing_list)
    if missing_list:
        names = ' '.join([e.name for e in missing_list])
        tout.error("Image '%s' is missing external blobs and is non-functional: %s\n" %
                   (image.name, names))
        _ShowHelpForMissingBlobs(tout.ERROR, missing_list)

    faked_list = []
    image.CheckFakedBlobs(faked_list)
    if faked_list:
        fake_names = ' '.join([os.path.basename(e.GetDefaultFilename())
                               for e in faked_list])
        tout.warning(
            "Image '%s' has faked external blobs and is non-functional: %s\n" %
            (image.name, fake_names))

    optional_list = []
    image.CheckOptional(optional_list)
    if optional_list:
        opt_names = ' '.join([e.name for e in optional_list])
        tout.warning(
            "Image '%s' is missing optional external blobs but is still functional: %s\n" %
            (image.name, opt_names))
        _ShowHelpForMissingBlobs(tout.WARNING, optional_list)

    missing_bintool_list = []
    image.check_missing_bintools(missing_bintool_list)
    if missing_bintool_list:
        tool_names = ' '.join([os.path.basename(btool.name)
                               for btool in missing_bintool_list])
        tout.warning(
            "Image '%s' has missing bintools and is non-functional: %s\n" %
            (image.name, tool_names))

    # Optional blobs do not make the image non-functional, so they are
    # deliberately excluded from this check
    return any([missing_list, faked_list, missing_bintool_list])
667
def ProcessImage(image, update_fdt, write_map, get_contents=True,
                 allow_resize=True, allow_missing=False,
                 allow_fake_blobs=False):
    """Perform all steps for this image, including checking and writing it.

    This means that errors found with a later image will be reported after
    earlier images are already completed and written, but that does not seem
    important.

    Args:
        image: Image to process
        update_fdt: True to update the FDT with entry offsets, etc.
        write_map: True to write a map file
        get_contents: True to get the image contents from files, etc., False if
            the contents is already present
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        allow_missing: Allow blob_ext objects to be missing
        allow_fake_blobs: Allow blob_ext objects to be faked with dummy files

    Returns:
        True if one or more external blobs are missing or faked,
        False if all are present
    """
    if get_contents:
        image.SetAllowMissing(allow_missing)
        image.SetAllowFakeBlob(allow_fake_blobs)
        image.GetEntryContents()
        image.drop_absent()
    image.GetEntryOffsets()

    # We need to pack the entries to figure out where everything
    # should be placed. This sets the offset/size of each entry.
    # However, after packing we call ProcessEntryContents() which
    # may result in an entry changing size. In that case we need to
    # do another pass. Since the device tree often contains the
    # final offset/size information we try to make space for this in
    # AddMissingProperties() above. However, if the device is
    # compressed we cannot know this compressed size in advance,
    # since changing an offset from 0x100 to 0x104 (for example) can
    # alter the compressed size of the device tree. So we need a
    # third pass for this.
    passes = 5
    for pack_pass in range(passes):
        try:
            image.PackEntries()
        except Exception as e:
            # Write the map if requested, to help diagnose the packing error,
            # then let the exception propagate
            if write_map:
                fname = image.WriteMap()
                print("Wrote map file '%s' to show errors" % fname)
            raise
        image.SetImagePos()
        if update_fdt:
            image.SetCalculatedProperties()
            for dtb_item in state.GetAllFdts():
                dtb_item.Sync()
                dtb_item.Flush()
        image.WriteSymbols()
        # If no entry changed size during processing, the packing is stable
        sizes_ok = image.ProcessEntryContents()
        if sizes_ok:
            break
        # Something changed size: drop the computed offsets and try again
        image.ResetForPack()
    tout.info('Pack completed after %d pass(es)' % (pack_pass + 1))
    if not sizes_ok:
        image.Raise('Entries changed size after packing (tried %s passes)' %
                    passes)

    image.BuildImage()
    if write_map:
        image.WriteMap()

    has_problems = CheckForProblems(image)

    image.WriteAlternates()

    return has_problems
744
def Binman(args):
    """The main control code for binman

    This assumes that help and test options have already been dealt with. It
    deals with the core task of building images.

    Args:
        args: Command line arguments Namespace object

    Returns:
        int: 0 on success, 103 if one or more images are invalid and
            args.ignore_missing is not set
    """
    global Image
    global state

    if args.full_help:
        with importlib.resources.path('binman', 'README.rst') as readme:
            tools.print_full_help(str(readme))
        return 0

    # Put these here so that we can import this module without libfdt
    from binman.image import Image
    from binman import state

    # Collect the tool-search paths from the command line
    tool_paths = []
    if args.toolpath:
        tool_paths += args.toolpath
    if args.tooldir:
        tool_paths.append(args.tooldir)
    tools.set_tool_paths(tool_paths or None)
    bintool.Bintool.set_tool_dir(args.tooldir)

    # These subcommands operate on an existing image rather than building one
    if args.cmd in ['ls', 'extract', 'replace', 'tool', 'sign']:
        try:
            tout.init(args.verbosity)
            if args.cmd == 'replace':
                tools.prepare_output_dir(args.outdir, args.preserve)
            else:
                tools.prepare_output_dir(None)
            if args.cmd == 'ls':
                ListEntries(args.image, args.paths)

            if args.cmd == 'extract':
                ExtractEntries(args.image, args.filename, args.outdir, args.paths,
                               not args.uncompressed, args.format)

            if args.cmd == 'replace':
                ReplaceEntries(args.image, args.filename, args.indir, args.paths,
                               do_compress=not args.compressed,
                               allow_resize=not args.fix_size, write_map=args.map)

            if args.cmd == 'sign':
                SignEntries(args.image, args.file, args.key, args.algo, args.paths)

            if args.cmd == 'tool':
                if args.list:
                    bintool.Bintool.list_all()
                elif args.fetch:
                    if not args.bintools:
                        raise ValueError(
                            "Please specify bintools to fetch or 'all' or 'missing'")
                    bintool.Bintool.fetch_tools(bintool.FETCH_ANY,
                                                args.bintools)
                else:
                    raise ValueError("Invalid arguments to 'tool' subcommand")
        # NOTE(review): this except/raise pair is a no-op; exceptions would
        # propagate anyway after the finally block runs
        except:
            raise
        finally:
            tools.finalise_output_dir()
        return 0

    elf_params = None
    if args.update_fdt_in_elf:
        # Expect exactly infile,outfile,begin_sym,end_sym
        elf_params = args.update_fdt_in_elf.split(',')
        if len(elf_params) != 4:
            raise ValueError('Invalid args %s to --update-fdt-in-elf: expected infile,outfile,begin_sym,end_sym' %
                             elf_params)

    # Try to figure out which device tree contains our image description
    if args.dt:
        dtb_fname = args.dt
    else:
        board = args.board
        if not board:
            raise ValueError('Must provide a board to process (use -b <board>)')
        board_pathname = os.path.join(args.build_dir, board)
        dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
        if not args.indir:
            args.indir = ['.']
        args.indir.append(board_pathname)

    try:
        tout.init(args.verbosity)
        elf.debug = args.debug
        cbfs_util.VERBOSE = args.verbosity > 2
        state.use_fake_dtb = args.fake_dtb

        # Normally we replace the 'u-boot' etype with 'u-boot-expanded', etc.
        # When running tests this can be disabled using this flag. When not
        # updating the FDT in image, it is not needed by binman, but we use it
        # for consistency, so that the images look the same to U-Boot at
        # runtime.
        use_expanded = not args.no_expanded
        try:
            tools.set_input_dirs(args.indir)
            tools.prepare_output_dir(args.outdir, args.preserve)
            state.SetEntryArgs(args.entry_arg)
            state.SetThreads(args.threads)

            images = PrepareImagesAndDtbs(dtb_fname, args.image,
                                          args.update_fdt, use_expanded, args.indir)

            if args.test_section_timeout:
                # Set the first image to timeout, used in testThreadTimeout()
                images[list(images.keys())[0]].test_section_timeout = True
            invalid = False
            bintool.Bintool.set_missing_list(
                args.force_missing_bintools.split(',') if
                args.force_missing_bintools else None)

            # Create the directory here instead of Entry.check_fake_fname()
            # since that is called from a threaded context so different threads
            # may race to create the directory
            if args.fake_ext_blobs:
                entry.Entry.create_fake_dir()

            for image in images.values():
                invalid |= ProcessImage(image, args.update_fdt, args.map,
                                        allow_missing=args.allow_missing,
                                        allow_fake_blobs=args.fake_ext_blobs)

            # Write the updated FDTs to our output files
            for dtb_item in state.GetAllFdts():
                tools.write_file(dtb_item._fname, dtb_item.GetContents())

            # Optionally embed the updated FDT into an ELF file
            if elf_params:
                data = state.GetFdtForEtype('u-boot-dtb').GetContents()
                elf.UpdateFile(*elf_params, data)

            bintool.Bintool.set_missing_list(None)

            # This can only be True if -M is provided, since otherwise binman
            # would have raised an error already
            if invalid:
                msg = 'Some images are invalid'
                if args.ignore_missing:
                    tout.warning(msg)
                else:
                    tout.error(msg)
                    return 103

            # Use this to debug the time take to pack the image
            #state.TimingShow()
        finally:
            tools.finalise_output_dir()
    finally:
        tout.uninit()

    return 0