binman: Support replacing data in a cbfs
author Simon Glass <sjg@chromium.org>
Sat, 20 Jul 2019 18:24:06 +0000 (12:24 -0600)
committer Simon Glass <sjg@chromium.org>
Mon, 29 Jul 2019 15:38:06 +0000 (09:38 -0600)
At present binman cannot replace data within a CBFS since it does not
allow rewriting of the files in that CBFS. Implement this by using the
new WriteData() method to handle the case.

Add a header to compressed data so that the amount of compressed data can
be determined without reference to the size of the containing entry. This
allows the entry to be larger than the contents, without causing errors in
decompression. This is necessary to cope with a compressed device tree
being updated in such a way that it shrinks after the entry size is
already set (an obscure case). It is not used with CBFS since it has its
own metadata for this. Increase the number of passes allowed to resolve
the position of entries, to handle this case.
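
As an illustrative sketch only (the helper names below are made up; the
real change adds an optional with_header argument to tools.Compress() and
tools.Decompress(), as shown in the diff), the header is simply a 4-byte
little-endian length prepended to the compressed payload:

    import struct

    def add_size_header(compressed):
        # Hypothetical helper: prepend a 4-byte little-endian length so the
        # compressed size can be recovered even if the containing entry is
        # later padded out to a larger size.
        return struct.pack('<I', len(compressed)) + compressed

    def strip_size_header(data):
        # Hypothetical helper: read the stored length and discard any
        # padding that follows the compressed payload.
        size = struct.unpack('<I', data[:4])[0]
        return data[4:4 + size]

With the length recorded up front, any padding added to grow the entry
never reaches the decompressor.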

Add a test for this new logic.

Signed-off-by: Simon Glass <sjg@chromium.org>
tools/binman/cbfs_util.py
tools/binman/control.py
tools/binman/entry_test.py
tools/binman/etype/cbfs.py
tools/binman/ftest.py
tools/binman/test/142_replace_cbfs.dts [new file with mode: 0644]
tools/patman/tools.py

diff --git a/tools/binman/cbfs_util.py b/tools/binman/cbfs_util.py
index 6d9a876ee85214acfc7a3a4d9c1d4ebcbe24fa8b..99d77878c9a648346c5db432634f82f2a4a2d301 100644
@@ -208,6 +208,7 @@ class CbfsFile(object):
         cbfs_offset: Offset of file data in bytes from start of CBFS, or None to
             place this file anyway
         data: Contents of file, uncompressed
+        orig_data: Original data added to the file, possibly compressed
         data_len: Length of (possibly compressed) data in bytes
         ftype: File type (TYPE_...)
         compression: Compression type (COMPRESS_...)
@@ -226,6 +227,7 @@ class CbfsFile(object):
         self.offset = None
         self.cbfs_offset = cbfs_offset
         self.data = data
+        self.orig_data = data
         self.ftype = ftype
         self.compress = compress
         self.memlen = None
@@ -240,9 +242,9 @@ class CbfsFile(object):
         """Handle decompressing data if necessary"""
         indata = self.data
         if self.compress == COMPRESS_LZ4:
-            data = tools.Decompress(indata, 'lz4')
+            data = tools.Decompress(indata, 'lz4', with_header=False)
         elif self.compress == COMPRESS_LZMA:
-            data = tools.Decompress(indata, 'lzma')
+            data = tools.Decompress(indata, 'lzma', with_header=False)
         else:
             data = indata
         self.memlen = len(data)
@@ -361,9 +363,9 @@ class CbfsFile(object):
         elif self.ftype == TYPE_RAW:
             orig_data = data
             if self.compress == COMPRESS_LZ4:
-                data = tools.Compress(orig_data, 'lz4')
+                data = tools.Compress(orig_data, 'lz4', with_header=False)
             elif self.compress == COMPRESS_LZMA:
-                data = tools.Compress(orig_data, 'lzma')
+                data = tools.Compress(orig_data, 'lzma', with_header=False)
             self.memlen = len(orig_data)
             self.data_len = len(data)
             attr = struct.pack(ATTR_COMPRESSION_FORMAT,
diff --git a/tools/binman/control.py b/tools/binman/control.py
index 22e3e306e54326a6216084fd11bf854ba760e48c..9c8bc6253fc65dc010f63b3a947d1836aa5139f1 100644
@@ -311,7 +311,7 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
     # since changing an offset from 0x100 to 0x104 (for example) can
     # alter the compressed size of the device tree. So we need a
     # third pass for this.
-    passes = 3
+    passes = 5
     for pack_pass in range(passes):
         try:
             image.PackEntries()
diff --git a/tools/binman/entry_test.py b/tools/binman/entry_test.py
index ee729f37519b9fe6cad7321042067d2a16ee2aa1..cc1fb795da575e71b09e364d0dd6cd78e405a409 100644
@@ -92,6 +92,11 @@ class TestEntry(unittest.TestCase):
         dtb = entry.Entry.Create(None, self.GetNode(), 'u-boot-dtb')
         self.assertEqual('u-boot-dtb', dtb.GetFdtEtype())
 
+    def testWriteChildData(self):
+        """Test the WriteChildData() method of the base class"""
+        base = entry.Entry.Create(None, self.GetNode(), 'blob-dtb')
+        self.assertTrue(base.WriteChildData(base))
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/tools/binman/etype/cbfs.py b/tools/binman/etype/cbfs.py
index 0109fdbb918cdabd28d1bc4c5ac3d7c52e701e6b..28a9c81a8ad74511b0bbdebd328061522af3d5a2 100644
@@ -168,6 +168,7 @@ class Entry_cbfs(Entry):
         self._cbfs_arg = fdt_util.GetString(node, 'cbfs-arch', 'x86')
         self._cbfs_entries = OrderedDict()
         self._ReadSubnodes()
+        self.reader = None
 
     def ObtainContents(self, skip=None):
         arch = cbfs_util.find_arch(self._cbfs_arg)
@@ -202,7 +203,7 @@ class Entry_cbfs(Entry):
     def _ReadSubnodes(self):
         """Read the subnodes to find out what should go in this IFWI"""
         for node in self._node.subnodes:
-            entry = Entry.Create(self.section, node)
+            entry = Entry.Create(self, node)
             entry.ReadNode()
             entry._cbfs_name = fdt_util.GetString(node, 'cbfs-name', entry.name)
             entry._type = fdt_util.GetString(node, 'cbfs-type')
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index d1ecd65c2c36ed4b49993701e46208ccff8557e5..04bd9f886c5fa2dbb532ec13ecf7cb42dd10f0e8 100644
@@ -2485,7 +2485,7 @@ class TestFunctional(unittest.TestCase):
     def testExtractCbfsRaw(self):
         """Test extracting CBFS compressed data without decompressing it"""
         data = self._RunExtractCmd('section/cbfs/u-boot-dtb', decomp=False)
-        dtb = tools.Decompress(data, 'lzma')
+        dtb = tools.Decompress(data, 'lzma', with_header=False)
         self.assertEqual(EXTRACT_DTB_SIZE, len(dtb))
 
     def testExtractBadEntry(self):
@@ -2984,6 +2984,32 @@ class TestFunctional(unittest.TestCase):
         self.assertEqual(0xff800000, desc.offset);
         self.assertEqual(0xff800000, desc.image_pos);
 
+    def testReplaceCbfs(self):
+        """Test replacing a single file in CBFS without changing the size"""
+        self._CheckLz4()
+        expected = b'x' * len(U_BOOT_DATA)
+        data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
+        updated_fname = tools.GetOutputFilename('image-updated.bin')
+        tools.WriteFile(updated_fname, data)
+        entry_name = 'section/cbfs/u-boot'
+        control.WriteEntry(updated_fname, entry_name, expected,
+                           allow_resize=True)
+        data = control.ReadEntry(updated_fname, entry_name)
+        self.assertEqual(expected, data)
+
+    def testReplaceResizeCbfs(self):
+        """Test replacing a single file in CBFS with one of a different size"""
+        self._CheckLz4()
+        expected = U_BOOT_DATA + b'x'
+        data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
+        updated_fname = tools.GetOutputFilename('image-updated.bin')
+        tools.WriteFile(updated_fname, data)
+        entry_name = 'section/cbfs/u-boot'
+        control.WriteEntry(updated_fname, entry_name, expected,
+                           allow_resize=True)
+        data = control.ReadEntry(updated_fname, entry_name)
+        self.assertEqual(expected, data)
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/tools/binman/test/142_replace_cbfs.dts b/tools/binman/test/142_replace_cbfs.dts
new file mode 100644
index 0000000..d64142f
--- /dev/null
+++ b/tools/binman/test/142_replace_cbfs.dts
@@ -0,0 +1,37 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+       #address-cells = <1>;
+       #size-cells = <1>;
+
+       binman {
+               size = <0xe00>;
+               allow-repack;
+               u-boot {
+               };
+               section {
+                       align = <0x100>;
+                       cbfs {
+                               size = <0x400>;
+                               u-boot {
+                                       cbfs-type = "raw";
+                               };
+                               u-boot-dtb {
+                                       cbfs-type = "raw";
+                                       cbfs-compress = "lzma";
+                                       cbfs-offset = <0x80>;
+                               };
+                       };
+                       u-boot-dtb {
+                               compress = "lz4";
+                       };
+               };
+               fdtmap {
+               };
+               image-header {
+                       location = "end";
+               };
+       };
+};
diff --git a/tools/patman/tools.py b/tools/patman/tools.py
index f492dc8f8e3018abf1ffa16d48195f60dc63ee43..d615227482a06bd96932e91640fb6a25772a6557 100644
@@ -9,6 +9,7 @@ import command
 import glob
 import os
 import shutil
+import struct
 import sys
 import tempfile
 
@@ -377,7 +378,7 @@ def ToBytes(string):
         return string.encode('utf-8')
     return string
 
-def Compress(indata, algo):
+def Compress(indata, algo, with_header=True):
     """Compress some data using a given algorithm
 
     Note that for lzma this uses an old version of the algorithm, not that
@@ -408,9 +409,12 @@ def Compress(indata, algo):
         data = Run('gzip', '-c', fname, binary=True)
     else:
         raise ValueError("Unknown algorithm '%s'" % algo)
+    if with_header:
+        hdr = struct.pack('<I', len(data))
+        data = hdr + data
     return data
 
-def Decompress(indata, algo):
+def Decompress(indata, algo, with_header=True):
     """Decompress some data using a given algorithm
 
     Note that for lzma this uses an old version of the algorithm, not that
@@ -428,6 +432,9 @@ def Decompress(indata, algo):
     """
     if algo == 'none':
         return indata
+    if with_header:
+        data_len = struct.unpack('<I', indata[:4])[0]
+        indata = indata[4:4 + data_len]
     fname = GetOutputFilename('%s.decomp.tmp' % algo)
     with open(fname, 'wb') as fd:
         fd.write(indata)