diff --git a/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py b/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py
index dbf4dd3dc..1ec12f0b0 100755
--- a/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py
+++ b/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py
@@ -17,6 +17,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from __future__ import division, print_function
+from builtins import int, range, bytes
+from io import open
 import sys
 import argparse
 import binascii
@@ -26,10 +29,13 @@
 import os
 import array
 import csv
 import zlib
+import codecs
 from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
 from cryptography.hazmat.backends import default_backend
 """ Class for standard NVS page structure """
+
+
 class Page(object):
     PAGE_PARAMS = {
         "max_size": 4096,
@@ -59,8 +65,8 @@ class Page(object):
     CHUNK_ANY = 0xFF
     ACTIVE = 0xFFFFFFFE
     FULL = 0xFFFFFFFC
-    VERSION1=0xFF
-    VERSION2=0xFE
+    VERSION1 = 0xFF
+    VERSION2 = 0xFE

     def __init__(self, page_num):
         self.entry_num = 0
@@ -76,85 +82,83 @@ class Page(object):
         global page_header

         # set page state to active
-        page_header= bytearray(b'\xff')*32
+        page_header = bytearray(b'\xff') * 32
         page_state_active_seq = Page.ACTIVE
-        page_header[0:4] = struct.pack('
             assert tailroom >= 0, "Page overflow!!"

             # Split the binary data into two and store a chunk of available size onto curr page
             if tailroom < remaining_size:
@@ -245,8 +250,8 @@ class Page(object):

             # Calculate no. of entries data chunk will require
             datachunk_rounded_size = (chunk_size + 31) & ~31
-            datachunk_entry_count = datachunk_rounded_size / 32
-            datachunk_total_entry_count = datachunk_entry_count + 1 # +1 for the entry header
+            datachunk_entry_count = datachunk_rounded_size // 32
+            datachunk_total_entry_count = datachunk_entry_count + 1  # +1 for the entry header

             # Set Span
             entry_struct[2] = datachunk_total_entry_count
@@ -256,12 +261,15 @@ class Page(object):
             entry_struct[3] = chunk_index

             # Set data chunk
-            data_chunk = data[offset:offset + chunk_size]
+            data_chunk = data[offset:offset + chunk_size]

             # Compute CRC of data chunk
-            entry_struct[24:26] = struct.pack('
-            if datalen > Page.PAGE_PARAMS["max_old_blob_size"]:
-                raise InputError("%s: Size exceeds max allowed length." % key)
-
-        if version == Page.VERSION2:
-            if encoding == "string":
-                if datalen > Page.PAGE_PARAMS["max_new_blob_size"]:
-                    raise InputError("%s: Size exceeds max allowed length." % key)
+        if datalen > Page.PAGE_PARAMS["max_old_blob_size"]:
+            if version == Page.VERSION1:
+                raise InputError("Version %s\n%s: Size exceeds max allowed length." % (VERSION1_PRINT,key))
+            else:
+                if encoding == "string":
+                    raise InputError("Version %s\n%s: Size exceeds max allowed length." % (VERSION2_PRINT,key))

         # Calculate no. of entries data will require
         rounded_size = (datalen + 31) & ~31
-        data_entry_count = rounded_size / 32
-        total_entry_count = data_entry_count + 1 # +1 for the entry header
+        data_entry_count = rounded_size // 32
+        total_entry_count = data_entry_count + 1  # +1 for the entry header

         # Check if page is already full and new page is needed to be created right away
-        if encoding == "string":
-            if (self.entry_num + total_entry_count) >= Page.PAGE_PARAMS["max_entries"]:
+        if self.entry_num >= Page.PAGE_PARAMS["max_entries"]:
+            raise PageFullError()
+        elif (self.entry_num + total_entry_count) >= Page.PAGE_PARAMS["max_entries"]:
+            if not (version == Page.VERSION2 and encoding in ["hex2bin", "binary", "base64"]):
                 raise PageFullError()

         # Entry header
-        entry_struct = bytearray('\xff')*32
+        entry_struct = bytearray(b'\xff') * 32
         # Set Namespace Index
         entry_struct[0] = ns_index
         # Set Span
@@ -370,9 +375,9 @@ class Page(object):
         entry_struct[2] = data_entry_count + 1

         # set key
-        key_array = bytearray('\x00')*16
+        key_array = b'\x00' * 16
         entry_struct[8:24] = key_array
-        entry_struct[8:8 + len(key)] = key
+        entry_struct[8:8 + len(key)] = key.encode()

         # set Type
         if encoding == "string":
@@ -380,53 +385,52 @@ class Page(object):
             entry_struct[1] = Page.SZ
         elif encoding in ["hex2bin", "binary", "base64"]:
             entry_struct[1] = Page.BLOB

-        if version == Page.VERSION2 and (encoding in ["hex2bin", "binary", "base64"]):
-            entry_struct = self.write_varlen_binary_data(entry_struct,ns_index,key,data,\
-                datalen,total_entry_count, nvs_obj)
+        if version == Page.VERSION2 and (encoding in ["hex2bin", "binary", "base64"]):
+            entry_struct = self.write_varlen_binary_data(entry_struct,ns_index,key,data,
+                datalen,total_entry_count, encoding, nvs_obj)
         else:
             self.write_single_page_entry(entry_struct, data, datalen, data_entry_count, nvs_obj)

-
-
     """ Low-level function to write data of primitive type into page buffer. """
     def write_primitive_data(self, key, data, encoding, ns_index,nvs_obj):
         # Check if entry exceeds max number of entries allowed per page
         if self.entry_num >= Page.PAGE_PARAMS["max_entries"]:
             raise PageFullError()

-        entry_struct = bytearray('\xff')*32
-        entry_struct[0] = ns_index # namespace index
-        entry_struct[2] = 0x01 # Span
+        entry_struct = bytearray(b'\xff') * 32
+        entry_struct[0] = ns_index  # namespace index
+        entry_struct[2] = 0x01  # Span
         chunk_index = Page.CHUNK_ANY
         entry_struct[3] = chunk_index

         # write key
-        key_array = bytearray('\x00')*16
+        key_array = b'\x00' * 16
         entry_struct[8:24] = key_array
-        entry_struct[8:8 + len(key)] = key
+        entry_struct[8:8 + len(key)] = key.encode()

         if encoding == "u8":
             entry_struct[1] = Page.U8
-            entry_struct[24] = struct.pack('
=3.0
 future>=0.15.2
-cryptography
+cryptography>=2.1.4